commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
3.18k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43k
| ndiff
stringlengths 52
3.32k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| fuzzy_diff
stringlengths 16
3.18k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
00f83dad3a0cec2bccb4de878b477bbcf850e52d
|
core/datatypes/url.py
|
core/datatypes/url.py
|
import re
from mongoengine import *
import urlnorm
from core.datatypes import Element
from core.helpers import is_url
class Url(Element):
def clean(self):
"""Ensures that URLs are canonized before saving"""
try:
if not is_url(self.value):
raise ValidationError("Invalid URL (is_url={}): {}".format(is_url(self.value), self.value))
if re.match("[a-zA-Z]+://", self.value) is None:
self.value = "http://{}".format(self.value)
self.value = urlnorm.norm(self.value)
except urlnorm.InvalidUrl:
raise ValidationError("Invalid URL: {}".format(self.value))
|
import re
from mongoengine import *
import urlnorm
from core.datatypes import Element
from core.helpers import is_url
class Url(Element):
def clean(self):
"""Ensures that URLs are canonized before saving"""
try:
if re.match("[a-zA-Z]+://", self.value) is None:
self.value = "http://{}".format(self.value)
self.value = urlnorm.norm(self.value)
except urlnorm.InvalidUrl:
raise ValidationError("Invalid URL: {}".format(self.value))
|
Raise exception on invalid URL
|
Raise exception on invalid URL
|
Python
|
apache-2.0
|
yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti
|
import re
from mongoengine import *
import urlnorm
from core.datatypes import Element
from core.helpers import is_url
class Url(Element):
def clean(self):
"""Ensures that URLs are canonized before saving"""
try:
- if not is_url(self.value):
- raise ValidationError("Invalid URL (is_url={}): {}".format(is_url(self.value), self.value))
if re.match("[a-zA-Z]+://", self.value) is None:
self.value = "http://{}".format(self.value)
self.value = urlnorm.norm(self.value)
except urlnorm.InvalidUrl:
raise ValidationError("Invalid URL: {}".format(self.value))
|
Raise exception on invalid URL
|
## Code Before:
import re
from mongoengine import *
import urlnorm
from core.datatypes import Element
from core.helpers import is_url
class Url(Element):
def clean(self):
"""Ensures that URLs are canonized before saving"""
try:
if not is_url(self.value):
raise ValidationError("Invalid URL (is_url={}): {}".format(is_url(self.value), self.value))
if re.match("[a-zA-Z]+://", self.value) is None:
self.value = "http://{}".format(self.value)
self.value = urlnorm.norm(self.value)
except urlnorm.InvalidUrl:
raise ValidationError("Invalid URL: {}".format(self.value))
## Instruction:
Raise exception on invalid URL
## Code After:
import re
from mongoengine import *
import urlnorm
from core.datatypes import Element
from core.helpers import is_url
class Url(Element):
def clean(self):
"""Ensures that URLs are canonized before saving"""
try:
if re.match("[a-zA-Z]+://", self.value) is None:
self.value = "http://{}".format(self.value)
self.value = urlnorm.norm(self.value)
except urlnorm.InvalidUrl:
raise ValidationError("Invalid URL: {}".format(self.value))
|
...
try:
if re.match("[a-zA-Z]+://", self.value) is None:
...
|
e0e2b4fc60a945e9680c171109fd1cbb6f21e304
|
celery/run_carrizo.py
|
celery/run_carrizo.py
|
import dem
import tasks
from celery import *
carrizo = dem.DEMGrid('tests/data/carrizo.tif')
d = 100
max_age = 10**3.5
age_step = 1
num_ages = max_age/age_step
num_angles = 180
ages = np.linspace(0, max_age, num=num_ages)
angles = np.linspace(-np.pi/2, np.pi/2, num=num_angles)
template_fits = [tasks.match_template.s(carrizo, d, age, alpha) for age in ages for alpha in angles]
compare_callback = tasks.compare_fits.s()
res = chord(template_fits)(compare_callback)
|
import dem
import tasks
from celery import *
carrizo = dem.DEMGrid('tests/data/carrizo.tif')
d = 100
max_age = 10**3.5
age_step = 1
num_ages = max_age/age_step
num_angles = 180
ages = np.linspace(0, max_age, num=num_ages)
angles = np.linspace(-np.pi/2, np.pi/2, num=num_angles)
template_fits = [tasks.match_template.s(carrizo, d, age, alpha) for age in ages for alpha in angles]
compare_callback = tasks.compare_fits.s()
res = chord(template_fits)(compare_callback)
scarplet.save_results(carrizo, res, base_dir='results/')
res.forget()
|
Add test script for Carrizo data
|
Add test script for Carrizo data
|
Python
|
mit
|
stgl/scarplet,rmsare/scarplet
|
import dem
import tasks
from celery import *
carrizo = dem.DEMGrid('tests/data/carrizo.tif')
d = 100
max_age = 10**3.5
age_step = 1
num_ages = max_age/age_step
num_angles = 180
ages = np.linspace(0, max_age, num=num_ages)
angles = np.linspace(-np.pi/2, np.pi/2, num=num_angles)
template_fits = [tasks.match_template.s(carrizo, d, age, alpha) for age in ages for alpha in angles]
compare_callback = tasks.compare_fits.s()
res = chord(template_fits)(compare_callback)
+ scarplet.save_results(carrizo, res, base_dir='results/')
+ res.forget()
|
Add test script for Carrizo data
|
## Code Before:
import dem
import tasks
from celery import *
carrizo = dem.DEMGrid('tests/data/carrizo.tif')
d = 100
max_age = 10**3.5
age_step = 1
num_ages = max_age/age_step
num_angles = 180
ages = np.linspace(0, max_age, num=num_ages)
angles = np.linspace(-np.pi/2, np.pi/2, num=num_angles)
template_fits = [tasks.match_template.s(carrizo, d, age, alpha) for age in ages for alpha in angles]
compare_callback = tasks.compare_fits.s()
res = chord(template_fits)(compare_callback)
## Instruction:
Add test script for Carrizo data
## Code After:
import dem
import tasks
from celery import *
carrizo = dem.DEMGrid('tests/data/carrizo.tif')
d = 100
max_age = 10**3.5
age_step = 1
num_ages = max_age/age_step
num_angles = 180
ages = np.linspace(0, max_age, num=num_ages)
angles = np.linspace(-np.pi/2, np.pi/2, num=num_angles)
template_fits = [tasks.match_template.s(carrizo, d, age, alpha) for age in ages for alpha in angles]
compare_callback = tasks.compare_fits.s()
res = chord(template_fits)(compare_callback)
scarplet.save_results(carrizo, res, base_dir='results/')
res.forget()
|
...
scarplet.save_results(carrizo, res, base_dir='results/')
res.forget()
...
|
e6bfc4eb1d8f5a4d0239232fa89aa9d3d756549c
|
test/geocoders/geonames.py
|
test/geocoders/geonames.py
|
import unittest
from geopy.geocoders import GeoNames
from test.geocoders.util import GeocoderTestBase, env
@unittest.skipUnless( # pylint: disable=R0904,C0111
bool(env.get('GEONAMES_USERNAME')),
"No GEONAMES_USERNAME env variable set"
)
class GeoNamesTestCase(GeocoderTestBase):
@classmethod
def setUpClass(cls):
cls.delta = 0.04
def test_unicode_name(self):
"""
GeoNames.geocode unicode
"""
# work around ConfigurationError raised in GeoNames init
self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
self.geocode_run(
{"query": u"\u6545\u5bab"},
{"latitude": 30.90097, "longitude": 118.49436},
)
def test_reverse(self):
"""
GeoNames.reverse
"""
# work around ConfigurationError raised in GeoNames init
self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
self.reverse_run(
{"query": u"40.75376406311989, -73.98489005863667"},
{"latitude": 40.75376406311989, "longitude": -73.98489005863667},
)
|
import unittest
from geopy.geocoders import GeoNames
from test.geocoders.util import GeocoderTestBase, env
@unittest.skipUnless( # pylint: disable=R0904,C0111
bool(env.get('GEONAMES_USERNAME')),
"No GEONAMES_USERNAME env variable set"
)
class GeoNamesTestCase(GeocoderTestBase):
@classmethod
def setUpClass(cls):
cls.delta = 0.04
def test_unicode_name(self):
"""
GeoNames.geocode unicode
"""
# work around ConfigurationError raised in GeoNames init
self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
self.geocode_run(
{"query": u"Musée du Louvre"},
{"latitude": 48.8610, "longitude": 2.335},
)
def test_reverse(self):
"""
GeoNames.reverse
"""
# work around ConfigurationError raised in GeoNames init
self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
self.reverse_run(
{"query": u"40.75376406311989, -73.98489005863667"},
{"latitude": 40.75376406311989, "longitude": -73.98489005863667},
)
|
Use different location for GeoNames integration test
|
Use different location for GeoNames integration test
|
Python
|
mit
|
RDXT/geopy,Vimos/geopy,mthh/geopy,memaldi/geopy,ahlusar1989/geopy,jmb/geopy,two9seven/geopy,magnushiie/geopy,ahlusar1989/geopy,mthh/geopy,Vimos/geopy,smileliaohua/geopy,SoftwareArtisan/geopy,magnushiie/geopy,cffk/geopy,cffk/geopy,geopy/geopy,memaldi/geopy,RDXT/geopy,sebastianneubauer/geopy,sebastianneubauer/geopy,smileliaohua/geopy,two9seven/geopy,SoftwareArtisan/geopy
|
-
import unittest
from geopy.geocoders import GeoNames
from test.geocoders.util import GeocoderTestBase, env
@unittest.skipUnless( # pylint: disable=R0904,C0111
bool(env.get('GEONAMES_USERNAME')),
"No GEONAMES_USERNAME env variable set"
)
class GeoNamesTestCase(GeocoderTestBase):
@classmethod
def setUpClass(cls):
cls.delta = 0.04
def test_unicode_name(self):
"""
GeoNames.geocode unicode
"""
# work around ConfigurationError raised in GeoNames init
self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
self.geocode_run(
- {"query": u"\u6545\u5bab"},
+ {"query": u"Musée du Louvre"},
- {"latitude": 30.90097, "longitude": 118.49436},
+ {"latitude": 48.8610, "longitude": 2.335},
)
def test_reverse(self):
"""
GeoNames.reverse
"""
# work around ConfigurationError raised in GeoNames init
self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
self.reverse_run(
{"query": u"40.75376406311989, -73.98489005863667"},
{"latitude": 40.75376406311989, "longitude": -73.98489005863667},
)
|
Use different location for GeoNames integration test
|
## Code Before:
import unittest
from geopy.geocoders import GeoNames
from test.geocoders.util import GeocoderTestBase, env
@unittest.skipUnless( # pylint: disable=R0904,C0111
bool(env.get('GEONAMES_USERNAME')),
"No GEONAMES_USERNAME env variable set"
)
class GeoNamesTestCase(GeocoderTestBase):
@classmethod
def setUpClass(cls):
cls.delta = 0.04
def test_unicode_name(self):
"""
GeoNames.geocode unicode
"""
# work around ConfigurationError raised in GeoNames init
self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
self.geocode_run(
{"query": u"\u6545\u5bab"},
{"latitude": 30.90097, "longitude": 118.49436},
)
def test_reverse(self):
"""
GeoNames.reverse
"""
# work around ConfigurationError raised in GeoNames init
self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
self.reverse_run(
{"query": u"40.75376406311989, -73.98489005863667"},
{"latitude": 40.75376406311989, "longitude": -73.98489005863667},
)
## Instruction:
Use different location for GeoNames integration test
## Code After:
import unittest
from geopy.geocoders import GeoNames
from test.geocoders.util import GeocoderTestBase, env
@unittest.skipUnless( # pylint: disable=R0904,C0111
bool(env.get('GEONAMES_USERNAME')),
"No GEONAMES_USERNAME env variable set"
)
class GeoNamesTestCase(GeocoderTestBase):
@classmethod
def setUpClass(cls):
cls.delta = 0.04
def test_unicode_name(self):
"""
GeoNames.geocode unicode
"""
# work around ConfigurationError raised in GeoNames init
self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
self.geocode_run(
{"query": u"Musée du Louvre"},
{"latitude": 48.8610, "longitude": 2.335},
)
def test_reverse(self):
"""
GeoNames.reverse
"""
# work around ConfigurationError raised in GeoNames init
self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
self.reverse_run(
{"query": u"40.75376406311989, -73.98489005863667"},
{"latitude": 40.75376406311989, "longitude": -73.98489005863667},
)
|
...
import unittest
...
self.geocode_run(
{"query": u"Musée du Louvre"},
{"latitude": 48.8610, "longitude": 2.335},
)
...
|
a2b9777cc7ec4d606d3a33400c4f242bc9177fab
|
awx/main/migrations/0004_rbac_migrations.py
|
awx/main/migrations/0004_rbac_migrations.py
|
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
]
|
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_users),
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
migrations.RunPython(rbac.migrate_projects),
]
|
Add migrate_users and migrate_projects to our migration plan
|
Add migrate_users and migrate_projects to our migration plan
|
Python
|
apache-2.0
|
wwitzel3/awx,snahelou/awx,snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx
|
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
+ migrations.RunPython(rbac.migrate_users),
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
+ migrations.RunPython(rbac.migrate_projects),
]
|
Add migrate_users and migrate_projects to our migration plan
|
## Code Before:
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
]
## Instruction:
Add migrate_users and migrate_projects to our migration plan
## Code After:
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_users),
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
migrations.RunPython(rbac.migrate_projects),
]
|
# ... existing code ...
operations = [
migrations.RunPython(rbac.migrate_users),
migrations.RunPython(rbac.migrate_organization),
# ... modified code ...
migrations.RunPython(rbac.migrate_inventory),
migrations.RunPython(rbac.migrate_projects),
]
# ... rest of the code ...
|
b6f51e8873d1905da53027b73614f2eeb4c4ed3d
|
web/form/fields/validators.py
|
web/form/fields/validators.py
|
from wtforms.validators import Optional
class OptionalIf(Optional):
# makes a field optional if some other data is supplied
def __init__(self, deciding_field, *args, **kwargs):
self.deciding_field = deciding_field
super(OptionalIf, self).__init__(*args, **kwargs)
def __call__(self, form, field):
deciding_field = form._fields.get(self.deciding_field)
if deciding_field is None:
raise Exception('no field named "{}" in form'.format(
self.deciding_field))
if bool(deciding_field.data):
super(OptionalIf, self).__call__(form, field)
|
from wtforms.validators import Optional
class OptionalIf(Optional):
# makes a field optional if some other data is supplied or is not supplied
def __init__(self, deciding_field, invert=False, *args, **kwargs):
self.deciding_field = deciding_field
self.invert = invert
super(OptionalIf, self).__init__(*args, **kwargs)
def __call__(self, form, field):
deciding_field = form._fields.get(self.deciding_field)
if deciding_field is None:
raise Exception('no field named "{}" in form'.format(
self.deciding_field))
if (bool(deciding_field.data) and deciding_field.data != 'None')\
^ self.invert:
super(OptionalIf, self).__call__(form, field)
|
Add option to invert `OptionalIf` validator
|
Add option to invert `OptionalIf` validator
|
Python
|
apache-2.0
|
agdsn/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,agdsn/pycroft
|
from wtforms.validators import Optional
class OptionalIf(Optional):
- # makes a field optional if some other data is supplied
+ # makes a field optional if some other data is supplied or is not supplied
- def __init__(self, deciding_field, *args, **kwargs):
+ def __init__(self, deciding_field, invert=False, *args, **kwargs):
self.deciding_field = deciding_field
+ self.invert = invert
super(OptionalIf, self).__init__(*args, **kwargs)
def __call__(self, form, field):
deciding_field = form._fields.get(self.deciding_field)
if deciding_field is None:
raise Exception('no field named "{}" in form'.format(
self.deciding_field))
- if bool(deciding_field.data):
+ if (bool(deciding_field.data) and deciding_field.data != 'None')\
+ ^ self.invert:
super(OptionalIf, self).__call__(form, field)
-
|
Add option to invert `OptionalIf` validator
|
## Code Before:
from wtforms.validators import Optional
class OptionalIf(Optional):
# makes a field optional if some other data is supplied
def __init__(self, deciding_field, *args, **kwargs):
self.deciding_field = deciding_field
super(OptionalIf, self).__init__(*args, **kwargs)
def __call__(self, form, field):
deciding_field = form._fields.get(self.deciding_field)
if deciding_field is None:
raise Exception('no field named "{}" in form'.format(
self.deciding_field))
if bool(deciding_field.data):
super(OptionalIf, self).__call__(form, field)
## Instruction:
Add option to invert `OptionalIf` validator
## Code After:
from wtforms.validators import Optional
class OptionalIf(Optional):
# makes a field optional if some other data is supplied or is not supplied
def __init__(self, deciding_field, invert=False, *args, **kwargs):
self.deciding_field = deciding_field
self.invert = invert
super(OptionalIf, self).__init__(*args, **kwargs)
def __call__(self, form, field):
deciding_field = form._fields.get(self.deciding_field)
if deciding_field is None:
raise Exception('no field named "{}" in form'.format(
self.deciding_field))
if (bool(deciding_field.data) and deciding_field.data != 'None')\
^ self.invert:
super(OptionalIf, self).__call__(form, field)
|
...
class OptionalIf(Optional):
# makes a field optional if some other data is supplied or is not supplied
def __init__(self, deciding_field, invert=False, *args, **kwargs):
self.deciding_field = deciding_field
self.invert = invert
super(OptionalIf, self).__init__(*args, **kwargs)
...
self.deciding_field))
if (bool(deciding_field.data) and deciding_field.data != 'None')\
^ self.invert:
super(OptionalIf, self).__call__(form, field)
...
|
d174159ef6af50ec28146fd0a91ea3d677ee234f
|
tests/integration/test_redirection_absolute.py
|
tests/integration/test_redirection_absolute.py
|
"""Check REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
nikola.utils.makedirs(os.path.join(target_dir, "files", "foo"))
target_path = os.path.join(target_dir, "files", "foo", "bar.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("foo")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("posts/foo.html", "/foo/bar.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
"""Check REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
redirects_dir = os.path.join(target_dir, "files", "redirects")
nikola.utils.makedirs(redirects_dir)
target_path = os.path.join(redirects_dir, "absolute_source.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("absolute")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("posts/absolute.html", "/redirects/absolute_source.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
Refactor tests in preparation of the merge of redirect tests.
|
Refactor tests in preparation of the merge of redirect tests.
|
Python
|
mit
|
getnikola/nikola,okin/nikola,getnikola/nikola,okin/nikola,getnikola/nikola,okin/nikola,okin/nikola,getnikola/nikola
|
"""Check REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
- nikola.utils.makedirs(os.path.join(target_dir, "files", "foo"))
+ redirects_dir = os.path.join(target_dir, "files", "redirects")
+ nikola.utils.makedirs(redirects_dir)
- target_path = os.path.join(target_dir, "files", "foo", "bar.html")
+ target_path = os.path.join(redirects_dir, "absolute_source.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
- outf.write("foo")
+ outf.write("absolute")
append_config(
target_dir,
"""
- REDIRECTIONS = [ ("posts/foo.html", "/foo/bar.html"), ]
+ REDIRECTIONS = [ ("posts/absolute.html", "/redirects/absolute_source.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
Refactor tests in preparation of the merge of redirect tests.
|
## Code Before:
"""Check REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
nikola.utils.makedirs(os.path.join(target_dir, "files", "foo"))
target_path = os.path.join(target_dir, "files", "foo", "bar.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("foo")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("posts/foo.html", "/foo/bar.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
## Instruction:
Refactor tests in preparation of the merge of redirect tests.
## Code After:
"""Check REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
redirects_dir = os.path.join(target_dir, "files", "redirects")
nikola.utils.makedirs(redirects_dir)
target_path = os.path.join(redirects_dir, "absolute_source.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("absolute")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("posts/absolute.html", "/redirects/absolute_source.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
...
redirects_dir = os.path.join(target_dir, "files", "redirects")
nikola.utils.makedirs(redirects_dir)
target_path = os.path.join(redirects_dir, "absolute_source.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("absolute")
...
"""
REDIRECTIONS = [ ("posts/absolute.html", "/redirects/absolute_source.html"), ]
""",
...
|
c341259964b4874656f0394d459fe46b5e7d010e
|
temporenc/__init__.py
|
temporenc/__init__.py
|
# Export public API
from .temporenc import ( # noqa
pack,
packb,
unpack,
unpackb,
Moment,
)
|
__version__ = '0.1.0'
__version_info__ = tuple(map(int, __version__.split('.')))
# Export public API
from .temporenc import ( # noqa
pack,
packb,
unpack,
unpackb,
Moment,
)
|
Add __version__ and __version_info__ package attributes
|
Add __version__ and __version_info__ package attributes
See #3.
|
Python
|
bsd-3-clause
|
wbolster/temporenc-python
|
+
+ __version__ = '0.1.0'
+ __version_info__ = tuple(map(int, __version__.split('.')))
+
# Export public API
-
from .temporenc import ( # noqa
pack,
packb,
unpack,
unpackb,
Moment,
)
|
Add __version__ and __version_info__ package attributes
|
## Code Before:
# Export public API
from .temporenc import ( # noqa
pack,
packb,
unpack,
unpackb,
Moment,
)
## Instruction:
Add __version__ and __version_info__ package attributes
## Code After:
__version__ = '0.1.0'
__version_info__ = tuple(map(int, __version__.split('.')))
# Export public API
from .temporenc import ( # noqa
pack,
packb,
unpack,
unpackb,
Moment,
)
|
// ... existing code ...
__version__ = '0.1.0'
__version_info__ = tuple(map(int, __version__.split('.')))
// ... modified code ...
# Export public API
from .temporenc import ( # noqa
// ... rest of the code ...
|
41a83c6742f0e688dad5a98761c0f0415c77bac9
|
outgoing_mail.py
|
outgoing_mail.py
|
from google.appengine.api import mail
from google.appengine.ext.webapp import template
import os
from_address = '"EventBot" <[email protected]>'
def send(to, template_name, values):
path = os.path.join(os.path.dirname(__file__), 'email_templates', template_name)
message = mail.EmailMessage(sender=from_address, to=to)
message.subject = template.render(path + '.subject', values)
message.body = template.render(path + '.body', values)
message.send()
|
from google.appengine.api import mail
from google.appengine.ext.webapp import template
from google.appengine.api import memcache
from datetime import datetime
import os
from_address = '"EventBot" <[email protected]>'
email_interval = 10
def send(to, template_name, values):
"""Send an email to the specified address using a template. No
more than one email per EMAIL_INTERVAL seconds will be sent to any
given address.
"""
last_action = memcache.get(to, namespace='last_action')
if last_action != None:
return
path = os.path.join(os.path.dirname(__file__), 'email_templates', template_name)
message = mail.EmailMessage(sender=from_address, to=to)
message.subject = template.render(path + '.subject', values)
message.body = template.render(path + '.body', values)
message.send()
memcache.set(to, datetime.now(), time=email_interval, namespace='last_action')
|
Use memcache to rate-limit outgoing emails.
|
Use memcache to rate-limit outgoing emails.
|
Python
|
mit
|
eentzel/myeventbot,eentzel/myeventbot,eentzel/myeventbot,eentzel/myeventbot,eentzel/myeventbot
|
from google.appengine.api import mail
from google.appengine.ext.webapp import template
+ from google.appengine.api import memcache
+ from datetime import datetime
import os
from_address = '"EventBot" <[email protected]>'
+ email_interval = 10
def send(to, template_name, values):
+ """Send an email to the specified address using a template. No
+ more than one email per EMAIL_INTERVAL seconds will be sent to any
+ given address.
+ """
+ last_action = memcache.get(to, namespace='last_action')
+ if last_action != None:
+ return
path = os.path.join(os.path.dirname(__file__), 'email_templates', template_name)
message = mail.EmailMessage(sender=from_address, to=to)
message.subject = template.render(path + '.subject', values)
message.body = template.render(path + '.body', values)
message.send()
+ memcache.set(to, datetime.now(), time=email_interval, namespace='last_action')
|
Use memcache to rate-limit outgoing emails.
|
## Code Before:
from google.appengine.api import mail
from google.appengine.ext.webapp import template
import os
from_address = '"EventBot" <[email protected]>'
def send(to, template_name, values):
path = os.path.join(os.path.dirname(__file__), 'email_templates', template_name)
message = mail.EmailMessage(sender=from_address, to=to)
message.subject = template.render(path + '.subject', values)
message.body = template.render(path + '.body', values)
message.send()
## Instruction:
Use memcache to rate-limit outgoing emails.
## Code After:
from google.appengine.api import mail
from google.appengine.ext.webapp import template
from google.appengine.api import memcache
from datetime import datetime
import os
from_address = '"EventBot" <[email protected]>'
email_interval = 10
def send(to, template_name, values):
"""Send an email to the specified address using a template. No
more than one email per EMAIL_INTERVAL seconds will be sent to any
given address.
"""
last_action = memcache.get(to, namespace='last_action')
if last_action != None:
return
path = os.path.join(os.path.dirname(__file__), 'email_templates', template_name)
message = mail.EmailMessage(sender=from_address, to=to)
message.subject = template.render(path + '.subject', values)
message.body = template.render(path + '.body', values)
message.send()
memcache.set(to, datetime.now(), time=email_interval, namespace='last_action')
|
// ... existing code ...
from google.appengine.ext.webapp import template
from google.appengine.api import memcache
from datetime import datetime
import os
// ... modified code ...
from_address = '"EventBot" <[email protected]>'
email_interval = 10
...
def send(to, template_name, values):
"""Send an email to the specified address using a template. No
more than one email per EMAIL_INTERVAL seconds will be sent to any
given address.
"""
last_action = memcache.get(to, namespace='last_action')
if last_action != None:
return
path = os.path.join(os.path.dirname(__file__), 'email_templates', template_name)
...
message.send()
memcache.set(to, datetime.now(), time=email_interval, namespace='last_action')
// ... rest of the code ...
|
5a3935caab0bf720db6707bb7974eec2400f3701
|
prompt_toolkit/key_binding/bindings/auto_suggest.py
|
prompt_toolkit/key_binding/bindings/auto_suggest.py
|
from __future__ import unicode_literals
from prompt_toolkit.application.current import get_app
from prompt_toolkit.key_binding.key_bindings import KeyBindings
from prompt_toolkit.filters import Condition
__all__ = [
'load_auto_suggest_bindings',
]
def load_auto_suggest_bindings():
"""
Key bindings for accepting auto suggestion text.
(This has to come after the Vi bindings, because they also have an
implementation for the "right arrow", but we really want the suggestion
binding when a suggestion is available.)
"""
key_bindings = KeyBindings()
handle = key_bindings.add
@Condition
def suggestion_available():
app = get_app()
return (app.current_buffer.suggestion is not None and
app.current_buffer.document.is_cursor_at_the_end)
@handle('c-f', filter=suggestion_available)
@handle('c-e', filter=suggestion_available)
@handle('right', filter=suggestion_available)
def _(event):
" Accept suggestion. "
b = event.current_buffer
suggestion = b.suggestion
if suggestion:
b.insert_text(suggestion.text)
return key_bindings
|
from __future__ import unicode_literals
import re
from prompt_toolkit.application.current import get_app
from prompt_toolkit.key_binding.key_bindings import KeyBindings
from prompt_toolkit.filters import Condition, emacs_mode
__all__ = [
'load_auto_suggest_bindings',
]
def load_auto_suggest_bindings():
"""
Key bindings for accepting auto suggestion text.
(This has to come after the Vi bindings, because they also have an
implementation for the "right arrow", but we really want the suggestion
binding when a suggestion is available.)
"""
key_bindings = KeyBindings()
handle = key_bindings.add
@Condition
def suggestion_available():
app = get_app()
return (app.current_buffer.suggestion is not None and
app.current_buffer.document.is_cursor_at_the_end)
@handle('c-f', filter=suggestion_available)
@handle('c-e', filter=suggestion_available)
@handle('right', filter=suggestion_available)
def _(event):
" Accept suggestion. "
b = event.current_buffer
suggestion = b.suggestion
if suggestion:
b.insert_text(suggestion.text)
@handle('escape', 'f', filter=suggestion_available & emacs_mode)
def _(event):
" Fill partial suggestion. "
b = event.current_buffer
suggestion = b.suggestion
if suggestion:
t = re.split(r'(\S+\s+)', suggestion.text)
b.insert_text(next(x for x in t if x))
return key_bindings
|
Add alt-f binding for auto-suggestion.
|
Add alt-f binding for auto-suggestion.
|
Python
|
bsd-3-clause
|
jonathanslenders/python-prompt-toolkit
|
from __future__ import unicode_literals
+ import re
from prompt_toolkit.application.current import get_app
from prompt_toolkit.key_binding.key_bindings import KeyBindings
- from prompt_toolkit.filters import Condition
+ from prompt_toolkit.filters import Condition, emacs_mode
__all__ = [
'load_auto_suggest_bindings',
]
def load_auto_suggest_bindings():
"""
Key bindings for accepting auto suggestion text.
(This has to come after the Vi bindings, because they also have an
implementation for the "right arrow", but we really want the suggestion
binding when a suggestion is available.)
"""
key_bindings = KeyBindings()
handle = key_bindings.add
@Condition
def suggestion_available():
app = get_app()
return (app.current_buffer.suggestion is not None and
app.current_buffer.document.is_cursor_at_the_end)
@handle('c-f', filter=suggestion_available)
@handle('c-e', filter=suggestion_available)
@handle('right', filter=suggestion_available)
def _(event):
" Accept suggestion. "
b = event.current_buffer
suggestion = b.suggestion
if suggestion:
b.insert_text(suggestion.text)
+ @handle('escape', 'f', filter=suggestion_available & emacs_mode)
+ def _(event):
+ " Fill partial suggestion. "
+ b = event.current_buffer
+ suggestion = b.suggestion
+
+ if suggestion:
+ t = re.split(r'(\S+\s+)', suggestion.text)
+ b.insert_text(next(x for x in t if x))
+
return key_bindings
|
Add alt-f binding for auto-suggestion.
|
## Code Before:
from __future__ import unicode_literals
from prompt_toolkit.application.current import get_app
from prompt_toolkit.key_binding.key_bindings import KeyBindings
from prompt_toolkit.filters import Condition
__all__ = [
'load_auto_suggest_bindings',
]
def load_auto_suggest_bindings():
"""
Key bindings for accepting auto suggestion text.
(This has to come after the Vi bindings, because they also have an
implementation for the "right arrow", but we really want the suggestion
binding when a suggestion is available.)
"""
key_bindings = KeyBindings()
handle = key_bindings.add
@Condition
def suggestion_available():
app = get_app()
return (app.current_buffer.suggestion is not None and
app.current_buffer.document.is_cursor_at_the_end)
@handle('c-f', filter=suggestion_available)
@handle('c-e', filter=suggestion_available)
@handle('right', filter=suggestion_available)
def _(event):
" Accept suggestion. "
b = event.current_buffer
suggestion = b.suggestion
if suggestion:
b.insert_text(suggestion.text)
return key_bindings
## Instruction:
Add alt-f binding for auto-suggestion.
## Code After:
from __future__ import unicode_literals
import re
from prompt_toolkit.application.current import get_app
from prompt_toolkit.key_binding.key_bindings import KeyBindings
from prompt_toolkit.filters import Condition, emacs_mode
__all__ = [
'load_auto_suggest_bindings',
]
def load_auto_suggest_bindings():
"""
Key bindings for accepting auto suggestion text.
(This has to come after the Vi bindings, because they also have an
implementation for the "right arrow", but we really want the suggestion
binding when a suggestion is available.)
"""
key_bindings = KeyBindings()
handle = key_bindings.add
@Condition
def suggestion_available():
app = get_app()
return (app.current_buffer.suggestion is not None and
app.current_buffer.document.is_cursor_at_the_end)
@handle('c-f', filter=suggestion_available)
@handle('c-e', filter=suggestion_available)
@handle('right', filter=suggestion_available)
def _(event):
" Accept suggestion. "
b = event.current_buffer
suggestion = b.suggestion
if suggestion:
b.insert_text(suggestion.text)
@handle('escape', 'f', filter=suggestion_available & emacs_mode)
def _(event):
" Fill partial suggestion. "
b = event.current_buffer
suggestion = b.suggestion
if suggestion:
t = re.split(r'(\S+\s+)', suggestion.text)
b.insert_text(next(x for x in t if x))
return key_bindings
|
...
from __future__ import unicode_literals
import re
from prompt_toolkit.application.current import get_app
...
from prompt_toolkit.key_binding.key_bindings import KeyBindings
from prompt_toolkit.filters import Condition, emacs_mode
...
@handle('escape', 'f', filter=suggestion_available & emacs_mode)
def _(event):
" Fill partial suggestion. "
b = event.current_buffer
suggestion = b.suggestion
if suggestion:
t = re.split(r'(\S+\s+)', suggestion.text)
b.insert_text(next(x for x in t if x))
return key_bindings
...
|
27b9bd22bb43b8b86ae1c40a90c1fae7157dcb86
|
app/tests.py
|
app/tests.py
|
from app.test_base import BaseTestCase
class TestTopLevelFunctions(BaseTestCase):
def test_index_response(self):
response = self.client.get('/')
self.assert200(response)
|
from app.test_base import BaseTestCase
class TestTopLevelFunctions(BaseTestCase):
def test_index_response(self):
response = self.client.get('/')
self.assert200(response)
def test_login_required(self):
self.check_login_required('/scores/add', '/login?next=%2Fscores%2Fadd')
self.check_login_required('/judging/presentation/new', '/login?next=%2Fjudging%2Fpresentation%2Fnew')
self.check_login_required('/judging/technical/new', '/login?next=%2Fjudging%2Ftechnical%2Fnew')
self.check_login_required('/judging/core_values/new', '/login?next=%2Fjudging%2Fcore_values%2Fnew')
self.check_login_required('/settings', '/login?next=%2Fsettings')
self.check_login_required('/review', '/login?next=%2Freview')
self.check_login_required('/teams/new', '/login?next=%2Fteams%2Fnew')
self.check_login_required('/scores/playoffs', '/login?next=%2Fscores%2Fplayoffs')
def check_login_required(self, attempted_location, redirected_location):
response = self.client.get(attempted_location)
self.assertTrue(response.status_code in (301, 302))
self.assertEqual(response.location, 'http://' + self.app.config['SERVER_NAME'] + redirected_location)
|
Add test to verify login required for protected pages
|
Add test to verify login required for protected pages
|
Python
|
mit
|
rtfoley/scorepy,rtfoley/scorepy,rtfoley/scorepy
|
from app.test_base import BaseTestCase
class TestTopLevelFunctions(BaseTestCase):
def test_index_response(self):
response = self.client.get('/')
self.assert200(response)
+ def test_login_required(self):
+ self.check_login_required('/scores/add', '/login?next=%2Fscores%2Fadd')
+ self.check_login_required('/judging/presentation/new', '/login?next=%2Fjudging%2Fpresentation%2Fnew')
+ self.check_login_required('/judging/technical/new', '/login?next=%2Fjudging%2Ftechnical%2Fnew')
+ self.check_login_required('/judging/core_values/new', '/login?next=%2Fjudging%2Fcore_values%2Fnew')
+ self.check_login_required('/settings', '/login?next=%2Fsettings')
+ self.check_login_required('/review', '/login?next=%2Freview')
+ self.check_login_required('/teams/new', '/login?next=%2Fteams%2Fnew')
+ self.check_login_required('/scores/playoffs', '/login?next=%2Fscores%2Fplayoffs')
+
+ def check_login_required(self, attempted_location, redirected_location):
+ response = self.client.get(attempted_location)
+ self.assertTrue(response.status_code in (301, 302))
+ self.assertEqual(response.location, 'http://' + self.app.config['SERVER_NAME'] + redirected_location)
+
|
Add test to verify login required for protected pages
|
## Code Before:
from app.test_base import BaseTestCase
class TestTopLevelFunctions(BaseTestCase):
def test_index_response(self):
response = self.client.get('/')
self.assert200(response)
## Instruction:
Add test to verify login required for protected pages
## Code After:
from app.test_base import BaseTestCase
class TestTopLevelFunctions(BaseTestCase):
def test_index_response(self):
response = self.client.get('/')
self.assert200(response)
def test_login_required(self):
self.check_login_required('/scores/add', '/login?next=%2Fscores%2Fadd')
self.check_login_required('/judging/presentation/new', '/login?next=%2Fjudging%2Fpresentation%2Fnew')
self.check_login_required('/judging/technical/new', '/login?next=%2Fjudging%2Ftechnical%2Fnew')
self.check_login_required('/judging/core_values/new', '/login?next=%2Fjudging%2Fcore_values%2Fnew')
self.check_login_required('/settings', '/login?next=%2Fsettings')
self.check_login_required('/review', '/login?next=%2Freview')
self.check_login_required('/teams/new', '/login?next=%2Fteams%2Fnew')
self.check_login_required('/scores/playoffs', '/login?next=%2Fscores%2Fplayoffs')
def check_login_required(self, attempted_location, redirected_location):
response = self.client.get(attempted_location)
self.assertTrue(response.status_code in (301, 302))
self.assertEqual(response.location, 'http://' + self.app.config['SERVER_NAME'] + redirected_location)
|
# ... existing code ...
self.assert200(response)
def test_login_required(self):
self.check_login_required('/scores/add', '/login?next=%2Fscores%2Fadd')
self.check_login_required('/judging/presentation/new', '/login?next=%2Fjudging%2Fpresentation%2Fnew')
self.check_login_required('/judging/technical/new', '/login?next=%2Fjudging%2Ftechnical%2Fnew')
self.check_login_required('/judging/core_values/new', '/login?next=%2Fjudging%2Fcore_values%2Fnew')
self.check_login_required('/settings', '/login?next=%2Fsettings')
self.check_login_required('/review', '/login?next=%2Freview')
self.check_login_required('/teams/new', '/login?next=%2Fteams%2Fnew')
self.check_login_required('/scores/playoffs', '/login?next=%2Fscores%2Fplayoffs')
def check_login_required(self, attempted_location, redirected_location):
response = self.client.get(attempted_location)
self.assertTrue(response.status_code in (301, 302))
self.assertEqual(response.location, 'http://' + self.app.config['SERVER_NAME'] + redirected_location)
# ... rest of the code ...
|
e7149a488eaa85baecacfdf78a5d190b51dc46d7
|
tests/test_upgrade.py
|
tests/test_upgrade.py
|
import shutil
import tempfile
from os import path
import unittest
from libs.qpanel.upgrader import __first_line as firstline, get_current_version
class UpgradeTestClass(unittest.TestCase):
def setUp(self):
# Create a temporary directory
self.test_dir = tempfile.mkdtemp()
def tearDown(self):
# Remove the directory after the test
shutil.rmtree(self.test_dir)
def test_first_line(self):
content = 'a\n\b\t\b'
self.assertEqual(firstline(content), 'a')
self.assertNotEqual(firstline(content), 'ab')
def test_version(self):
version = '0.10'
version_file = path.join(self.test_dir, 'VERSION')
f = open(version_file, 'w')
f.write(version)
f.close()
self.assertEqual(get_current_version(version_file), version)
# runs the unit tests
if __name__ == '__main__':
unittest.main()
|
import shutil
import tempfile
from os import path
import unittest
from libs.qpanel.upgrader import __first_line as firstline, get_current_version
class UpgradeTestClass(unittest.TestCase):
def setUp(self):
# Create a temporary directory
self.test_dir = tempfile.mkdtemp()
def tearDown(self):
# Remove the directory after the test
shutil.rmtree(self.test_dir)
def test_first_line(self):
content = 'a\n\b\t\b'
self.assertEqual(firstline(content), 'a')
self.assertNotEqual(firstline(content), 'ab')
def test_version(self):
version = '0.10'
version_file = path.join(self.test_dir, 'VERSION')
f = open(version_file, 'w')
f.write(version)
f.close()
self.assertEqual(get_current_version(version_file), version)
self.assertNotEqual(get_current_version(version_file), '0.11.0')
# runs the unit tests
if __name__ == '__main__':
unittest.main()
|
Add not equals test for version function
|
Add not equals test for version function
|
Python
|
mit
|
roramirez/qpanel,skazancev/qpanel,roramirez/qpanel,skazancev/qpanel,roramirez/qpanel,skazancev/qpanel,skazancev/qpanel,roramirez/qpanel
|
import shutil
import tempfile
from os import path
import unittest
from libs.qpanel.upgrader import __first_line as firstline, get_current_version
class UpgradeTestClass(unittest.TestCase):
def setUp(self):
# Create a temporary directory
self.test_dir = tempfile.mkdtemp()
def tearDown(self):
# Remove the directory after the test
shutil.rmtree(self.test_dir)
def test_first_line(self):
content = 'a\n\b\t\b'
self.assertEqual(firstline(content), 'a')
self.assertNotEqual(firstline(content), 'ab')
def test_version(self):
version = '0.10'
version_file = path.join(self.test_dir, 'VERSION')
f = open(version_file, 'w')
f.write(version)
f.close()
self.assertEqual(get_current_version(version_file), version)
+ self.assertNotEqual(get_current_version(version_file), '0.11.0')
# runs the unit tests
if __name__ == '__main__':
unittest.main()
|
Add not equals test for version function
|
## Code Before:
import shutil
import tempfile
from os import path
import unittest
from libs.qpanel.upgrader import __first_line as firstline, get_current_version
class UpgradeTestClass(unittest.TestCase):
def setUp(self):
# Create a temporary directory
self.test_dir = tempfile.mkdtemp()
def tearDown(self):
# Remove the directory after the test
shutil.rmtree(self.test_dir)
def test_first_line(self):
content = 'a\n\b\t\b'
self.assertEqual(firstline(content), 'a')
self.assertNotEqual(firstline(content), 'ab')
def test_version(self):
version = '0.10'
version_file = path.join(self.test_dir, 'VERSION')
f = open(version_file, 'w')
f.write(version)
f.close()
self.assertEqual(get_current_version(version_file), version)
# runs the unit tests
if __name__ == '__main__':
unittest.main()
## Instruction:
Add not equals test for version function
## Code After:
import shutil
import tempfile
from os import path
import unittest
from libs.qpanel.upgrader import __first_line as firstline, get_current_version
class UpgradeTestClass(unittest.TestCase):
def setUp(self):
# Create a temporary directory
self.test_dir = tempfile.mkdtemp()
def tearDown(self):
# Remove the directory after the test
shutil.rmtree(self.test_dir)
def test_first_line(self):
content = 'a\n\b\t\b'
self.assertEqual(firstline(content), 'a')
self.assertNotEqual(firstline(content), 'ab')
def test_version(self):
version = '0.10'
version_file = path.join(self.test_dir, 'VERSION')
f = open(version_file, 'w')
f.write(version)
f.close()
self.assertEqual(get_current_version(version_file), version)
self.assertNotEqual(get_current_version(version_file), '0.11.0')
# runs the unit tests
if __name__ == '__main__':
unittest.main()
|
...
self.assertEqual(get_current_version(version_file), version)
self.assertNotEqual(get_current_version(version_file), '0.11.0')
...
|
96f9819ab67b48135a61c8a1e15bc808cf82d194
|
bokeh/models/widget.py
|
bokeh/models/widget.py
|
from __future__ import absolute_import
from ..plot_object import PlotObject
from ..properties import Bool
class Widget(PlotObject):
disabled = Bool(False)
|
from __future__ import absolute_import
from ..plot_object import PlotObject
from ..properties import Bool
from ..embed import notebook_div
class Widget(PlotObject):
disabled = Bool(False)
def _repr_html_(self):
return notebook_div(self)
@property
def html(self):
from IPython.core.display import HTML
return HTML(self._repr_html_())
|
Implement display protocol for Widget (_repr_html_)
|
Implement display protocol for Widget (_repr_html_)
This effectively allows us to automatically display plots and widgets.
|
Python
|
bsd-3-clause
|
evidation-health/bokeh,abele/bokeh,mutirri/bokeh,percyfal/bokeh,htygithub/bokeh,jakirkham/bokeh,rhiever/bokeh,DuCorey/bokeh,srinathv/bokeh,DuCorey/bokeh,awanke/bokeh,clairetang6/bokeh,ericdill/bokeh,ahmadia/bokeh,saifrahmed/bokeh,mutirri/bokeh,bokeh/bokeh,gpfreitas/bokeh,philippjfr/bokeh,xguse/bokeh,srinathv/bokeh,draperjames/bokeh,schoolie/bokeh,laurent-george/bokeh,paultcochrane/bokeh,akloster/bokeh,caseyclements/bokeh,justacec/bokeh,maxalbert/bokeh,philippjfr/bokeh,birdsarah/bokeh,evidation-health/bokeh,rs2/bokeh,phobson/bokeh,PythonCharmers/bokeh,draperjames/bokeh,satishgoda/bokeh,mindriot101/bokeh,PythonCharmers/bokeh,CrazyGuo/bokeh,mindriot101/bokeh,birdsarah/bokeh,jplourenco/bokeh,matbra/bokeh,htygithub/bokeh,deeplook/bokeh,abele/bokeh,bsipocz/bokeh,rhiever/bokeh,laurent-george/bokeh,ericmjl/bokeh,htygithub/bokeh,DuCorey/bokeh,justacec/bokeh,PythonCharmers/bokeh,msarahan/bokeh,mutirri/bokeh,percyfal/bokeh,timsnyder/bokeh,timsnyder/bokeh,muku42/bokeh,deeplook/bokeh,xguse/bokeh,daodaoliang/bokeh,ChristosChristofidis/bokeh,ericmjl/bokeh,timothydmorton/bokeh,percyfal/bokeh,schoolie/bokeh,alan-unravel/bokeh,jplourenco/bokeh,canavandl/bokeh,Karel-van-de-Plassche/bokeh,bokeh/bokeh,evidation-health/bokeh,Karel-van-de-Plassche/bokeh,tacaswell/bokeh,bsipocz/bokeh,mutirri/bokeh,deeplook/bokeh,dennisobrien/bokeh,msarahan/bokeh,quasiben/bokeh,roxyboy/bokeh,josherick/bokeh,mindriot101/bokeh,saifrahmed/bokeh,rothnic/bokeh,CrazyGuo/bokeh,canavandl/bokeh,aiguofer/bokeh,akloster/bokeh,clairetang6/bokeh,almarklein/bokeh,josherick/bokeh,aiguofer/bokeh,timothydmorton/bokeh,ptitjano/bokeh,KasperPRasmussen/bokeh,mindriot101/bokeh,aavanian/bokeh,josherick/bokeh,quasiben/bokeh,xguse/bokeh,saifrahmed/bokeh,KasperPRasmussen/bokeh,akloster/bokeh,awanke/bokeh,ptitjano/bokeh,aavanian/bokeh,azjps/bokeh,tacaswell/bokeh,draperjames/bokeh,alan-unravel/bokeh,ericmjl/bokeh,rs2/bokeh,bokeh/bokeh,stonebig/bokeh,tacaswell/bokeh,ChinaQuants/bokeh,stonebig/bokeh,stuart-knock/bokeh,paultcochrane/boke
h,xguse/bokeh,jakirkham/bokeh,abele/bokeh,alan-unravel/bokeh,KasperPRasmussen/bokeh,birdsarah/bokeh,stuart-knock/bokeh,Karel-van-de-Plassche/bokeh,carlvlewis/bokeh,gpfreitas/bokeh,dennisobrien/bokeh,deeplook/bokeh,alan-unravel/bokeh,lukebarnard1/bokeh,jakirkham/bokeh,ahmadia/bokeh,aavanian/bokeh,phobson/bokeh,clairetang6/bokeh,timsnyder/bokeh,ptitjano/bokeh,ahmadia/bokeh,lukebarnard1/bokeh,rs2/bokeh,tacaswell/bokeh,ericdill/bokeh,matbra/bokeh,satishgoda/bokeh,awanke/bokeh,rothnic/bokeh,evidation-health/bokeh,jplourenco/bokeh,muku42/bokeh,CrazyGuo/bokeh,roxyboy/bokeh,bokeh/bokeh,caseyclements/bokeh,jplourenco/bokeh,matbra/bokeh,gpfreitas/bokeh,ChinaQuants/bokeh,ChinaQuants/bokeh,KasperPRasmussen/bokeh,dennisobrien/bokeh,saifrahmed/bokeh,timothydmorton/bokeh,rhiever/bokeh,timsnyder/bokeh,maxalbert/bokeh,DuCorey/bokeh,azjps/bokeh,birdsarah/bokeh,satishgoda/bokeh,stonebig/bokeh,srinathv/bokeh,rs2/bokeh,aiguofer/bokeh,schoolie/bokeh,rothnic/bokeh,philippjfr/bokeh,laurent-george/bokeh,stonebig/bokeh,matbra/bokeh,justacec/bokeh,maxalbert/bokeh,percyfal/bokeh,jakirkham/bokeh,eteq/bokeh,eteq/bokeh,rs2/bokeh,philippjfr/bokeh,daodaoliang/bokeh,ericdill/bokeh,azjps/bokeh,khkaminska/bokeh,draperjames/bokeh,philippjfr/bokeh,almarklein/bokeh,canavandl/bokeh,ericmjl/bokeh,clairetang6/bokeh,ptitjano/bokeh,srinathv/bokeh,KasperPRasmussen/bokeh,ericmjl/bokeh,htygithub/bokeh,carlvlewis/bokeh,ptitjano/bokeh,aiguofer/bokeh,laurent-george/bokeh,lukebarnard1/bokeh,ChristosChristofidis/bokeh,abele/bokeh,ChristosChristofidis/bokeh,azjps/bokeh,draperjames/bokeh,jakirkham/bokeh,roxyboy/bokeh,Karel-van-de-Plassche/bokeh,roxyboy/bokeh,khkaminska/bokeh,phobson/bokeh,caseyclements/bokeh,paultcochrane/bokeh,percyfal/bokeh,caseyclements/bokeh,muku42/bokeh,eteq/bokeh,msarahan/bokeh,aiguofer/bokeh,almarklein/bokeh,ChinaQuants/bokeh,ericdill/bokeh,PythonCharmers/bokeh,khkaminska/bokeh,carlvlewis/bokeh,canavandl/bokeh,bokeh/bokeh,timsnyder/bokeh,eteq/bokeh,muku42/bokeh,rothnic/bokeh,ahmadia/bokeh,timoth
ydmorton/bokeh,DuCorey/bokeh,stuart-knock/bokeh,bsipocz/bokeh,phobson/bokeh,dennisobrien/bokeh,stuart-knock/bokeh,CrazyGuo/bokeh,aavanian/bokeh,schoolie/bokeh,phobson/bokeh,dennisobrien/bokeh,akloster/bokeh,bsipocz/bokeh,paultcochrane/bokeh,josherick/bokeh,daodaoliang/bokeh,schoolie/bokeh,rhiever/bokeh,maxalbert/bokeh,satishgoda/bokeh,ChristosChristofidis/bokeh,msarahan/bokeh,carlvlewis/bokeh,justacec/bokeh,Karel-van-de-Plassche/bokeh,lukebarnard1/bokeh,daodaoliang/bokeh,azjps/bokeh,awanke/bokeh,khkaminska/bokeh,gpfreitas/bokeh,aavanian/bokeh,quasiben/bokeh
|
from __future__ import absolute_import
from ..plot_object import PlotObject
from ..properties import Bool
+ from ..embed import notebook_div
class Widget(PlotObject):
disabled = Bool(False)
+ def _repr_html_(self):
+ return notebook_div(self)
+
+ @property
+ def html(self):
+ from IPython.core.display import HTML
+ return HTML(self._repr_html_())
+
|
Implement display protocol for Widget (_repr_html_)
|
## Code Before:
from __future__ import absolute_import
from ..plot_object import PlotObject
from ..properties import Bool
class Widget(PlotObject):
disabled = Bool(False)
## Instruction:
Implement display protocol for Widget (_repr_html_)
## Code After:
from __future__ import absolute_import
from ..plot_object import PlotObject
from ..properties import Bool
from ..embed import notebook_div
class Widget(PlotObject):
disabled = Bool(False)
def _repr_html_(self):
return notebook_div(self)
@property
def html(self):
from IPython.core.display import HTML
return HTML(self._repr_html_())
|
// ... existing code ...
from ..properties import Bool
from ..embed import notebook_div
// ... modified code ...
disabled = Bool(False)
def _repr_html_(self):
return notebook_div(self)
@property
def html(self):
from IPython.core.display import HTML
return HTML(self._repr_html_())
// ... rest of the code ...
|
ad069a50ec7a4b4e6b1dac679e071279e128c824
|
been/source/markdowndirectory.py
|
been/source/markdowndirectory.py
|
from been.core import DirectorySource, source_registry
from hashlib import sha1
import re
import unicodedata
import time
import markdown
# slugify from Django source (BSD license)
def slugify(value):
value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore')
value = unicode(re.sub('[^\w\s-]', '', value).strip().lower())
return re.sub('[-\s]+', '-', value)
class MarkdownDirectory(DirectorySource):
kind = 'markdown'
def process_event(self, event):
md = markdown.Markdown(extensions=['meta'])
event['content'] = md.convert(event['raw'])
event['title'] = ' '.join(md.Meta.get('title', [event['filename']]))
event['slug'] = '-'.join(md.Meta.get('slug', [slugify(event['title'])]))
event['summary'] = ' '.join(md.Meta.get('summary', [event['raw'][:100]]))
if md.Meta.get('published'):
# Parse time, then convert struct_time (local) -> epoch (GMT) -> struct_time (GMT)
event['timestamp'] = time.gmtime(time.mktime(time.strptime(' '.join(md.Meta.get('published')), '%Y-%m-%d %H:%M:%S')))
event['_id'] = sha1(event['full_path'].encode('utf-8')).hexdigest()
if time.gmtime() < event['timestamp']:
return None
else:
return event
source_registry.add(MarkdownDirectory)
|
from been.core import DirectorySource, source_registry
from hashlib import sha1
import re
import unicodedata
import time
import markdown
# slugify from Django source (BSD license)
def slugify(value):
value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore')
value = unicode(re.sub('[^\w\s-]', '', value).strip().lower())
return re.sub('[-\s]+', '-', value)
class MarkdownDirectory(DirectorySource):
kind = 'markdown'
def process_event(self, event):
md = markdown.Markdown(extensions=['meta'])
event['content'] = md.convert(event['raw'])
event['title'] = ' '.join(md.Meta.get('title', [event['filename']]))
event['author'] = ' '.join(md.Meta.get('author', ['']))
event['slug'] = '-'.join(md.Meta.get('slug', [slugify(event['title'])]))
event['summary'] = ' '.join(md.Meta.get('summary', [event['raw'][:100]]))
if md.Meta.get('published'):
# Parse time, then convert struct_time (local) -> epoch (GMT) -> struct_time (GMT)
event['timestamp'] = time.gmtime(time.mktime(time.strptime(' '.join(md.Meta.get('published')), '%Y-%m-%d %H:%M:%S')))
event['_id'] = sha1(event['full_path'].encode('utf-8')).hexdigest()
if time.gmtime() < event['timestamp']:
return None
else:
return event
source_registry.add(MarkdownDirectory)
|
Allow MarkdownDirectory events to specify author.
|
Allow MarkdownDirectory events to specify author.
|
Python
|
bsd-3-clause
|
chromakode/been
|
from been.core import DirectorySource, source_registry
from hashlib import sha1
import re
import unicodedata
import time
import markdown
# slugify from Django source (BSD license)
def slugify(value):
value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore')
value = unicode(re.sub('[^\w\s-]', '', value).strip().lower())
return re.sub('[-\s]+', '-', value)
class MarkdownDirectory(DirectorySource):
kind = 'markdown'
def process_event(self, event):
md = markdown.Markdown(extensions=['meta'])
event['content'] = md.convert(event['raw'])
event['title'] = ' '.join(md.Meta.get('title', [event['filename']]))
+ event['author'] = ' '.join(md.Meta.get('author', ['']))
event['slug'] = '-'.join(md.Meta.get('slug', [slugify(event['title'])]))
event['summary'] = ' '.join(md.Meta.get('summary', [event['raw'][:100]]))
if md.Meta.get('published'):
# Parse time, then convert struct_time (local) -> epoch (GMT) -> struct_time (GMT)
event['timestamp'] = time.gmtime(time.mktime(time.strptime(' '.join(md.Meta.get('published')), '%Y-%m-%d %H:%M:%S')))
event['_id'] = sha1(event['full_path'].encode('utf-8')).hexdigest()
if time.gmtime() < event['timestamp']:
return None
else:
return event
source_registry.add(MarkdownDirectory)
|
Allow MarkdownDirectory events to specify author.
|
## Code Before:
from been.core import DirectorySource, source_registry
from hashlib import sha1
import re
import unicodedata
import time
import markdown
# slugify from Django source (BSD license)
def slugify(value):
value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore')
value = unicode(re.sub('[^\w\s-]', '', value).strip().lower())
return re.sub('[-\s]+', '-', value)
class MarkdownDirectory(DirectorySource):
kind = 'markdown'
def process_event(self, event):
md = markdown.Markdown(extensions=['meta'])
event['content'] = md.convert(event['raw'])
event['title'] = ' '.join(md.Meta.get('title', [event['filename']]))
event['slug'] = '-'.join(md.Meta.get('slug', [slugify(event['title'])]))
event['summary'] = ' '.join(md.Meta.get('summary', [event['raw'][:100]]))
if md.Meta.get('published'):
# Parse time, then convert struct_time (local) -> epoch (GMT) -> struct_time (GMT)
event['timestamp'] = time.gmtime(time.mktime(time.strptime(' '.join(md.Meta.get('published')), '%Y-%m-%d %H:%M:%S')))
event['_id'] = sha1(event['full_path'].encode('utf-8')).hexdigest()
if time.gmtime() < event['timestamp']:
return None
else:
return event
source_registry.add(MarkdownDirectory)
## Instruction:
Allow MarkdownDirectory events to specify author.
## Code After:
from been.core import DirectorySource, source_registry
from hashlib import sha1
import re
import unicodedata
import time
import markdown
# slugify from Django source (BSD license)
def slugify(value):
value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore')
value = unicode(re.sub('[^\w\s-]', '', value).strip().lower())
return re.sub('[-\s]+', '-', value)
class MarkdownDirectory(DirectorySource):
kind = 'markdown'
def process_event(self, event):
md = markdown.Markdown(extensions=['meta'])
event['content'] = md.convert(event['raw'])
event['title'] = ' '.join(md.Meta.get('title', [event['filename']]))
event['author'] = ' '.join(md.Meta.get('author', ['']))
event['slug'] = '-'.join(md.Meta.get('slug', [slugify(event['title'])]))
event['summary'] = ' '.join(md.Meta.get('summary', [event['raw'][:100]]))
if md.Meta.get('published'):
# Parse time, then convert struct_time (local) -> epoch (GMT) -> struct_time (GMT)
event['timestamp'] = time.gmtime(time.mktime(time.strptime(' '.join(md.Meta.get('published')), '%Y-%m-%d %H:%M:%S')))
event['_id'] = sha1(event['full_path'].encode('utf-8')).hexdigest()
if time.gmtime() < event['timestamp']:
return None
else:
return event
source_registry.add(MarkdownDirectory)
|
# ... existing code ...
event['title'] = ' '.join(md.Meta.get('title', [event['filename']]))
event['author'] = ' '.join(md.Meta.get('author', ['']))
event['slug'] = '-'.join(md.Meta.get('slug', [slugify(event['title'])]))
# ... rest of the code ...
|
2e897f7dce89d4b52c3507c62e7120ee238b713c
|
database/database_setup.py
|
database/database_setup.py
|
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
from models.base import Base
from models.user import User
from models.store import Store
from models.product import Product
engine = create_engine('sqlite:///productcatalog.db')
Base.metadata.create_all(engine)
|
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
from models.base import Base
from models.user import User
from models.store import Store
from models.product import Product
engine = create_engine('postgresql://catalog:catalog123!@localhost:8000/catalog')
Base.metadata.create_all(engine)
|
Connect database engine to postgresql
|
feat: Connect database engine to postgresql
|
Python
|
mit
|
caasted/aws-flask-catalog-app,caasted/aws-flask-catalog-app
|
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
from models.base import Base
from models.user import User
from models.store import Store
from models.product import Product
- engine = create_engine('sqlite:///productcatalog.db')
+ engine = create_engine('postgresql://catalog:catalog123!@localhost:8000/catalog')
Base.metadata.create_all(engine)
|
Connect database engine to postgresql
|
## Code Before:
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
from models.base import Base
from models.user import User
from models.store import Store
from models.product import Product
engine = create_engine('sqlite:///productcatalog.db')
Base.metadata.create_all(engine)
## Instruction:
Connect database engine to postgresql
## Code After:
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
from models.base import Base
from models.user import User
from models.store import Store
from models.product import Product
engine = create_engine('postgresql://catalog:catalog123!@localhost:8000/catalog')
Base.metadata.create_all(engine)
|
...
engine = create_engine('postgresql://catalog:catalog123!@localhost:8000/catalog')
Base.metadata.create_all(engine)
...
|
4148c03ce666f12b8b04be7103ae6a969dd0c022
|
fabfile.py
|
fabfile.py
|
from fabric.api import *
env.hosts = [
'[email protected]'
]
env.target_directory = './happyman'
def init():
run('git clone -q https://github.com/skyshaper/happyman.git ' + env.target_directory)
with cd(env.target_directory):
run('virtualenv python_virtualenv')
def deploy():
local('git push')
with cd(env.target_directory):
run('git remote update && git reset --hard origin/master')
run('carton install --cached --deployment')
with cd('python_virtualenv'):
run('./bin/pip install -r ../cobe_python_requirements.txt')
execute(restart)
def restart():
run('svc -t ~/service/happyman')
|
from fabric.api import *
env.hosts = [
'[email protected]'
]
env.target_directory = './happyman'
def init():
run('git clone -q https://github.com/skyshaper/happyman.git ' + env.target_directory)
with cd(env.target_directory):
run('virtualenv python_virtualenv')
def deploy():
local('git push')
with cd(env.target_directory):
run('git remote update && git reset --hard origin/master')
run('./vendor/bin/carton install --cached --deployment')
with cd('python_virtualenv'):
run('./bin/pip install -r ../cobe_python_requirements.txt')
execute(restart)
def restart():
run('svc -t ~/service/happyman')
|
Use included carton executable on deploy
|
Use included carton executable on deploy
|
Python
|
mit
|
skyshaper/happyman,skyshaper/happyman,skyshaper/happyman
|
from fabric.api import *
env.hosts = [
'[email protected]'
]
env.target_directory = './happyman'
def init():
run('git clone -q https://github.com/skyshaper/happyman.git ' + env.target_directory)
with cd(env.target_directory):
run('virtualenv python_virtualenv')
def deploy():
local('git push')
with cd(env.target_directory):
run('git remote update && git reset --hard origin/master')
- run('carton install --cached --deployment')
+ run('./vendor/bin/carton install --cached --deployment')
with cd('python_virtualenv'):
run('./bin/pip install -r ../cobe_python_requirements.txt')
execute(restart)
def restart():
run('svc -t ~/service/happyman')
|
Use included carton executable on deploy
|
## Code Before:
from fabric.api import *
env.hosts = [
'[email protected]'
]
env.target_directory = './happyman'
def init():
run('git clone -q https://github.com/skyshaper/happyman.git ' + env.target_directory)
with cd(env.target_directory):
run('virtualenv python_virtualenv')
def deploy():
local('git push')
with cd(env.target_directory):
run('git remote update && git reset --hard origin/master')
run('carton install --cached --deployment')
with cd('python_virtualenv'):
run('./bin/pip install -r ../cobe_python_requirements.txt')
execute(restart)
def restart():
run('svc -t ~/service/happyman')
## Instruction:
Use included carton executable on deploy
## Code After:
from fabric.api import *
env.hosts = [
'[email protected]'
]
env.target_directory = './happyman'
def init():
run('git clone -q https://github.com/skyshaper/happyman.git ' + env.target_directory)
with cd(env.target_directory):
run('virtualenv python_virtualenv')
def deploy():
local('git push')
with cd(env.target_directory):
run('git remote update && git reset --hard origin/master')
run('./vendor/bin/carton install --cached --deployment')
with cd('python_virtualenv'):
run('./bin/pip install -r ../cobe_python_requirements.txt')
execute(restart)
def restart():
run('svc -t ~/service/happyman')
|
# ... existing code ...
run('git remote update && git reset --hard origin/master')
run('./vendor/bin/carton install --cached --deployment')
with cd('python_virtualenv'):
# ... rest of the code ...
|
e5b503d0e66f8422412d0cdeac4ba4f55f14e420
|
spectrum/object.py
|
spectrum/object.py
|
class Object:
"""Represents a generic Spectrum object
Supported Operations:
+-----------+--------------------------------------+
| Operation | Description |
+===========+======================================+
| x == y | Checks if two objects are equal. |
+-----------+--------------------------------------+
| x != y | Checks if two objects are not equal. |
+-----------+--------------------------------------+
This is the class that will be the base class of most objects, since most
have an ID number.
id : int
The ID of the object
"""
def __init__(self, id):
self.id = int(id)
def __eq__(self, other):
return isinstance(other, self.__class__) and other.id == self.id
def __ne__(self, other):
if isinstance(other, self.__class__):
return other.id != self.id
return True
|
class Object:
"""Represents a generic Spectrum object
Supported Operations:
+-----------+--------------------------------------+
| Operation | Description |
+===========+======================================+
| x == y | Checks if two objects are equal. |
+-----------+--------------------------------------+
| x != y | Checks if two objects are not equal. |
+-----------+--------------------------------------+
This class is the base class of most objects, since most
have an ID number.
id : int
The ID of the object
"""
def __init__(self, id):
self.id = int(id)
def __eq__(self, other):
return isinstance(other, self.__class__) and other.id == self.id
def __ne__(self, other):
if isinstance(other, self.__class__):
return other.id != self.id
return True
|
Change wording from future to present tense
|
Documentation: Change wording from future to present tense
|
Python
|
mit
|
treefroog/spectrum.py
|
class Object:
"""Represents a generic Spectrum object
Supported Operations:
+-----------+--------------------------------------+
| Operation | Description |
+===========+======================================+
| x == y | Checks if two objects are equal. |
+-----------+--------------------------------------+
| x != y | Checks if two objects are not equal. |
+-----------+--------------------------------------+
- This is the class that will be the base class of most objects, since most
+ This class is the base class of most objects, since most
have an ID number.
id : int
The ID of the object
"""
def __init__(self, id):
self.id = int(id)
def __eq__(self, other):
return isinstance(other, self.__class__) and other.id == self.id
def __ne__(self, other):
if isinstance(other, self.__class__):
return other.id != self.id
return True
|
Change wording from future to present tense
|
## Code Before:
class Object:
"""Represents a generic Spectrum object
Supported Operations:
+-----------+--------------------------------------+
| Operation | Description |
+===========+======================================+
| x == y | Checks if two objects are equal. |
+-----------+--------------------------------------+
| x != y | Checks if two objects are not equal. |
+-----------+--------------------------------------+
This is the class that will be the base class of most objects, since most
have an ID number.
id : int
The ID of the object
"""
def __init__(self, id):
self.id = int(id)
def __eq__(self, other):
return isinstance(other, self.__class__) and other.id == self.id
def __ne__(self, other):
if isinstance(other, self.__class__):
return other.id != self.id
return True
## Instruction:
Change wording from future to present tense
## Code After:
class Object:
"""Represents a generic Spectrum object
Supported Operations:
+-----------+--------------------------------------+
| Operation | Description |
+===========+======================================+
| x == y | Checks if two objects are equal. |
+-----------+--------------------------------------+
| x != y | Checks if two objects are not equal. |
+-----------+--------------------------------------+
This class is the base class of most objects, since most
have an ID number.
id : int
The ID of the object
"""
def __init__(self, id):
self.id = int(id)
def __eq__(self, other):
return isinstance(other, self.__class__) and other.id == self.id
def __ne__(self, other):
if isinstance(other, self.__class__):
return other.id != self.id
return True
|
// ... existing code ...
This class is the base class of most objects, since most
have an ID number.
// ... rest of the code ...
|
da54fa6d681ab7f2e3146b55d562e5a4d68623cc
|
luigi/tasks/export/ftp/__init__.py
|
luigi/tasks/export/ftp/__init__.py
|
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
|
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
from .go_annotations import GoAnnotationExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
yield GoAnnotationExport
|
Make GO term export part of FTP export
|
Make GO term export part of FTP export
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
|
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
+ from .go_annotations import GoAnnotationExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
+ yield GoAnnotationExport
|
Make GO term export part of FTP export
|
## Code Before:
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
## Instruction:
Make GO term export part of FTP export
## Code After:
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
from .go_annotations import GoAnnotationExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
yield GoAnnotationExport
|
...
from .ensembl import EnsemblExport
from .go_annotations import GoAnnotationExport
...
yield EnsemblExport
yield GoAnnotationExport
...
|
9545c2d78696d7f75299d958cf44f8cf695581ac
|
DGEclust/readCountData.py
|
DGEclust/readCountData.py
|
import numpy as np
import pandas as pd
################################################################################
def readCountData(fname, classes = None, *args, **kargs):
df = pd.read_table(fname, *args, **kargs)
## add attributes
df.counts = df.values
df.exposures = df.sum() / df.sum().astype('double') #df.sum() / df.sum().max().astype('double')
df.samples = df.columns
df.genes = df.index
## classes
if classes is None:
df.classes = np.arange(df.samples.size).astype('str')
else:
df.classes = classes
return df
################################################################################
|
import numpy as np
import pandas as pd
################################################################################
def readCountData(fname, classes = None, *args, **kargs):
df = pd.read_table(fname, *args, **kargs)
## add attributes
df.counts = df.values
df.exposures = df.sum() / df.sum().max().astype('double')
df.samples = df.columns
df.genes = df.index
## classes
if classes is None:
df.classes = np.arange(df.samples.size).astype('str')
else:
df.classes = classes
return df
################################################################################
|
Normalize by the size of the library
|
Normalize by the size of the library
|
Python
|
mit
|
dvav/dgeclust
|
import numpy as np
import pandas as pd
################################################################################
def readCountData(fname, classes = None, *args, **kargs):
df = pd.read_table(fname, *args, **kargs)
## add attributes
df.counts = df.values
- df.exposures = df.sum() / df.sum().astype('double') #df.sum() / df.sum().max().astype('double')
+ df.exposures = df.sum() / df.sum().max().astype('double')
df.samples = df.columns
df.genes = df.index
## classes
if classes is None:
df.classes = np.arange(df.samples.size).astype('str')
else:
df.classes = classes
return df
################################################################################
|
Normalize by the size of the library
|
## Code Before:
import numpy as np
import pandas as pd
################################################################################
def readCountData(fname, classes = None, *args, **kargs):
df = pd.read_table(fname, *args, **kargs)
## add attributes
df.counts = df.values
df.exposures = df.sum() / df.sum().astype('double') #df.sum() / df.sum().max().astype('double')
df.samples = df.columns
df.genes = df.index
## classes
if classes is None:
df.classes = np.arange(df.samples.size).astype('str')
else:
df.classes = classes
return df
################################################################################
## Instruction:
Normalize by the size of the library
## Code After:
import numpy as np
import pandas as pd
################################################################################
def readCountData(fname, classes = None, *args, **kargs):
df = pd.read_table(fname, *args, **kargs)
## add attributes
df.counts = df.values
df.exposures = df.sum() / df.sum().max().astype('double')
df.samples = df.columns
df.genes = df.index
## classes
if classes is None:
df.classes = np.arange(df.samples.size).astype('str')
else:
df.classes = classes
return df
################################################################################
|
...
df.counts = df.values
df.exposures = df.sum() / df.sum().max().astype('double')
df.samples = df.columns
...
|
63f04662f5ca22443ab6080f559ac898302cf103
|
tests/integration/conftest.py
|
tests/integration/conftest.py
|
def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests with require a redeploy are run after
# tests that don't need a redeploy.
final_list = []
on_redeploy_tests = []
for item in items:
if item.get_marker('on_redeploy') is not None:
on_redeploy_tests.append(item)
else:
final_list.append(item)
final_list.extend(on_redeploy_tests)
items[:] = final_list
|
DEPLOY_TEST_BASENAME = 'test_features.py'
def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests with require a redeploy are run after
# tests that don't need a redeploy.
start, end = _get_start_end_index(DEPLOY_TEST_BASENAME, items)
marked = []
unmarked = []
for item in items[start:end]:
if item.get_marker('on_redeploy') is not None:
marked.append(item)
else:
unmarked.append(item)
items[start:end] = unmarked + marked
def _get_start_end_index(basename, items):
# precondition: all the tests for test_features.py are
# in a contiguous range. This is the case because pytest
# will group all tests in a module together.
matched = [item.fspath.basename == basename for item in items]
return (
matched.index(True),
len(matched) - list(reversed(matched)).index(True)
)
|
Reorder redeploy tests within a single module
|
Reorder redeploy tests within a single module
The original code for on_redeploy was making the
assumption that there was only one integration test file.
When test_package.py was added, the tests always failed
because the redeploy tests were run *after* the package tests
which messed with the module scope fixtures.
Now we ensure we only reorder tests within test_features.py.
|
Python
|
apache-2.0
|
awslabs/chalice
|
+ DEPLOY_TEST_BASENAME = 'test_features.py'
+
+
def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests with require a redeploy are run after
# tests that don't need a redeploy.
- final_list = []
- on_redeploy_tests = []
+ start, end = _get_start_end_index(DEPLOY_TEST_BASENAME, items)
+ marked = []
+ unmarked = []
- for item in items:
+ for item in items[start:end]:
if item.get_marker('on_redeploy') is not None:
- on_redeploy_tests.append(item)
+ marked.append(item)
else:
- final_list.append(item)
+ unmarked.append(item)
+ items[start:end] = unmarked + marked
- final_list.extend(on_redeploy_tests)
- items[:] = final_list
+
+ def _get_start_end_index(basename, items):
+ # precondition: all the tests for test_features.py are
+ # in a contiguous range. This is the case because pytest
+ # will group all tests in a module together.
+ matched = [item.fspath.basename == basename for item in items]
+ return (
+ matched.index(True),
+ len(matched) - list(reversed(matched)).index(True)
+ )
+
|
Reorder redeploy tests within a single module
|
## Code Before:
def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests with require a redeploy are run after
# tests that don't need a redeploy.
final_list = []
on_redeploy_tests = []
for item in items:
if item.get_marker('on_redeploy') is not None:
on_redeploy_tests.append(item)
else:
final_list.append(item)
final_list.extend(on_redeploy_tests)
items[:] = final_list
## Instruction:
Reorder redeploy tests within a single module
## Code After:
DEPLOY_TEST_BASENAME = 'test_features.py'
def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests with require a redeploy are run after
# tests that don't need a redeploy.
start, end = _get_start_end_index(DEPLOY_TEST_BASENAME, items)
marked = []
unmarked = []
for item in items[start:end]:
if item.get_marker('on_redeploy') is not None:
marked.append(item)
else:
unmarked.append(item)
items[start:end] = unmarked + marked
def _get_start_end_index(basename, items):
# precondition: all the tests for test_features.py are
# in a contiguous range. This is the case because pytest
# will group all tests in a module together.
matched = [item.fspath.basename == basename for item in items]
return (
matched.index(True),
len(matched) - list(reversed(matched)).index(True)
)
|
...
DEPLOY_TEST_BASENAME = 'test_features.py'
def pytest_collection_modifyitems(session, config, items):
...
# tests that don't need a redeploy.
start, end = _get_start_end_index(DEPLOY_TEST_BASENAME, items)
marked = []
unmarked = []
for item in items[start:end]:
if item.get_marker('on_redeploy') is not None:
marked.append(item)
else:
unmarked.append(item)
items[start:end] = unmarked + marked
def _get_start_end_index(basename, items):
# precondition: all the tests for test_features.py are
# in a contiguous range. This is the case because pytest
# will group all tests in a module together.
matched = [item.fspath.basename == basename for item in items]
return (
matched.index(True),
len(matched) - list(reversed(matched)).index(True)
)
...
|
862c2bdeaab094afdd61db862be54a8c4b7c08f3
|
corehq/apps/users/admin.py
|
corehq/apps/users/admin.py
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class CustomUserAdmin(UserAdmin):
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror, HQApiKey
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class ApiKeyInline(admin.TabularInline):
model = HQApiKey
readonly_fields = ['key', 'created']
extra = 1
class CustomUserAdmin(UserAdmin):
inlines = [
ApiKeyInline,
]
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
|
Add ApiKey to Users page in Django Admin
|
Add ApiKey to Users page in Django Admin
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
- from .models import DomainPermissionsMirror
+ from .models import DomainPermissionsMirror, HQApiKey
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
+ class ApiKeyInline(admin.TabularInline):
+ model = HQApiKey
+ readonly_fields = ['key', 'created']
+ extra = 1
+
+
class CustomUserAdmin(UserAdmin):
+ inlines = [
+ ApiKeyInline,
+ ]
+
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
|
Add ApiKey to Users page in Django Admin
|
## Code Before:
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class CustomUserAdmin(UserAdmin):
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
## Instruction:
Add ApiKey to Users page in Django Admin
## Code After:
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror, HQApiKey
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class ApiKeyInline(admin.TabularInline):
model = HQApiKey
readonly_fields = ['key', 'created']
extra = 1
class CustomUserAdmin(UserAdmin):
inlines = [
ApiKeyInline,
]
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
|
// ... existing code ...
from .models import DomainPermissionsMirror, HQApiKey
// ... modified code ...
class ApiKeyInline(admin.TabularInline):
model = HQApiKey
readonly_fields = ['key', 'created']
extra = 1
class CustomUserAdmin(UserAdmin):
inlines = [
ApiKeyInline,
]
def has_add_permission(self, request):
// ... rest of the code ...
|
16742262b6a37f34bf83b3b6d6bcfd72e69276b2
|
imagersite/imager_profile/models.py
|
imagersite/imager_profile/models.py
|
import six
from django.db import models
from django.contrib.auth.models import User
@six.python_2_unicode_compatible
class ImagerProfile(models.Model):
user = models.OneToOneField(User)
fav_camera = models.CharField(max_length=30)
address = models.CharField(max_length=100)
web_url = models.URLField()
type_photography = models.CharField(max_length=30)
def __str__(self):
return "{}'s profile".format(self.user.username)
|
import six
from django.db import models
from django.contrib.auth.models import ActiveProfileManager, User
@six.python_2_unicode_compatible
class ImagerProfile(models.Model):
user = models.OneToOneField(
User,
nullable=False
)
fav_camera = models.CharField(
max_length=30
)
address = models.CharField()
web_url = models.URLField()
type_photography = models.CharField(max_length=30)
objects = models.Manager()
active = ActiveProfileManager()
def __str__(self):
return "{}'s profile".format(self.user.username)
def is_active(self):
return self.user.is_active
|
Make sure a user has profile
|
Make sure a user has profile
|
Python
|
mit
|
jesseklein406/django-imager,jesseklein406/django-imager,jesseklein406/django-imager
|
import six
from django.db import models
- from django.contrib.auth.models import User
+ from django.contrib.auth.models import ActiveProfileManager, User
@six.python_2_unicode_compatible
class ImagerProfile(models.Model):
- user = models.OneToOneField(User)
+ user = models.OneToOneField(
+ User,
+ nullable=False
+ )
- fav_camera = models.CharField(max_length=30)
+ fav_camera = models.CharField(
+ max_length=30
+ )
- address = models.CharField(max_length=100)
+ address = models.CharField()
web_url = models.URLField()
type_photography = models.CharField(max_length=30)
+
+ objects = models.Manager()
+ active = ActiveProfileManager()
def __str__(self):
return "{}'s profile".format(self.user.username)
+ def is_active(self):
+ return self.user.is_active
+
|
Make sure a user has profile
|
## Code Before:
import six
from django.db import models
from django.contrib.auth.models import User
@six.python_2_unicode_compatible
class ImagerProfile(models.Model):
user = models.OneToOneField(User)
fav_camera = models.CharField(max_length=30)
address = models.CharField(max_length=100)
web_url = models.URLField()
type_photography = models.CharField(max_length=30)
def __str__(self):
return "{}'s profile".format(self.user.username)
## Instruction:
Make sure a user has profile
## Code After:
import six
from django.db import models
from django.contrib.auth.models import ActiveProfileManager, User
@six.python_2_unicode_compatible
class ImagerProfile(models.Model):
user = models.OneToOneField(
User,
nullable=False
)
fav_camera = models.CharField(
max_length=30
)
address = models.CharField()
web_url = models.URLField()
type_photography = models.CharField(max_length=30)
objects = models.Manager()
active = ActiveProfileManager()
def __str__(self):
return "{}'s profile".format(self.user.username)
def is_active(self):
return self.user.is_active
|
# ... existing code ...
from django.db import models
from django.contrib.auth.models import ActiveProfileManager, User
# ... modified code ...
class ImagerProfile(models.Model):
user = models.OneToOneField(
User,
nullable=False
)
fav_camera = models.CharField(
max_length=30
)
address = models.CharField()
web_url = models.URLField()
...
objects = models.Manager()
active = ActiveProfileManager()
def __str__(self):
...
return "{}'s profile".format(self.user.username)
def is_active(self):
return self.user.is_active
# ... rest of the code ...
|
7d98adbfd08cbb72b6a9cc4ffe585756203b4e43
|
app/__init__.py
|
app/__init__.py
|
import logging
from flask import Flask
from flask_socketio import SocketIO
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
app.config.from_object('config')
app.config.from_envvar('POSIO_SETTINGS')
socketio = SocketIO(app)
from app import views
file_handler = RotatingFileHandler('posio.log', 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Startup')
|
import logging
from os import environ
from flask import Flask
from flask_socketio import SocketIO
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
app.config.from_object('config')
# Override config if needed
if 'POSIO_SETTINGS' in environ:
app.config.from_envvar('POSIO_SETTINGS')
socketio = SocketIO(app)
from app import views
file_handler = RotatingFileHandler('posio.log', 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Startup')
|
Check if custom config file is set
|
Check if custom config file is set
|
Python
|
mit
|
abrenaut/posio,abrenaut/posio,abrenaut/posio
|
import logging
+ from os import environ
from flask import Flask
from flask_socketio import SocketIO
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
app.config.from_object('config')
+ # Override config if needed
+ if 'POSIO_SETTINGS' in environ:
- app.config.from_envvar('POSIO_SETTINGS')
+ app.config.from_envvar('POSIO_SETTINGS')
socketio = SocketIO(app)
from app import views
file_handler = RotatingFileHandler('posio.log', 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Startup')
|
Check if custom config file is set
|
## Code Before:
import logging
from flask import Flask
from flask_socketio import SocketIO
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
app.config.from_object('config')
app.config.from_envvar('POSIO_SETTINGS')
socketio = SocketIO(app)
from app import views
file_handler = RotatingFileHandler('posio.log', 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Startup')
## Instruction:
Check if custom config file is set
## Code After:
import logging
from os import environ
from flask import Flask
from flask_socketio import SocketIO
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
app.config.from_object('config')
# Override config if needed
if 'POSIO_SETTINGS' in environ:
app.config.from_envvar('POSIO_SETTINGS')
socketio = SocketIO(app)
from app import views
file_handler = RotatingFileHandler('posio.log', 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Startup')
|
# ... existing code ...
import logging
from os import environ
from flask import Flask
# ... modified code ...
app.config.from_object('config')
# Override config if needed
if 'POSIO_SETTINGS' in environ:
app.config.from_envvar('POSIO_SETTINGS')
# ... rest of the code ...
|
b9654ffbbd1c2057d1ff377a0190b115f568d080
|
knights/defaulttags.py
|
knights/defaulttags.py
|
from .library import Library
import datetime
register = Library()
@register.tag(name='now')
def now(parser, token):
args, kwargs = parser.parse_args(token)
def _now(context):
a, k = parser.resolve_args(context, args, kwargs)
val = datetime.datetime.now()
return val.strftime(a[0])
return _now
|
from .library import Library
from .parse import BasicNode
import datetime
register = Library()
@register.tag(name='now')
class NowNode(BasicNode):
def render(self, fmt):
val = datetime.datetime.now()
return val.strftime(fmt)
|
Rewrite 'now' tag to use BasicNode
|
Rewrite 'now' tag to use BasicNode
|
Python
|
mit
|
funkybob/knights-templater,funkybob/knights-templater
|
from .library import Library
+ from .parse import BasicNode
import datetime
register = Library()
@register.tag(name='now')
- def now(parser, token):
+ class NowNode(BasicNode):
+ def render(self, fmt):
+ val = datetime.datetime.now()
+ return val.strftime(fmt)
- args, kwargs = parser.parse_args(token)
-
- def _now(context):
- a, k = parser.resolve_args(context, args, kwargs)
- val = datetime.datetime.now()
- return val.strftime(a[0])
-
- return _now
-
|
Rewrite 'now' tag to use BasicNode
|
## Code Before:
from .library import Library
import datetime
register = Library()
@register.tag(name='now')
def now(parser, token):
args, kwargs = parser.parse_args(token)
def _now(context):
a, k = parser.resolve_args(context, args, kwargs)
val = datetime.datetime.now()
return val.strftime(a[0])
return _now
## Instruction:
Rewrite 'now' tag to use BasicNode
## Code After:
from .library import Library
from .parse import BasicNode
import datetime
register = Library()
@register.tag(name='now')
class NowNode(BasicNode):
def render(self, fmt):
val = datetime.datetime.now()
return val.strftime(fmt)
|
...
from .library import Library
from .parse import BasicNode
...
@register.tag(name='now')
class NowNode(BasicNode):
def render(self, fmt):
val = datetime.datetime.now()
return val.strftime(fmt)
...
|
dc04c35177815ff2aee46088cac7d6790e6831dd
|
swimlane/core/search/search_result.py
|
swimlane/core/search/search_result.py
|
"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = (resp["results"][report.applicationIds[0]]
if resp["results"] else [])
self.records = (Record(SwimlaneDict(r)) for r in results)
|
"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = []
if report.applicationIds[0] in resp['results']:
results = resp["results"][report.applicationIds[0]]
self.records = (Record(SwimlaneDict(r)) for r in results)
|
Fix a KeyError that is raised when there are no reuslts
|
Fix a KeyError that is raised when there are no reuslts
|
Python
|
mit
|
Swimlane/sw-python-client
|
"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
+ results = []
+ if report.applicationIds[0] in resp['results']:
- results = (resp["results"][report.applicationIds[0]]
+ results = resp["results"][report.applicationIds[0]]
- if resp["results"] else [])
self.records = (Record(SwimlaneDict(r)) for r in results)
-
|
Fix a KeyError that is raised when there are no reuslts
|
## Code Before:
"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = (resp["results"][report.applicationIds[0]]
if resp["results"] else [])
self.records = (Record(SwimlaneDict(r)) for r in results)
## Instruction:
Fix a KeyError that is raised when there are no reuslts
## Code After:
"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = []
if report.applicationIds[0] in resp['results']:
results = resp["results"][report.applicationIds[0]]
self.records = (Record(SwimlaneDict(r)) for r in results)
|
...
self.limit = resp["limit"]
results = []
if report.applicationIds[0] in resp['results']:
results = resp["results"][report.applicationIds[0]]
self.records = (Record(SwimlaneDict(r)) for r in results)
...
|
2b58318ad7134a8c894b70918520a89b51a2d6dd
|
cla_backend/apps/reports/tests/test_utils.py
|
cla_backend/apps/reports/tests/test_utils.py
|
import mock
import os
from boto.s3.connection import S3Connection
from django.test import TestCase, override_settings
from reports.utils import get_s3_connection
class UtilsTestCase(TestCase):
@override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002")
def test_get_s3_connection(self):
envs = {"AWS_S3_HOST": "s3.eu-west-2.amazonaws.com", "S3_USE_SIGV4": "True"}
with mock.patch.dict(os.environ, envs):
conn = get_s3_connection()
self.assertIsInstance(conn, S3Connection)
|
import mock
import os
from boto.s3.connection import S3Connection
from django.test import TestCase, override_settings
from reports.utils import get_s3_connection
class UtilsTestCase(TestCase):
@override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002", AWS_S3_HOST="s3.eu-west-2.amazonaws.com")
def test_get_s3_connection(self):
envs = {"S3_USE_SIGV4": "True"}
with mock.patch.dict(os.environ, envs):
conn = get_s3_connection()
self.assertIsInstance(conn, S3Connection)
|
Modify s3 connection test for new AWS_S3_HOST setting
|
Modify s3 connection test for new AWS_S3_HOST setting
The value is now calculated from the env var at load time, so mocking
the env var value is not effective
(cherry picked from commit 044219df7123e3a03a38cc06c9e8e8e9e80b0cbe)
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
import mock
import os
from boto.s3.connection import S3Connection
from django.test import TestCase, override_settings
from reports.utils import get_s3_connection
class UtilsTestCase(TestCase):
- @override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002")
+ @override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002", AWS_S3_HOST="s3.eu-west-2.amazonaws.com")
def test_get_s3_connection(self):
- envs = {"AWS_S3_HOST": "s3.eu-west-2.amazonaws.com", "S3_USE_SIGV4": "True"}
+ envs = {"S3_USE_SIGV4": "True"}
with mock.patch.dict(os.environ, envs):
conn = get_s3_connection()
self.assertIsInstance(conn, S3Connection)
|
Modify s3 connection test for new AWS_S3_HOST setting
|
## Code Before:
import mock
import os
from boto.s3.connection import S3Connection
from django.test import TestCase, override_settings
from reports.utils import get_s3_connection
class UtilsTestCase(TestCase):
@override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002")
def test_get_s3_connection(self):
envs = {"AWS_S3_HOST": "s3.eu-west-2.amazonaws.com", "S3_USE_SIGV4": "True"}
with mock.patch.dict(os.environ, envs):
conn = get_s3_connection()
self.assertIsInstance(conn, S3Connection)
## Instruction:
Modify s3 connection test for new AWS_S3_HOST setting
## Code After:
import mock
import os
from boto.s3.connection import S3Connection
from django.test import TestCase, override_settings
from reports.utils import get_s3_connection
class UtilsTestCase(TestCase):
@override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002", AWS_S3_HOST="s3.eu-west-2.amazonaws.com")
def test_get_s3_connection(self):
envs = {"S3_USE_SIGV4": "True"}
with mock.patch.dict(os.environ, envs):
conn = get_s3_connection()
self.assertIsInstance(conn, S3Connection)
|
# ... existing code ...
class UtilsTestCase(TestCase):
@override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002", AWS_S3_HOST="s3.eu-west-2.amazonaws.com")
def test_get_s3_connection(self):
envs = {"S3_USE_SIGV4": "True"}
with mock.patch.dict(os.environ, envs):
# ... rest of the code ...
|
fb3abf0d1cf27d23c78dd8101dd0c54cf589c2ef
|
corehq/apps/locations/resources/v0_6.py
|
corehq/apps/locations/resources/v0_6.py
|
from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.locations.models import SQLLocation
from corehq.apps.users.models import HqPermissions
from corehq.apps.locations.resources import v0_5
class LocationResource(v0_5.LocationResource):
resource_name = 'location'
class Meta:
queryset = SQLLocation.objects.filter(is_archived=False).all()
detail_uri_name = 'location_id'
authentication = RequirePermissionAuthentication(HqPermissions.edit_locations)
allowed_methods = ['get']
include_resource_uri = False
fields = {
'domain',
'location_id',
'name',
'site_code',
'last_modified',
'latitude',
'longitude',
'location_data',
}
filtering = {
"domain": ('exact',),
}
def dehydrate(self, bundle):
if bundle.obj.parent:
bundle.data['parent_location_id'] = bundle.obj.parent.location_id
else:
bundle.data['parent_location_id'] = ''
bundle.data['location_type_name'] = bundle.obj.location_type.name
bundle.data['location_type_code'] = bundle.obj.location_type.code
return bundle
|
from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.locations.models import SQLLocation
from corehq.apps.users.models import HqPermissions
from corehq.apps.locations.resources import v0_5
class LocationResource(v0_5.LocationResource):
resource_name = 'location'
class Meta:
queryset = SQLLocation.active_objects.all()
detail_uri_name = 'location_id'
authentication = RequirePermissionAuthentication(HqPermissions.edit_locations)
allowed_methods = ['get']
include_resource_uri = False
fields = {
'domain',
'location_id',
'name',
'site_code',
'last_modified',
'latitude',
'longitude',
'location_data',
}
filtering = {
"domain": ('exact',),
}
def dehydrate(self, bundle):
if bundle.obj.parent:
bundle.data['parent_location_id'] = bundle.obj.parent.location_id
else:
bundle.data['parent_location_id'] = ''
bundle.data['location_type_name'] = bundle.obj.location_type.name
bundle.data['location_type_code'] = bundle.obj.location_type.code
return bundle
|
Use objects manager that automatically filters out archived forms
|
Use objects manager that automatically filters out archived forms
Co-authored-by: Ethan Soergel <c1732a0c832c5c8cbfae77286e6475129315f488@users.noreply.github.com>
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.locations.models import SQLLocation
from corehq.apps.users.models import HqPermissions
from corehq.apps.locations.resources import v0_5
class LocationResource(v0_5.LocationResource):
resource_name = 'location'
class Meta:
- queryset = SQLLocation.objects.filter(is_archived=False).all()
+ queryset = SQLLocation.active_objects.all()
detail_uri_name = 'location_id'
authentication = RequirePermissionAuthentication(HqPermissions.edit_locations)
allowed_methods = ['get']
include_resource_uri = False
fields = {
'domain',
'location_id',
'name',
'site_code',
'last_modified',
'latitude',
'longitude',
'location_data',
}
filtering = {
"domain": ('exact',),
}
def dehydrate(self, bundle):
if bundle.obj.parent:
bundle.data['parent_location_id'] = bundle.obj.parent.location_id
else:
bundle.data['parent_location_id'] = ''
bundle.data['location_type_name'] = bundle.obj.location_type.name
bundle.data['location_type_code'] = bundle.obj.location_type.code
return bundle
|
Use objects manager that automatically filters out archived forms
|
## Code Before:
from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.locations.models import SQLLocation
from corehq.apps.users.models import HqPermissions
from corehq.apps.locations.resources import v0_5
class LocationResource(v0_5.LocationResource):
resource_name = 'location'
class Meta:
queryset = SQLLocation.objects.filter(is_archived=False).all()
detail_uri_name = 'location_id'
authentication = RequirePermissionAuthentication(HqPermissions.edit_locations)
allowed_methods = ['get']
include_resource_uri = False
fields = {
'domain',
'location_id',
'name',
'site_code',
'last_modified',
'latitude',
'longitude',
'location_data',
}
filtering = {
"domain": ('exact',),
}
def dehydrate(self, bundle):
if bundle.obj.parent:
bundle.data['parent_location_id'] = bundle.obj.parent.location_id
else:
bundle.data['parent_location_id'] = ''
bundle.data['location_type_name'] = bundle.obj.location_type.name
bundle.data['location_type_code'] = bundle.obj.location_type.code
return bundle
## Instruction:
Use objects manager that automatically filters out archived forms
## Code After:
from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.locations.models import SQLLocation
from corehq.apps.users.models import HqPermissions
from corehq.apps.locations.resources import v0_5
class LocationResource(v0_5.LocationResource):
resource_name = 'location'
class Meta:
queryset = SQLLocation.active_objects.all()
detail_uri_name = 'location_id'
authentication = RequirePermissionAuthentication(HqPermissions.edit_locations)
allowed_methods = ['get']
include_resource_uri = False
fields = {
'domain',
'location_id',
'name',
'site_code',
'last_modified',
'latitude',
'longitude',
'location_data',
}
filtering = {
"domain": ('exact',),
}
def dehydrate(self, bundle):
if bundle.obj.parent:
bundle.data['parent_location_id'] = bundle.obj.parent.location_id
else:
bundle.data['parent_location_id'] = ''
bundle.data['location_type_name'] = bundle.obj.location_type.name
bundle.data['location_type_code'] = bundle.obj.location_type.code
return bundle
|
// ... existing code ...
class Meta:
queryset = SQLLocation.active_objects.all()
detail_uri_name = 'location_id'
// ... rest of the code ...
|
216f0bb3680b86ac2dfc8c506b791db4e34eeee6
|
nextactions/board.py
|
nextactions/board.py
|
from nextactions.list import List
class Board:
def __init__(self, trello, json):
self._trello = trello
self.id = json['id']
self.name = json['name']
self.nextActionList = []
def getLists(self):
json = self._trello.get(
'https://api.trello.com/1/boards/' + self.id + '/lists?cards=none)'
)
return [List(j) for j in json]
def getListByName(self, name):
for l in self.getLists():
if l.name == name:
return l
return None
|
from nextactions.list import List
class Board:
def __init__(self, trello, json):
self._trello = trello
self.id = json['id']
self.name = json['name']
self.nextActionList = []
def getLists(self):
json = self._trello.get(
'https://api.trello.com/1/boards/' + self.id + '/lists?cards=none)'
)
return [List(j) for j in json]
def getListByName(self, name):
matches = [l for l in self.getLists() if l.name == name]
return matches[0] if len(matches) else None
|
Tidy matching lists by name
|
Tidy matching lists by name
|
Python
|
mit
|
stevecshanks/trello-next-actions
|
from nextactions.list import List
class Board:
def __init__(self, trello, json):
self._trello = trello
self.id = json['id']
self.name = json['name']
self.nextActionList = []
def getLists(self):
json = self._trello.get(
'https://api.trello.com/1/boards/' + self.id + '/lists?cards=none)'
)
return [List(j) for j in json]
def getListByName(self, name):
+ matches = [l for l in self.getLists() if l.name == name]
+ return matches[0] if len(matches) else None
- for l in self.getLists():
- if l.name == name:
- return l
- return None
|
Tidy matching lists by name
|
## Code Before:
from nextactions.list import List
class Board:
def __init__(self, trello, json):
self._trello = trello
self.id = json['id']
self.name = json['name']
self.nextActionList = []
def getLists(self):
json = self._trello.get(
'https://api.trello.com/1/boards/' + self.id + '/lists?cards=none)'
)
return [List(j) for j in json]
def getListByName(self, name):
for l in self.getLists():
if l.name == name:
return l
return None
## Instruction:
Tidy matching lists by name
## Code After:
from nextactions.list import List
class Board:
def __init__(self, trello, json):
self._trello = trello
self.id = json['id']
self.name = json['name']
self.nextActionList = []
def getLists(self):
json = self._trello.get(
'https://api.trello.com/1/boards/' + self.id + '/lists?cards=none)'
)
return [List(j) for j in json]
def getListByName(self, name):
matches = [l for l in self.getLists() if l.name == name]
return matches[0] if len(matches) else None
|
// ... existing code ...
def getListByName(self, name):
matches = [l for l in self.getLists() if l.name == name]
return matches[0] if len(matches) else None
// ... rest of the code ...
|
a6cb8d3c2d79b609a6d5d0550af57aa2b9328f7f
|
mopidy_vkontakte/actor.py
|
mopidy_vkontakte/actor.py
|
from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = VKPlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
class VKPlaybackProvider(base.BasePlaybackProvider):
def play(self, track):
return super(VKPlaybackProvider, self).play(track)
|
from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = base.BasePlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
|
Remove PlaybackProvider that does nothing
|
Remove PlaybackProvider that does nothing
|
Python
|
apache-2.0
|
sibuser/mopidy-vkontakte
|
from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
- self.playback = VKPlaybackProvider(audio=audio, backend=self)
+ self.playback = base.BasePlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
-
- class VKPlaybackProvider(base.BasePlaybackProvider):
-
- def play(self, track):
- return super(VKPlaybackProvider, self).play(track)
-
|
Remove PlaybackProvider that does nothing
|
## Code Before:
from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = VKPlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
class VKPlaybackProvider(base.BasePlaybackProvider):
def play(self, track):
return super(VKPlaybackProvider, self).play(track)
## Instruction:
Remove PlaybackProvider that does nothing
## Code After:
from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = base.BasePlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
|
// ... existing code ...
self.library = VKLibraryProvider(backend=self)
self.playback = base.BasePlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
// ... modified code ...
self.uri_schemes = ['vkontakte']
// ... rest of the code ...
|
95542ab1b7c22a6e0160e242349c66f2cef7e390
|
syntacticframes_project/syntacticframes/management/commands/check_correspondance_errors.py
|
syntacticframes_project/syntacticframes/management/commands/check_correspondance_errors.py
|
from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetClass
from parsecorrespondance import parse
from loadmapping import mapping
class Command(BaseCommand):
def handle(self, *args, **options):
for vn_class in VerbNetClass.objects.all():
try:
parse.get_ladl_list(vn_class.ladl_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(vn_class.name, e))
try:
parse.get_lvf_list(vn_class.lvf_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(vn_class.name, e))
|
from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetFrameSet
from parsecorrespondance import parse
from loadmapping import mapping
class Command(BaseCommand):
def handle(self, *args, **options):
for frameset in VerbNetFrameSet.objects.all():
print("{}: {}/{}".format(frameset.name, frameset.ladl_string, frameset.lvf_string))
if frameset.ladl_string:
try:
parse.FrenchMapping('LADL', frameset.ladl_string).result()
except parse.UnknownClassException as e:
print('{:<30} {}'.format(frameset.name, e))
if frameset.lvf_string:
try:
parse.FrenchMapping('LVF', frameset.lvf_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(frameset.name, e))
|
Check correspondances in framesets now
|
Check correspondances in framesets now
|
Python
|
mit
|
aymara/verbenet-editor,aymara/verbenet-editor,aymara/verbenet-editor
|
from django.core.management.base import BaseCommand
- from syntacticframes.models import VerbNetClass
+ from syntacticframes.models import VerbNetFrameSet
from parsecorrespondance import parse
from loadmapping import mapping
class Command(BaseCommand):
def handle(self, *args, **options):
- for vn_class in VerbNetClass.objects.all():
+ for frameset in VerbNetFrameSet.objects.all():
+ print("{}: {}/{}".format(frameset.name, frameset.ladl_string, frameset.lvf_string))
- try:
- parse.get_ladl_list(vn_class.ladl_string)
- except parse.UnknownClassException as e:
- print('{:<30} {}'.format(vn_class.name, e))
+ if frameset.ladl_string:
- try:
+ try:
- parse.get_lvf_list(vn_class.lvf_string)
+ parse.FrenchMapping('LADL', frameset.ladl_string).result()
- except parse.UnknownClassException as e:
+ except parse.UnknownClassException as e:
- print('{:<30} {}'.format(vn_class.name, e))
+ print('{:<30} {}'.format(frameset.name, e))
+ if frameset.lvf_string:
+ try:
+ parse.FrenchMapping('LVF', frameset.lvf_string)
+ except parse.UnknownClassException as e:
+ print('{:<30} {}'.format(frameset.name, e))
+
|
Check correspondances in framesets now
|
## Code Before:
from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetClass
from parsecorrespondance import parse
from loadmapping import mapping
class Command(BaseCommand):
def handle(self, *args, **options):
for vn_class in VerbNetClass.objects.all():
try:
parse.get_ladl_list(vn_class.ladl_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(vn_class.name, e))
try:
parse.get_lvf_list(vn_class.lvf_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(vn_class.name, e))
## Instruction:
Check correspondances in framesets now
## Code After:
from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetFrameSet
from parsecorrespondance import parse
from loadmapping import mapping
class Command(BaseCommand):
def handle(self, *args, **options):
for frameset in VerbNetFrameSet.objects.all():
print("{}: {}/{}".format(frameset.name, frameset.ladl_string, frameset.lvf_string))
if frameset.ladl_string:
try:
parse.FrenchMapping('LADL', frameset.ladl_string).result()
except parse.UnknownClassException as e:
print('{:<30} {}'.format(frameset.name, e))
if frameset.lvf_string:
try:
parse.FrenchMapping('LVF', frameset.lvf_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(frameset.name, e))
|
// ... existing code ...
from syntacticframes.models import VerbNetFrameSet
from parsecorrespondance import parse
// ... modified code ...
def handle(self, *args, **options):
for frameset in VerbNetFrameSet.objects.all():
print("{}: {}/{}".format(frameset.name, frameset.ladl_string, frameset.lvf_string))
if frameset.ladl_string:
try:
parse.FrenchMapping('LADL', frameset.ladl_string).result()
except parse.UnknownClassException as e:
print('{:<30} {}'.format(frameset.name, e))
if frameset.lvf_string:
try:
parse.FrenchMapping('LVF', frameset.lvf_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(frameset.name, e))
// ... rest of the code ...
|
9d02361c034591e44e1a6d745911ef72a3591950
|
pingparsing/_interface.py
|
pingparsing/_interface.py
|
from __future__ import absolute_import
from __future__ import division
import abc
class PingParserInterface(object):
@abc.abstractproperty
def packet_transmit(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_receive(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_loss_rate(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_loss_count(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_min(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_avg(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_max(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_mdev(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_duplicate_rate(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_duplicate_count(self): # pragma: no cover
pass
|
from __future__ import absolute_import
from __future__ import division
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class PingParserInterface(object):
@abc.abstractproperty
def packet_transmit(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_receive(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_loss_rate(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_loss_count(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_min(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_avg(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_max(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_mdev(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_duplicate_rate(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_duplicate_count(self): # pragma: no cover
pass
|
Fix missing metaclass for an interface class
|
Fix missing metaclass for an interface class
|
Python
|
mit
|
thombashi/pingparsing,thombashi/pingparsing
|
from __future__ import absolute_import
from __future__ import division
import abc
+ import six
+
+ @six.add_metaclass(abc.ABCMeta)
class PingParserInterface(object):
@abc.abstractproperty
def packet_transmit(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_receive(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_loss_rate(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_loss_count(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_min(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_avg(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_max(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_mdev(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_duplicate_rate(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_duplicate_count(self): # pragma: no cover
pass
|
Fix missing metaclass for an interface class
|
## Code Before:
from __future__ import absolute_import
from __future__ import division
import abc
class PingParserInterface(object):
@abc.abstractproperty
def packet_transmit(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_receive(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_loss_rate(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_loss_count(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_min(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_avg(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_max(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_mdev(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_duplicate_rate(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_duplicate_count(self): # pragma: no cover
pass
## Instruction:
Fix missing metaclass for an interface class
## Code After:
from __future__ import absolute_import
from __future__ import division
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class PingParserInterface(object):
@abc.abstractproperty
def packet_transmit(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_receive(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_loss_rate(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_loss_count(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_min(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_avg(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_max(self): # pragma: no cover
pass
@abc.abstractproperty
def rtt_mdev(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_duplicate_rate(self): # pragma: no cover
pass
@abc.abstractproperty
def packet_duplicate_count(self): # pragma: no cover
pass
|
...
import six
@six.add_metaclass(abc.ABCMeta)
class PingParserInterface(object):
...
|
22823ca55e4c342149b83d84d18ad879d55023d7
|
oslib/__init__.py
|
oslib/__init__.py
|
from .state import OSState
from .user import OSUser
|
from .state import OSState
from .user import OSUser
from .find import findff, findfr, findrf, findrr, superfind
|
Add find import to oslib init
|
Add find import to oslib init
|
Python
|
mit
|
Caian/ostools
|
from .state import OSState
from .user import OSUser
+ from .find import findff, findfr, findrf, findrr, superfind
-
|
Add find import to oslib init
|
## Code Before:
from .state import OSState
from .user import OSUser
## Instruction:
Add find import to oslib init
## Code After:
from .state import OSState
from .user import OSUser
from .find import findff, findfr, findrf, findrr, superfind
|
# ... existing code ...
from .user import OSUser
from .find import findff, findfr, findrf, findrr, superfind
# ... rest of the code ...
|
33e1c781b0e430cb1e0df19d02ed06a193f9d202
|
waterbutler/identity.py
|
waterbutler/identity.py
|
import asyncio
from waterbutler import settings
@asyncio.coroutine
def fetch_rest_identity(params):
response = yield from aiohttp.request(
'get',
settings.IDENTITY_API_URL,
params=params,
headers={'Content-Type': 'application/json'},
)
# TOOD Handle Errors nicely
if response.status != 200:
data = yield from response.read()
raise web.HTTPError(response.status)
data = yield from response.json()
return data
IDENTITY_METHODS = {
'rest': fetch_rest_identity
}
get_identity = IDENTITY_METHODS[settings.IDENTITY_METHOD]
|
import asyncio
import aiohttp
from waterbutler import settings
IDENTITY_METHODS = {}
def get_identity_func(name):
try:
return IDENTITY_METHODS[name]
except KeyError:
raise NotImplementedError('No identity getter for {0}'.format(name))
def register_identity(name):
def _register_identity(func):
IDENTITY_METHODS[name] = func
return func
return _register_identity
def get_identity(name, **kwargs):
return get_identity_func(name)(**kwargs)
@register_identity('rest')
@asyncio.coroutine
def fetch_rest_identity(**params):
response = yield from aiohttp.request(
'get',
settings.IDENTITY_API_URL,
params=params,
headers={'Content-Type': 'application/json'},
)
# TOOD Handle Errors nicely
if response.status != 200:
data = yield from response.read()
raise web.HTTPError(response.status)
data = yield from response.json()
return data
|
Make use of a register decorator
|
Make use of a register decorator
|
Python
|
apache-2.0
|
CenterForOpenScience/waterbutler,kwierman/waterbutler,TomBaxter/waterbutler,rafaeldelucena/waterbutler,Ghalko/waterbutler,RCOSDP/waterbutler,hmoco/waterbutler,felliott/waterbutler,rdhyee/waterbutler,Johnetordoff/waterbutler,icereval/waterbutler,chrisseto/waterbutler,cosenal/waterbutler
|
import asyncio
+
+ import aiohttp
from waterbutler import settings
+ IDENTITY_METHODS = {}
+
+
+ def get_identity_func(name):
+ try:
+ return IDENTITY_METHODS[name]
+ except KeyError:
+ raise NotImplementedError('No identity getter for {0}'.format(name))
+
+
+ def register_identity(name):
+ def _register_identity(func):
+ IDENTITY_METHODS[name] = func
+ return func
+ return _register_identity
+
+
+ def get_identity(name, **kwargs):
+ return get_identity_func(name)(**kwargs)
+
+
+ @register_identity('rest')
@asyncio.coroutine
- def fetch_rest_identity(params):
+ def fetch_rest_identity(**params):
response = yield from aiohttp.request(
'get',
settings.IDENTITY_API_URL,
params=params,
headers={'Content-Type': 'application/json'},
)
# TOOD Handle Errors nicely
if response.status != 200:
data = yield from response.read()
raise web.HTTPError(response.status)
data = yield from response.json()
return data
- IDENTITY_METHODS = {
- 'rest': fetch_rest_identity
- }
-
- get_identity = IDENTITY_METHODS[settings.IDENTITY_METHOD]
-
|
Make use of a register decorator
|
## Code Before:
import asyncio
from waterbutler import settings
@asyncio.coroutine
def fetch_rest_identity(params):
response = yield from aiohttp.request(
'get',
settings.IDENTITY_API_URL,
params=params,
headers={'Content-Type': 'application/json'},
)
# TOOD Handle Errors nicely
if response.status != 200:
data = yield from response.read()
raise web.HTTPError(response.status)
data = yield from response.json()
return data
IDENTITY_METHODS = {
'rest': fetch_rest_identity
}
get_identity = IDENTITY_METHODS[settings.IDENTITY_METHOD]
## Instruction:
Make use of a register decorator
## Code After:
import asyncio
import aiohttp
from waterbutler import settings
IDENTITY_METHODS = {}
def get_identity_func(name):
try:
return IDENTITY_METHODS[name]
except KeyError:
raise NotImplementedError('No identity getter for {0}'.format(name))
def register_identity(name):
def _register_identity(func):
IDENTITY_METHODS[name] = func
return func
return _register_identity
def get_identity(name, **kwargs):
return get_identity_func(name)(**kwargs)
@register_identity('rest')
@asyncio.coroutine
def fetch_rest_identity(**params):
response = yield from aiohttp.request(
'get',
settings.IDENTITY_API_URL,
params=params,
headers={'Content-Type': 'application/json'},
)
# TOOD Handle Errors nicely
if response.status != 200:
data = yield from response.read()
raise web.HTTPError(response.status)
data = yield from response.json()
return data
|
// ... existing code ...
import asyncio
import aiohttp
// ... modified code ...
IDENTITY_METHODS = {}
def get_identity_func(name):
try:
return IDENTITY_METHODS[name]
except KeyError:
raise NotImplementedError('No identity getter for {0}'.format(name))
def register_identity(name):
def _register_identity(func):
IDENTITY_METHODS[name] = func
return func
return _register_identity
def get_identity(name, **kwargs):
return get_identity_func(name)(**kwargs)
@register_identity('rest')
@asyncio.coroutine
def fetch_rest_identity(**params):
response = yield from aiohttp.request(
...
return data
// ... rest of the code ...
|
c3cb6a294fe83557d86d9415f8cdf8efb4f7e59f
|
elevator/message.py
|
elevator/message.py
|
import msgpack
import logging
class MessageFormatError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Request(object):
"""Handler objects for frontend->backend objects messages"""
def __init__(self, raw_message, compressed=False):
errors_logger = logging.getLogger("errors_logger")
message = msgpack.unpackb(raw_message)
try:
self.db_uid = message.pop('DB_UID')
self.command = message.pop('COMMAND')
self.data = message.pop('ARGS')
except KeyError:
errors_logger.exception("Invalid request message : %s" %
message)
raise MessageFormatError("Invalid request message")
class Response(tuple):
"""Handler objects for frontend->backend objects messages"""
def __new__(cls, id, *args, **kwargs):
response = {
'STATUS': kwargs.pop('status', 0),
'DATAS': kwargs.pop('datas', [])
}
response['DATAS'] = cls._format_datas(response['DATAS'])
msg = [id, msgpack.packb(response)]
return tuple.__new__(cls, msg)
@classmethod
def _format_datas(cls, datas):
if datas and not isinstance(datas, (tuple, list)):
datas = [datas]
return [unicode(d) for d in datas]
|
import msgpack
import logging
class MessageFormatError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Request(object):
"""Handler objects for frontend->backend objects messages"""
def __init__(self, raw_message, compressed=False):
errors_logger = logging.getLogger("errors_logger")
message = msgpack.unpackb(raw_message)
try:
self.db_uid = message.pop('DB_UID')
self.command = message.pop('COMMAND')
self.data = message.pop('ARGS')
except KeyError:
errors_logger.exception("Invalid request message : %s" %
message)
raise MessageFormatError("Invalid request message")
class Response(tuple):
"""Handler objects for frontend->backend objects messages"""
def __new__(cls, id, *args, **kwargs):
response = {
'STATUS': kwargs.pop('status', 0),
'DATAS': kwargs.pop('datas', [])
}
response['DATAS'] = cls._format_datas(response['DATAS'])
msg = [id, msgpack.packb(response)]
return tuple.__new__(cls, msg)
@classmethod
def _format_datas(cls, datas):
if datas and not isinstance(datas, (tuple, list)):
datas = [datas]
return datas
|
Fix : response datas list should not be unicoded
|
Fix : response datas list should not be unicoded
|
Python
|
mit
|
oleiade/Elevator
|
import msgpack
import logging
class MessageFormatError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Request(object):
"""Handler objects for frontend->backend objects messages"""
def __init__(self, raw_message, compressed=False):
errors_logger = logging.getLogger("errors_logger")
message = msgpack.unpackb(raw_message)
try:
self.db_uid = message.pop('DB_UID')
self.command = message.pop('COMMAND')
self.data = message.pop('ARGS')
except KeyError:
errors_logger.exception("Invalid request message : %s" %
message)
raise MessageFormatError("Invalid request message")
class Response(tuple):
"""Handler objects for frontend->backend objects messages"""
def __new__(cls, id, *args, **kwargs):
response = {
'STATUS': kwargs.pop('status', 0),
'DATAS': kwargs.pop('datas', [])
}
response['DATAS'] = cls._format_datas(response['DATAS'])
msg = [id, msgpack.packb(response)]
return tuple.__new__(cls, msg)
@classmethod
def _format_datas(cls, datas):
if datas and not isinstance(datas, (tuple, list)):
datas = [datas]
- return [unicode(d) for d in datas]
+ return datas
-
|
Fix : response datas list should not be unicoded
|
## Code Before:
import msgpack
import logging
class MessageFormatError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Request(object):
"""Handler objects for frontend->backend objects messages"""
def __init__(self, raw_message, compressed=False):
errors_logger = logging.getLogger("errors_logger")
message = msgpack.unpackb(raw_message)
try:
self.db_uid = message.pop('DB_UID')
self.command = message.pop('COMMAND')
self.data = message.pop('ARGS')
except KeyError:
errors_logger.exception("Invalid request message : %s" %
message)
raise MessageFormatError("Invalid request message")
class Response(tuple):
"""Handler objects for frontend->backend objects messages"""
def __new__(cls, id, *args, **kwargs):
response = {
'STATUS': kwargs.pop('status', 0),
'DATAS': kwargs.pop('datas', [])
}
response['DATAS'] = cls._format_datas(response['DATAS'])
msg = [id, msgpack.packb(response)]
return tuple.__new__(cls, msg)
@classmethod
def _format_datas(cls, datas):
if datas and not isinstance(datas, (tuple, list)):
datas = [datas]
return [unicode(d) for d in datas]
## Instruction:
Fix : response datas list should not be unicoded
## Code After:
import msgpack
import logging
class MessageFormatError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Request(object):
"""Handler objects for frontend->backend objects messages"""
def __init__(self, raw_message, compressed=False):
errors_logger = logging.getLogger("errors_logger")
message = msgpack.unpackb(raw_message)
try:
self.db_uid = message.pop('DB_UID')
self.command = message.pop('COMMAND')
self.data = message.pop('ARGS')
except KeyError:
errors_logger.exception("Invalid request message : %s" %
message)
raise MessageFormatError("Invalid request message")
class Response(tuple):
"""Handler objects for frontend->backend objects messages"""
def __new__(cls, id, *args, **kwargs):
response = {
'STATUS': kwargs.pop('status', 0),
'DATAS': kwargs.pop('datas', [])
}
response['DATAS'] = cls._format_datas(response['DATAS'])
msg = [id, msgpack.packb(response)]
return tuple.__new__(cls, msg)
@classmethod
def _format_datas(cls, datas):
if datas and not isinstance(datas, (tuple, list)):
datas = [datas]
return datas
|
// ... existing code ...
datas = [datas]
return datas
// ... rest of the code ...
|
7f9a31a03e68e1d9dc6f420c6aa157e657da4157
|
apps/core/templatetags/files.py
|
apps/core/templatetags/files.py
|
from pathlib import Path
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def filename(path):
"""Removes traceback lines from a string (if any). It has no effect when
no 'Traceback' pattern has been found.
Returns: raws before the 'Traceback' pattern
"""
return Path(path).name
|
from pathlib import Path
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def filename(path):
"""Removes parent path from a relative or absolute filename
Returns: the filename
"""
return Path(path).name
|
Fix filename template tag docstring
|
Fix filename template tag docstring
|
Python
|
bsd-3-clause
|
Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel
|
from pathlib import Path
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def filename(path):
+ """Removes parent path from a relative or absolute filename
- """Removes traceback lines from a string (if any). It has no effect when
- no 'Traceback' pattern has been found.
- Returns: raws before the 'Traceback' pattern
+ Returns: the filename
"""
return Path(path).name
|
Fix filename template tag docstring
|
## Code Before:
from pathlib import Path
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def filename(path):
"""Removes traceback lines from a string (if any). It has no effect when
no 'Traceback' pattern has been found.
Returns: raws before the 'Traceback' pattern
"""
return Path(path).name
## Instruction:
Fix filename template tag docstring
## Code After:
from pathlib import Path
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def filename(path):
"""Removes parent path from a relative or absolute filename
Returns: the filename
"""
return Path(path).name
|
...
def filename(path):
"""Removes parent path from a relative or absolute filename
Returns: the filename
"""
...
|
3f394e47841b2d9e49554b21c67b06a46f99f25c
|
celery_app.py
|
celery_app.py
|
import config
import logging
from celery.schedules import crontab
from lazyblacksmith.app import create_app
from lazyblacksmith.extension.celery_app import celery_app
# disable / enable loggers we want
logging.getLogger('pyswagger').setLevel(logging.ERROR)
app = create_app(config)
app.app_context().push()
celery_app.init_app(app)
#celery_app.conf.broker_url = config.broker_url
celery_app.conf.beat_schedule.update({
'character-task-spawner': {
'task': 'schedule.character_task_spawner',
'schedule': crontab(minute='*'),
},
'universe-task-spawner': {
'task': 'schedule.universe_task_spawner',
'schedule': crontab(minute='*/30'),
},
})
celery_app.conf.imports = [
'lazyblacksmith.tasks.task_spawner',
'lazyblacksmith.tasks.market.adjusted_price',
'lazyblacksmith.tasks.market.market_order',
'lazyblacksmith.tasks.industry.indexes',
'lazyblacksmith.tasks.character.skills',
'lazyblacksmith.tasks.character.blueprints',
]
|
import config
import logging
from celery.schedules import crontab
from lazyblacksmith.app import create_app
from lazyblacksmith.extension.celery_app import celery_app
# disable / enable loggers we want
logging.getLogger('pyswagger').setLevel(logging.ERROR)
app = create_app(config)
app.app_context().push()
celery_app.init_app(app)
#celery_app.conf.broker_url = config.broker_url
celery_app.conf.beat_schedule.update({
'character-task-spawner': {
'task': 'schedule.character_task_spawner',
'schedule': crontab(minute='*'),
},
'universe-task-spawner': {
'task': 'schedule.universe_task_spawner',
'schedule': crontab(minute='*/30'),
},
})
celery_app.conf.imports = [
'lazyblacksmith.tasks.task_spawner',
'lazyblacksmith.tasks.market.adjusted_price',
'lazyblacksmith.tasks.market.market_order',
'lazyblacksmith.tasks.industry.indexes',
'lazyblacksmith.tasks.character.skills',
'lazyblacksmith.tasks.character.blueprints',
'lazyblacksmith.tasks.corporation.blueprints',
]
|
Add corporation task in celery data
|
Add corporation task in celery data
|
Python
|
bsd-3-clause
|
Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith
|
import config
import logging
from celery.schedules import crontab
from lazyblacksmith.app import create_app
from lazyblacksmith.extension.celery_app import celery_app
# disable / enable loggers we want
logging.getLogger('pyswagger').setLevel(logging.ERROR)
app = create_app(config)
app.app_context().push()
celery_app.init_app(app)
#celery_app.conf.broker_url = config.broker_url
celery_app.conf.beat_schedule.update({
'character-task-spawner': {
'task': 'schedule.character_task_spawner',
'schedule': crontab(minute='*'),
},
'universe-task-spawner': {
'task': 'schedule.universe_task_spawner',
'schedule': crontab(minute='*/30'),
},
})
celery_app.conf.imports = [
'lazyblacksmith.tasks.task_spawner',
'lazyblacksmith.tasks.market.adjusted_price',
'lazyblacksmith.tasks.market.market_order',
'lazyblacksmith.tasks.industry.indexes',
'lazyblacksmith.tasks.character.skills',
'lazyblacksmith.tasks.character.blueprints',
+ 'lazyblacksmith.tasks.corporation.blueprints',
]
|
Add corporation task in celery data
|
## Code Before:
import config
import logging
from celery.schedules import crontab
from lazyblacksmith.app import create_app
from lazyblacksmith.extension.celery_app import celery_app
# disable / enable loggers we want
logging.getLogger('pyswagger').setLevel(logging.ERROR)
app = create_app(config)
app.app_context().push()
celery_app.init_app(app)
#celery_app.conf.broker_url = config.broker_url
celery_app.conf.beat_schedule.update({
'character-task-spawner': {
'task': 'schedule.character_task_spawner',
'schedule': crontab(minute='*'),
},
'universe-task-spawner': {
'task': 'schedule.universe_task_spawner',
'schedule': crontab(minute='*/30'),
},
})
celery_app.conf.imports = [
'lazyblacksmith.tasks.task_spawner',
'lazyblacksmith.tasks.market.adjusted_price',
'lazyblacksmith.tasks.market.market_order',
'lazyblacksmith.tasks.industry.indexes',
'lazyblacksmith.tasks.character.skills',
'lazyblacksmith.tasks.character.blueprints',
]
## Instruction:
Add corporation task in celery data
## Code After:
import config
import logging
from celery.schedules import crontab
from lazyblacksmith.app import create_app
from lazyblacksmith.extension.celery_app import celery_app
# disable / enable loggers we want
logging.getLogger('pyswagger').setLevel(logging.ERROR)
app = create_app(config)
app.app_context().push()
celery_app.init_app(app)
#celery_app.conf.broker_url = config.broker_url
celery_app.conf.beat_schedule.update({
'character-task-spawner': {
'task': 'schedule.character_task_spawner',
'schedule': crontab(minute='*'),
},
'universe-task-spawner': {
'task': 'schedule.universe_task_spawner',
'schedule': crontab(minute='*/30'),
},
})
celery_app.conf.imports = [
'lazyblacksmith.tasks.task_spawner',
'lazyblacksmith.tasks.market.adjusted_price',
'lazyblacksmith.tasks.market.market_order',
'lazyblacksmith.tasks.industry.indexes',
'lazyblacksmith.tasks.character.skills',
'lazyblacksmith.tasks.character.blueprints',
'lazyblacksmith.tasks.corporation.blueprints',
]
|
# ... existing code ...
'lazyblacksmith.tasks.character.blueprints',
'lazyblacksmith.tasks.corporation.blueprints',
]
# ... rest of the code ...
|
87d4e604ef72fbe0513c725a7fdf0d421e633257
|
changes/api/project_index.py
|
changes/api/project_index.py
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.constants import Status
from changes.models import Project, Build
class ProjectIndexAPIView(APIView):
def get(self):
queryset = Project.query.order_by(Project.name.asc())
project_list = list(queryset)
context = {
'projects': [],
}
for project in project_list:
data = self.serialize(project)
data['lastBuild'] = Build.query.options(
joinedload(Build.project),
joinedload(Build.author),
).filter(
Build.revision_sha != None, # NOQA
Build.patch_id == None,
Build.project == project,
Build.status == Status.finished,
).order_by(
Build.date_created.desc(),
).first()
data['numActiveBuilds'] = Build.query.filter(
Build.project == project,
Build.status != Status.finished,
).count()
context['projects'].append(data)
return self.respond(context)
def get_stream_channels(self):
return ['builds:*']
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.constants import Status
from changes.models import Project, Build
class ProjectIndexAPIView(APIView):
def get(self):
queryset = Project.query.order_by(Project.name.asc())
project_list = list(queryset)
context = {
'projects': [],
}
for project in project_list:
data = self.serialize(project)
data['lastBuild'] = Build.query.options(
joinedload(Build.project),
joinedload(Build.author),
).filter(
Build.revision_sha != None, # NOQA
Build.patch_id == None,
Build.project == project,
Build.status == Status.finished,
).order_by(
Build.date_created.desc(),
).first()
context['projects'].append(data)
return self.respond(context)
def get_stream_channels(self):
return ['builds:*']
|
Remove numActiveBuilds as its unused
|
Remove numActiveBuilds as its unused
|
Python
|
apache-2.0
|
dropbox/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,dropbox/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.constants import Status
from changes.models import Project, Build
class ProjectIndexAPIView(APIView):
def get(self):
queryset = Project.query.order_by(Project.name.asc())
project_list = list(queryset)
context = {
'projects': [],
}
for project in project_list:
data = self.serialize(project)
data['lastBuild'] = Build.query.options(
joinedload(Build.project),
joinedload(Build.author),
).filter(
Build.revision_sha != None, # NOQA
Build.patch_id == None,
Build.project == project,
Build.status == Status.finished,
).order_by(
Build.date_created.desc(),
).first()
- data['numActiveBuilds'] = Build.query.filter(
- Build.project == project,
- Build.status != Status.finished,
- ).count()
-
context['projects'].append(data)
return self.respond(context)
def get_stream_channels(self):
return ['builds:*']
|
Remove numActiveBuilds as its unused
|
## Code Before:
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.constants import Status
from changes.models import Project, Build
class ProjectIndexAPIView(APIView):
def get(self):
queryset = Project.query.order_by(Project.name.asc())
project_list = list(queryset)
context = {
'projects': [],
}
for project in project_list:
data = self.serialize(project)
data['lastBuild'] = Build.query.options(
joinedload(Build.project),
joinedload(Build.author),
).filter(
Build.revision_sha != None, # NOQA
Build.patch_id == None,
Build.project == project,
Build.status == Status.finished,
).order_by(
Build.date_created.desc(),
).first()
data['numActiveBuilds'] = Build.query.filter(
Build.project == project,
Build.status != Status.finished,
).count()
context['projects'].append(data)
return self.respond(context)
def get_stream_channels(self):
return ['builds:*']
## Instruction:
Remove numActiveBuilds as its unused
## Code After:
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.constants import Status
from changes.models import Project, Build
class ProjectIndexAPIView(APIView):
def get(self):
queryset = Project.query.order_by(Project.name.asc())
project_list = list(queryset)
context = {
'projects': [],
}
for project in project_list:
data = self.serialize(project)
data['lastBuild'] = Build.query.options(
joinedload(Build.project),
joinedload(Build.author),
).filter(
Build.revision_sha != None, # NOQA
Build.patch_id == None,
Build.project == project,
Build.status == Status.finished,
).order_by(
Build.date_created.desc(),
).first()
context['projects'].append(data)
return self.respond(context)
def get_stream_channels(self):
return ['builds:*']
|
...
context['projects'].append(data)
...
|
c0ecc75d2c02a1c6b514b09e5f9ad907fb04ce82
|
new/meshes.py
|
new/meshes.py
|
class RectangularMesh(object):
def __init__(self, d, atlas='atlas', meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
raise ValueError('Cellsize d must be a tuple of length 3.')
elif d[0] <= 0 or d[1] <= 0 or d[2] <= 0:
raise ValueError('Cellsize dimensions must be positive.')
else:
self.d = d
if not isinstance(atlas, str):
raise ValueError('atlas must be a string.')
else:
self.atlas = atlas
if not isinstance(meshname, str):
raise ValueError('name must be a string.')
else:
self.meshname = meshname
def get_mif(self):
# Create mif string.
mif = '# RectangularMesh\n'
mif += 'Specify Oxs_RectangularMesh:{}'.format(self.meshname) + ' {\n'
mif += '\tcellsize {'
mif += ' {} {} {} '.format(self.d[0], self.d[1], self.d[2])
mif += '}\n'
mif += '\tatlas {}\n'.format(self.atlas)
mif += '}\n\n'
return mif
|
from atlases import BoxAtlas
class RectangularMesh(object):
def __init__(self, atlas, d, meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
raise ValueError('Cellsize d must be a tuple of length 3.')
elif d[0] <= 0 or d[1] <= 0 or d[2] <= 0:
raise ValueError('Cellsize dimensions must be positive.')
else:
self.d = d
if not isinstance(atlas, BoxAtlas):
raise ValueError('atlas must be a string.')
else:
self.atlas = atlas
if not isinstance(meshname, str):
raise ValueError('name must be a string.')
else:
self.meshname = meshname
def get_mif(self):
# Create mif string.
mif = '# RectangularMesh\n'
mif += 'Specify Oxs_RectangularMesh:{}'.format(self.meshname) + ' {\n'
mif += '\tcellsize {'
mif += ' {} {} {} '.format(self.d[0], self.d[1], self.d[2])
mif += '}\n'
mif += '\tatlas {}\n'.format(self.atlas.name)
mif += '}\n\n'
return mif
|
Add atlas as an argument for mesh initialisation.
|
Add atlas as an argument for mesh initialisation.
|
Python
|
bsd-2-clause
|
fangohr/oommf-python,fangohr/oommf-python,fangohr/oommf-python
|
+ from atlases import BoxAtlas
+
+
class RectangularMesh(object):
- def __init__(self, d, atlas='atlas', meshname='mesh'):
+ def __init__(self, atlas, d, meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
raise ValueError('Cellsize d must be a tuple of length 3.')
elif d[0] <= 0 or d[1] <= 0 or d[2] <= 0:
raise ValueError('Cellsize dimensions must be positive.')
else:
self.d = d
- if not isinstance(atlas, str):
+ if not isinstance(atlas, BoxAtlas):
raise ValueError('atlas must be a string.')
else:
self.atlas = atlas
if not isinstance(meshname, str):
raise ValueError('name must be a string.')
else:
self.meshname = meshname
def get_mif(self):
# Create mif string.
mif = '# RectangularMesh\n'
mif += 'Specify Oxs_RectangularMesh:{}'.format(self.meshname) + ' {\n'
mif += '\tcellsize {'
mif += ' {} {} {} '.format(self.d[0], self.d[1], self.d[2])
mif += '}\n'
- mif += '\tatlas {}\n'.format(self.atlas)
+ mif += '\tatlas {}\n'.format(self.atlas.name)
mif += '}\n\n'
return mif
|
Add atlas as an argument for mesh initialisation.
|
## Code Before:
class RectangularMesh(object):
def __init__(self, d, atlas='atlas', meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
raise ValueError('Cellsize d must be a tuple of length 3.')
elif d[0] <= 0 or d[1] <= 0 or d[2] <= 0:
raise ValueError('Cellsize dimensions must be positive.')
else:
self.d = d
if not isinstance(atlas, str):
raise ValueError('atlas must be a string.')
else:
self.atlas = atlas
if not isinstance(meshname, str):
raise ValueError('name must be a string.')
else:
self.meshname = meshname
def get_mif(self):
# Create mif string.
mif = '# RectangularMesh\n'
mif += 'Specify Oxs_RectangularMesh:{}'.format(self.meshname) + ' {\n'
mif += '\tcellsize {'
mif += ' {} {} {} '.format(self.d[0], self.d[1], self.d[2])
mif += '}\n'
mif += '\tatlas {}\n'.format(self.atlas)
mif += '}\n\n'
return mif
## Instruction:
Add atlas as an argument for mesh initialisation.
## Code After:
from atlases import BoxAtlas
class RectangularMesh(object):
def __init__(self, atlas, d, meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
raise ValueError('Cellsize d must be a tuple of length 3.')
elif d[0] <= 0 or d[1] <= 0 or d[2] <= 0:
raise ValueError('Cellsize dimensions must be positive.')
else:
self.d = d
if not isinstance(atlas, BoxAtlas):
raise ValueError('atlas must be a string.')
else:
self.atlas = atlas
if not isinstance(meshname, str):
raise ValueError('name must be a string.')
else:
self.meshname = meshname
def get_mif(self):
# Create mif string.
mif = '# RectangularMesh\n'
mif += 'Specify Oxs_RectangularMesh:{}'.format(self.meshname) + ' {\n'
mif += '\tcellsize {'
mif += ' {} {} {} '.format(self.d[0], self.d[1], self.d[2])
mif += '}\n'
mif += '\tatlas {}\n'.format(self.atlas.name)
mif += '}\n\n'
return mif
|
# ... existing code ...
from atlases import BoxAtlas
class RectangularMesh(object):
def __init__(self, atlas, d, meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
# ... modified code ...
if not isinstance(atlas, BoxAtlas):
raise ValueError('atlas must be a string.')
...
mif += '}\n'
mif += '\tatlas {}\n'.format(self.atlas.name)
mif += '}\n\n'
# ... rest of the code ...
|
39a1212508c27a5c21f8b027fef3fb409a28657f
|
app/commands.py
|
app/commands.py
|
from flask import current_app
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) # noqa
def setup_commands(application):
application.cli.command('list-routes')(list_routes)
|
import click
from flask import current_app
from flask.cli import with_appcontext
@click.command('list-routes')
@with_appcontext
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) # noqa
def setup_commands(application):
application.cli.add_command(list_routes)
|
Switch existing command to standard approach
|
Switch existing command to standard approach
This is the suggested approach in the documentation [1] and using
it makes it clearer what's going on and to add other commands with
arguments, which we'll do in the next commit.
[1]: https://flask.palletsprojects.com/en/2.0.x/cli/#custom-commands
|
Python
|
mit
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
|
+ import click
from flask import current_app
+ from flask.cli import with_appcontext
+ @click.command('list-routes')
+ @with_appcontext
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) # noqa
def setup_commands(application):
- application.cli.command('list-routes')(list_routes)
+ application.cli.add_command(list_routes)
|
Switch existing command to standard approach
|
## Code Before:
from flask import current_app
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) # noqa
def setup_commands(application):
application.cli.command('list-routes')(list_routes)
## Instruction:
Switch existing command to standard approach
## Code After:
import click
from flask import current_app
from flask.cli import with_appcontext
@click.command('list-routes')
@with_appcontext
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) # noqa
def setup_commands(application):
application.cli.add_command(list_routes)
|
...
import click
from flask import current_app
from flask.cli import with_appcontext
...
@click.command('list-routes')
@with_appcontext
def list_routes():
...
def setup_commands(application):
application.cli.add_command(list_routes)
...
|
dbf8d75c0e4105570676af0bde50d2a4c43e6dd3
|
ain7/organizations/autocomplete_light_registry.py
|
ain7/organizations/autocomplete_light_registry.py
|
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from autocomplete_light import shortcuts as autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
|
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from autocomplete_light import shortcuts as autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
add_another_url_name='organization-add',
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
|
Add a link to add a company in user profile
|
Add a link to add a company in user profile
When a user change its experience, s⋅he can add an organization (not an
office) if it does not exist yet.
Link to autocomplete-light module's doc:
http://django-autocomplete-light.readthedocs.io/en/2.3.1/addanother.html#autocompletes.
Fix #3
|
Python
|
lgpl-2.1
|
ain7/www.ain7.org,ain7/www.ain7.org,ain7/www.ain7.org,ain7/www.ain7.org
|
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from autocomplete_light import shortcuts as autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
+ add_another_url_name='organization-add',
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
|
Add a link to add a company in user profile
|
## Code Before:
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from autocomplete_light import shortcuts as autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
## Instruction:
Add a link to add a company in user profile
## Code After:
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from autocomplete_light import shortcuts as autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
add_another_url_name='organization-add',
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
|
# ... existing code ...
search_fields=['name', 'organization__name'],
add_another_url_name='organization-add',
)
# ... rest of the code ...
|
e9605bd92e67c7f5daf7011f871c3a9d915abe76
|
core/urls/base.py
|
core/urls/base.py
|
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from core.views import Home, register
# Use this file to import all other url
from game_website import settings
urlpatterns = patterns(
# Examples:
# url(r'^blog/', include('blog.urls')),
'',
url(r'^$', Home.as_view(), name='home'),
url(r'^games/', include('core.urls.games')),
url(r'^register/$', register, name='register'),
url(r'^login/$', 'django.contrib.auth.views.login', name='login'),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login', name='logout'),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Used to serve static media in a dev environment. Should be disabled in production
|
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from core.views import Home, register
# Use this file to import all other url
from game_website import settings
urlpatterns = patterns(
# Examples:
# url(r'^blog/', include('blog.urls')),
'',
url(r'^$', Home.as_view(), name='home'),
url(r'^games/', include('core.urls.games')),
url(r'^register/$', register, name='register'),
url(r'^login/$', 'django.contrib.auth.views.login', name='login'),
url(r'^logout/$', 'django.contrib.auth.views.logout',
{'next_page': 'core:home'}, name='logout'),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Used to serve static media in a dev environment. Should be disabled in production
|
Make logout redirect to home
|
Make logout redirect to home
|
Python
|
mit
|
joshsamara/game-website,joshsamara/game-website,joshsamara/game-website
|
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from core.views import Home, register
# Use this file to import all other url
from game_website import settings
urlpatterns = patterns(
# Examples:
# url(r'^blog/', include('blog.urls')),
'',
url(r'^$', Home.as_view(), name='home'),
url(r'^games/', include('core.urls.games')),
url(r'^register/$', register, name='register'),
url(r'^login/$', 'django.contrib.auth.views.login', name='login'),
- url(r'^logout/$', 'django.contrib.auth.views.logout_then_login', name='logout'),
+ url(r'^logout/$', 'django.contrib.auth.views.logout',
-
+ {'next_page': 'core:home'}, name='logout'),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Used to serve static media in a dev environment. Should be disabled in production
|
Make logout redirect to home
|
## Code Before:
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from core.views import Home, register
# Use this file to import all other url
from game_website import settings
urlpatterns = patterns(
# Examples:
# url(r'^blog/', include('blog.urls')),
'',
url(r'^$', Home.as_view(), name='home'),
url(r'^games/', include('core.urls.games')),
url(r'^register/$', register, name='register'),
url(r'^login/$', 'django.contrib.auth.views.login', name='login'),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login', name='logout'),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Used to serve static media in a dev environment. Should be disabled in production
## Instruction:
Make logout redirect to home
## Code After:
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from core.views import Home, register
# Use this file to import all other url
from game_website import settings
urlpatterns = patterns(
# Examples:
# url(r'^blog/', include('blog.urls')),
'',
url(r'^$', Home.as_view(), name='home'),
url(r'^games/', include('core.urls.games')),
url(r'^register/$', register, name='register'),
url(r'^login/$', 'django.contrib.auth.views.login', name='login'),
url(r'^logout/$', 'django.contrib.auth.views.logout',
{'next_page': 'core:home'}, name='logout'),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Used to serve static media in a dev environment. Should be disabled in production
|
// ... existing code ...
url(r'^login/$', 'django.contrib.auth.views.login', name='login'),
url(r'^logout/$', 'django.contrib.auth.views.logout',
{'next_page': 'core:home'}, name='logout'),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \
// ... rest of the code ...
|
ff391fc302b6d4e9fab0653522fa2fe47d8e8faa
|
beavy_modules/url_extractor/lib.py
|
beavy_modules/url_extractor/lib.py
|
import lassie
from pyembed.core import PyEmbed
from beavy.app import cache
pyembed = PyEmbed()
@cache.memoize()
def extract_info(url):
return lassie.fetch(url)
@cache.memoize()
def extract_oembed(url, **kwargs):
return pyembed.embed('http://www.youtube.com/watch?v=_PEdPBEpQfY', **kwargs)
|
from pyembed.core import PyEmbed
from beavy.app import cache
from lassie import Lassie
import re
# lassie by default isn't extensive enough for us
# configure it so that it is.
from lassie.filters import FILTER_MAPS
FILTER_MAPS['meta']['open_graph']['map'].update({
# general
"og:type": "type",
"og:site_name": "site_name",
})
FILTER_MAPS['meta']['generic']['pattern'] = re.compile(r"^(description|keywords|title|author|article:|music:|video:|book:)", re.I)
FILTER_MAPS['meta']['generic']['map'].update({
# articles
"article:published_time": "published_time",
"article:modified_time": "modified_time",
"article:expiration_time": "expiration_time",
"article:section": "section",
"article:section_url": "section_url",
# music
"music:duration": "duration",
"music:release_date": "release_date",
# video
"video:duration": "duration",
"video:release_date": "release_date",
# author
"author": "author",
# book
"book:author": "author",
"book:isbn": "isbn",
"book:release_date": "release_date",
})
# general configuration
pyembed = PyEmbed()
lassie = Lassie()
lassie.request_opts = {
'headers':{
# tell Lassie to tell others it is facebook
'User-Agent': 'facebookexternalhit/1.1'
}
}
@cache.memoize()
def extract_info(url):
return lassie.fetch(url)
@cache.memoize()
def extract_oembed(url, **kwargs):
return pyembed.embed(url, **kwargs)
|
Configure Lassie for more information
|
Configure Lassie for more information
|
Python
|
mpl-2.0
|
beavyHQ/beavy,beavyHQ/beavy,beavyHQ/beavy,beavyHQ/beavy
|
- import lassie
+
from pyembed.core import PyEmbed
from beavy.app import cache
+ from lassie import Lassie
+ import re
+
+ # lassie by default isn't extensive enough for us
+ # configure it so that it is.
+
+ from lassie.filters import FILTER_MAPS
+ FILTER_MAPS['meta']['open_graph']['map'].update({
+ # general
+ "og:type": "type",
+ "og:site_name": "site_name",
+ })
+
+ FILTER_MAPS['meta']['generic']['pattern'] = re.compile(r"^(description|keywords|title|author|article:|music:|video:|book:)", re.I)
+ FILTER_MAPS['meta']['generic']['map'].update({
+ # articles
+ "article:published_time": "published_time",
+ "article:modified_time": "modified_time",
+ "article:expiration_time": "expiration_time",
+ "article:section": "section",
+ "article:section_url": "section_url",
+
+ # music
+ "music:duration": "duration",
+ "music:release_date": "release_date",
+
+ # video
+ "video:duration": "duration",
+ "video:release_date": "release_date",
+
+ # author
+ "author": "author",
+
+ # book
+ "book:author": "author",
+ "book:isbn": "isbn",
+ "book:release_date": "release_date",
+ })
+
+ # general configuration
pyembed = PyEmbed()
+
+ lassie = Lassie()
+ lassie.request_opts = {
+ 'headers':{
+ # tell Lassie to tell others it is facebook
+ 'User-Agent': 'facebookexternalhit/1.1'
+ }
+ }
@cache.memoize()
def extract_info(url):
return lassie.fetch(url)
@cache.memoize()
def extract_oembed(url, **kwargs):
- return pyembed.embed('http://www.youtube.com/watch?v=_PEdPBEpQfY', **kwargs)
+ return pyembed.embed(url, **kwargs)
|
Configure Lassie for more information
|
## Code Before:
import lassie
from pyembed.core import PyEmbed
from beavy.app import cache
pyembed = PyEmbed()
@cache.memoize()
def extract_info(url):
return lassie.fetch(url)
@cache.memoize()
def extract_oembed(url, **kwargs):
return pyembed.embed('http://www.youtube.com/watch?v=_PEdPBEpQfY', **kwargs)
## Instruction:
Configure Lassie for more information
## Code After:
from pyembed.core import PyEmbed
from beavy.app import cache
from lassie import Lassie
import re
# lassie by default isn't extensive enough for us
# configure it so that it is.
from lassie.filters import FILTER_MAPS
FILTER_MAPS['meta']['open_graph']['map'].update({
# general
"og:type": "type",
"og:site_name": "site_name",
})
FILTER_MAPS['meta']['generic']['pattern'] = re.compile(r"^(description|keywords|title|author|article:|music:|video:|book:)", re.I)
FILTER_MAPS['meta']['generic']['map'].update({
# articles
"article:published_time": "published_time",
"article:modified_time": "modified_time",
"article:expiration_time": "expiration_time",
"article:section": "section",
"article:section_url": "section_url",
# music
"music:duration": "duration",
"music:release_date": "release_date",
# video
"video:duration": "duration",
"video:release_date": "release_date",
# author
"author": "author",
# book
"book:author": "author",
"book:isbn": "isbn",
"book:release_date": "release_date",
})
# general configuration
pyembed = PyEmbed()
lassie = Lassie()
lassie.request_opts = {
'headers':{
# tell Lassie to tell others it is facebook
'User-Agent': 'facebookexternalhit/1.1'
}
}
@cache.memoize()
def extract_info(url):
return lassie.fetch(url)
@cache.memoize()
def extract_oembed(url, **kwargs):
return pyembed.embed(url, **kwargs)
|
// ... existing code ...
from pyembed.core import PyEmbed
// ... modified code ...
from lassie import Lassie
import re
# lassie by default isn't extensive enough for us
# configure it so that it is.
from lassie.filters import FILTER_MAPS
FILTER_MAPS['meta']['open_graph']['map'].update({
# general
"og:type": "type",
"og:site_name": "site_name",
})
FILTER_MAPS['meta']['generic']['pattern'] = re.compile(r"^(description|keywords|title|author|article:|music:|video:|book:)", re.I)
FILTER_MAPS['meta']['generic']['map'].update({
# articles
"article:published_time": "published_time",
"article:modified_time": "modified_time",
"article:expiration_time": "expiration_time",
"article:section": "section",
"article:section_url": "section_url",
# music
"music:duration": "duration",
"music:release_date": "release_date",
# video
"video:duration": "duration",
"video:release_date": "release_date",
# author
"author": "author",
# book
"book:author": "author",
"book:isbn": "isbn",
"book:release_date": "release_date",
})
# general configuration
pyembed = PyEmbed()
lassie = Lassie()
lassie.request_opts = {
'headers':{
# tell Lassie to tell others it is facebook
'User-Agent': 'facebookexternalhit/1.1'
}
}
...
def extract_oembed(url, **kwargs):
return pyembed.embed(url, **kwargs)
// ... rest of the code ...
|
dec3aaaefe2afdf4d3ce19dc808257ea49cc2b00
|
hsml.py
|
hsml.py
|
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
return radius
|
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
except AttributeError:
#This is for really old numpys without cbrts
radius = np.power(bar["Volume"], 1./3, dtype=np.float32)
return radius
|
Fix for old numpy versions without cbrt
|
Fix for old numpy versions without cbrt
|
Python
|
mit
|
sbird/fake_spectra,sbird/fake_spectra,sbird/fake_spectra
|
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
+ except AttributeError:
+ #This is for really old numpys without cbrts
+ radius = np.power(bar["Volume"], 1./3, dtype=np.float32)
+
return radius
|
Fix for old numpy versions without cbrt
|
## Code Before:
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
return radius
## Instruction:
Fix for old numpy versions without cbrt
## Code After:
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
except AttributeError:
#This is for really old numpys without cbrts
radius = np.power(bar["Volume"], 1./3, dtype=np.float32)
return radius
|
// ... existing code ...
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
except AttributeError:
#This is for really old numpys without cbrts
radius = np.power(bar["Volume"], 1./3, dtype=np.float32)
return radius
// ... rest of the code ...
|
b1b0919f47f43d27bc409528617af8dbd4eea41c
|
tests/test_imports.py
|
tests/test_imports.py
|
import unittest
class TestImport(unittest.TestCase):
# Basic import tests for packages without any.
def test_basic(self):
import bq_helper
import cleverhans
from rl.agents.dqn import DQNAgent
|
import unittest
class TestImport(unittest.TestCase):
# Basic import tests for packages without any.
def test_basic(self):
import bq_helper
import cleverhans
|
Remove import test for keras-rl
|
Remove import test for keras-rl
This package was removed in #747
|
Python
|
apache-2.0
|
Kaggle/docker-python,Kaggle/docker-python
|
import unittest
class TestImport(unittest.TestCase):
# Basic import tests for packages without any.
def test_basic(self):
import bq_helper
import cleverhans
- from rl.agents.dqn import DQNAgent
|
Remove import test for keras-rl
|
## Code Before:
import unittest
class TestImport(unittest.TestCase):
# Basic import tests for packages without any.
def test_basic(self):
import bq_helper
import cleverhans
from rl.agents.dqn import DQNAgent
## Instruction:
Remove import test for keras-rl
## Code After:
import unittest
class TestImport(unittest.TestCase):
# Basic import tests for packages without any.
def test_basic(self):
import bq_helper
import cleverhans
|
// ... existing code ...
import cleverhans
// ... rest of the code ...
|
21f06746eebe809f5d7017394b4c7c50ba319066
|
street_score/bulkadmin/forms.py
|
street_score/bulkadmin/forms.py
|
import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField()
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
|
import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField(help_text="""
<p>Select the CSV file to upload. The file should have a header for
each column you want to populate. When you have selected your
file, click the 'Upload' button below.</p>
""")
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
|
Add a help_text string to the admin form
|
Add a help_text string to the admin form
|
Python
|
mit
|
openplans/streetscore,openplans/streetscore,openplans/streetscore
|
import csv
from django import forms
class BulkUploadForm(forms.Form):
- data = forms.FileField()
+ data = forms.FileField(help_text="""
+ <p>Select the CSV file to upload. The file should have a header for
+ each column you want to populate. When you have selected your
+ file, click the 'Upload' button below.</p>
+ """)
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
|
Add a help_text string to the admin form
|
## Code Before:
import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField()
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
## Instruction:
Add a help_text string to the admin form
## Code After:
import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField(help_text="""
<p>Select the CSV file to upload. The file should have a header for
each column you want to populate. When you have selected your
file, click the 'Upload' button below.</p>
""")
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
|
// ... existing code ...
class BulkUploadForm(forms.Form):
data = forms.FileField(help_text="""
<p>Select the CSV file to upload. The file should have a header for
each column you want to populate. When you have selected your
file, click the 'Upload' button below.</p>
""")
// ... rest of the code ...
|
d6b3f4e0798f430761f51529ea61c368e1ce610a
|
utest/contrib/testrunner/test_pybot_arguments_validation.py
|
utest/contrib/testrunner/test_pybot_arguments_validation.py
|
import unittest
import robot.errors
from robotide.contrib.testrunner.runprofiles import PybotProfile
class TestPybotArgumentsValidation(unittest.TestCase):
def setUp(self):
self._profile = PybotProfile(lambda:0)
@unittest.expectedFailure # No more DataError, better argument detection
def test_invalid_argument(self):
try:
self.assertRaisesRegex(robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
except AttributeError: # Python2
self.assertRaisesRegexp(robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
def test_valid_argument_short(self):
self._working_arguments('-T')
def _working_arguments(self, args):
self.assertEqual(None, self._profile._get_invalid_message(args))
def test_valid_argument_long(self):
self._working_arguments('--timestampoutputs')
def test_valid_argument_with_value(self):
self._working_arguments('--log somelog.html')
def test_runfailed_argument_works(self):
self._working_arguments('--runfailed output.xml')
if __name__ == '__main__':
unittest.main()
|
import unittest
import robotide.lib.robot.errors
from robotide.contrib.testrunner.runprofiles import PybotProfile
class TestPybotArgumentsValidation(unittest.TestCase):
def setUp(self):
self._profile = PybotProfile(lambda:0)
@unittest.expectedFailure # No more DataError, better argument detection
def test_invalid_argument(self):
try:
self.assertRaisesRegex(robotide.lib.robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
except AttributeError: # Python2
self.assertRaisesRegexp(robotide.lib.robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
def test_valid_argument_short(self):
self._working_arguments('-T')
def _working_arguments(self, args):
self.assertEqual(None, self._profile._get_invalid_message(args))
def test_valid_argument_long(self):
self._working_arguments('--timestampoutputs')
def test_valid_argument_with_value(self):
self._working_arguments('--log somelog.html')
def test_runfailed_argument_works(self):
self._working_arguments('--runfailed output.xml')
if __name__ == '__main__':
unittest.main()
|
Fix unit test for when robotframework is not installed.
|
Fix unit test for when robotframework is not installed.
|
Python
|
apache-2.0
|
HelioGuilherme66/RIDE,robotframework/RIDE,robotframework/RIDE,HelioGuilherme66/RIDE,HelioGuilherme66/RIDE,robotframework/RIDE,HelioGuilherme66/RIDE,robotframework/RIDE
|
import unittest
- import robot.errors
+ import robotide.lib.robot.errors
from robotide.contrib.testrunner.runprofiles import PybotProfile
class TestPybotArgumentsValidation(unittest.TestCase):
def setUp(self):
self._profile = PybotProfile(lambda:0)
@unittest.expectedFailure # No more DataError, better argument detection
def test_invalid_argument(self):
try:
- self.assertRaisesRegex(robot.errors.DataError,
+ self.assertRaisesRegex(robotide.lib.robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
except AttributeError: # Python2
- self.assertRaisesRegexp(robot.errors.DataError,
+ self.assertRaisesRegexp(robotide.lib.robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
def test_valid_argument_short(self):
self._working_arguments('-T')
def _working_arguments(self, args):
self.assertEqual(None, self._profile._get_invalid_message(args))
def test_valid_argument_long(self):
self._working_arguments('--timestampoutputs')
def test_valid_argument_with_value(self):
self._working_arguments('--log somelog.html')
def test_runfailed_argument_works(self):
self._working_arguments('--runfailed output.xml')
if __name__ == '__main__':
unittest.main()
|
Fix unit test for when robotframework is not installed.
|
## Code Before:
import unittest
import robot.errors
from robotide.contrib.testrunner.runprofiles import PybotProfile
class TestPybotArgumentsValidation(unittest.TestCase):
def setUp(self):
self._profile = PybotProfile(lambda:0)
@unittest.expectedFailure # No more DataError, better argument detection
def test_invalid_argument(self):
try:
self.assertRaisesRegex(robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
except AttributeError: # Python2
self.assertRaisesRegexp(robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
def test_valid_argument_short(self):
self._working_arguments('-T')
def _working_arguments(self, args):
self.assertEqual(None, self._profile._get_invalid_message(args))
def test_valid_argument_long(self):
self._working_arguments('--timestampoutputs')
def test_valid_argument_with_value(self):
self._working_arguments('--log somelog.html')
def test_runfailed_argument_works(self):
self._working_arguments('--runfailed output.xml')
if __name__ == '__main__':
unittest.main()
## Instruction:
Fix unit test for when robotframework is not installed.
## Code After:
import unittest
import robotide.lib.robot.errors
from robotide.contrib.testrunner.runprofiles import PybotProfile
class TestPybotArgumentsValidation(unittest.TestCase):
def setUp(self):
self._profile = PybotProfile(lambda:0)
@unittest.expectedFailure # No more DataError, better argument detection
def test_invalid_argument(self):
try:
self.assertRaisesRegex(robotide.lib.robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
except AttributeError: # Python2
self.assertRaisesRegexp(robotide.lib.robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
def test_valid_argument_short(self):
self._working_arguments('-T')
def _working_arguments(self, args):
self.assertEqual(None, self._profile._get_invalid_message(args))
def test_valid_argument_long(self):
self._working_arguments('--timestampoutputs')
def test_valid_argument_with_value(self):
self._working_arguments('--log somelog.html')
def test_runfailed_argument_works(self):
self._working_arguments('--runfailed output.xml')
if __name__ == '__main__':
unittest.main()
|
// ... existing code ...
import unittest
import robotide.lib.robot.errors
from robotide.contrib.testrunner.runprofiles import PybotProfile
// ... modified code ...
try:
self.assertRaisesRegex(robotide.lib.robot.errors.DataError,
'option --invalidargument not recognized',
...
except AttributeError: # Python2
self.assertRaisesRegexp(robotide.lib.robot.errors.DataError,
'option --invalidargument not recognized',
// ... rest of the code ...
|
1e5d549b6fdf62c1016451f9dfe566c9546b2f38
|
bcbio/bed/__init__.py
|
bcbio/bed/__init__.py
|
import pybedtools as bt
import six
def concat(bed_files, catted=None):
"""
recursively concat a set of BED files, returning a
sorted bedtools object of the result
"""
if len(bed_files) == 0:
if catted:
return catted.sort()
else:
return catted
if not catted:
bed_files = list(bed_files)
catted = bt.BedTool(bed_files.pop())
else:
catted = catted.cat(bed_files.pop(), postmerge=False,
force_truncate=False)
return concat(bed_files, catted)
def merge(bedfiles):
"""
given a BED file or list of BED files merge them an return a bedtools object
"""
if isinstance(bedfiles, list):
catted = concat(bedfiles)
else:
catted = concat([bedfiles])
if catted:
return concat(bedfiles).sort().merge()
else:
return catted
|
import pybedtools as bt
import six
def concat(bed_files, catted=None):
"""
recursively concat a set of BED files, returning a
sorted bedtools object of the result
"""
bed_files = [x for x in bed_files if x]
if len(bed_files) == 0:
if catted:
# move to a .bed extension for downstream tools if not already
sorted_bed = catted.sort()
if not sorted_bed.fn.endswith(".bed"):
return sorted_bed.moveto(sorted_bed.fn + ".bed")
else:
return sorted_bed
else:
return catted
if not catted:
bed_files = list(bed_files)
catted = bt.BedTool(bed_files.pop())
else:
catted = catted.cat(bed_files.pop(), postmerge=False,
force_truncate=False)
return concat(bed_files, catted)
def merge(bedfiles):
"""
given a BED file or list of BED files merge them an return a bedtools object
"""
if isinstance(bedfiles, list):
catted = concat(bedfiles)
else:
catted = concat([bedfiles])
if catted:
return concat(bedfiles).sort().merge()
else:
return catted
|
Move the file to have an extension of .bed.
|
Move the file to have an extension of .bed.
A lot of tools detect what type of file it is by the extension,
so this lets us pass on the BedTool.fn as the filename and
not break things.
|
Python
|
mit
|
guillermo-carrasco/bcbio-nextgen,lbeltrame/bcbio-nextgen,gifford-lab/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,vladsaveliev/bcbio-nextgen,brainstorm/bcbio-nextgen,mjafin/bcbio-nextgen,lbeltrame/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,brainstorm/bcbio-nextgen,fw1121/bcbio-nextgen,verdurin/bcbio-nextgen,lpantano/bcbio-nextgen,brainstorm/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,a113n/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,chapmanb/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,chapmanb/bcbio-nextgen,biocyberman/bcbio-nextgen,chapmanb/bcbio-nextgen,vladsaveliev/bcbio-nextgen,gifford-lab/bcbio-nextgen,fw1121/bcbio-nextgen,hjanime/bcbio-nextgen,biocyberman/bcbio-nextgen,lpantano/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,lbeltrame/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,mjafin/bcbio-nextgen,a113n/bcbio-nextgen,lpantano/bcbio-nextgen,fw1121/bcbio-nextgen,gifford-lab/bcbio-nextgen,verdurin/bcbio-nextgen,biocyberman/bcbio-nextgen,vladsaveliev/bcbio-nextgen,a113n/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,mjafin/bcbio-nextgen,hjanime/bcbio-nextgen,hjanime/bcbio-nextgen,verdurin/bcbio-nextgen
|
import pybedtools as bt
import six
def concat(bed_files, catted=None):
"""
recursively concat a set of BED files, returning a
sorted bedtools object of the result
"""
+ bed_files = [x for x in bed_files if x]
if len(bed_files) == 0:
if catted:
+ # move to a .bed extension for downstream tools if not already
- return catted.sort()
+ sorted_bed = catted.sort()
+ if not sorted_bed.fn.endswith(".bed"):
+ return sorted_bed.moveto(sorted_bed.fn + ".bed")
+ else:
+ return sorted_bed
else:
return catted
if not catted:
bed_files = list(bed_files)
catted = bt.BedTool(bed_files.pop())
else:
catted = catted.cat(bed_files.pop(), postmerge=False,
force_truncate=False)
return concat(bed_files, catted)
def merge(bedfiles):
"""
given a BED file or list of BED files merge them an return a bedtools object
"""
if isinstance(bedfiles, list):
catted = concat(bedfiles)
else:
catted = concat([bedfiles])
if catted:
return concat(bedfiles).sort().merge()
else:
return catted
|
Move the file to have an extension of .bed.
|
## Code Before:
import pybedtools as bt
import six
def concat(bed_files, catted=None):
"""
recursively concat a set of BED files, returning a
sorted bedtools object of the result
"""
if len(bed_files) == 0:
if catted:
return catted.sort()
else:
return catted
if not catted:
bed_files = list(bed_files)
catted = bt.BedTool(bed_files.pop())
else:
catted = catted.cat(bed_files.pop(), postmerge=False,
force_truncate=False)
return concat(bed_files, catted)
def merge(bedfiles):
"""
given a BED file or list of BED files merge them an return a bedtools object
"""
if isinstance(bedfiles, list):
catted = concat(bedfiles)
else:
catted = concat([bedfiles])
if catted:
return concat(bedfiles).sort().merge()
else:
return catted
## Instruction:
Move the file to have an extension of .bed.
## Code After:
import pybedtools as bt
import six
def concat(bed_files, catted=None):
"""
recursively concat a set of BED files, returning a
sorted bedtools object of the result
"""
bed_files = [x for x in bed_files if x]
if len(bed_files) == 0:
if catted:
# move to a .bed extension for downstream tools if not already
sorted_bed = catted.sort()
if not sorted_bed.fn.endswith(".bed"):
return sorted_bed.moveto(sorted_bed.fn + ".bed")
else:
return sorted_bed
else:
return catted
if not catted:
bed_files = list(bed_files)
catted = bt.BedTool(bed_files.pop())
else:
catted = catted.cat(bed_files.pop(), postmerge=False,
force_truncate=False)
return concat(bed_files, catted)
def merge(bedfiles):
"""
given a BED file or list of BED files merge them an return a bedtools object
"""
if isinstance(bedfiles, list):
catted = concat(bedfiles)
else:
catted = concat([bedfiles])
if catted:
return concat(bedfiles).sort().merge()
else:
return catted
|
// ... existing code ...
"""
bed_files = [x for x in bed_files if x]
if len(bed_files) == 0:
// ... modified code ...
if catted:
# move to a .bed extension for downstream tools if not already
sorted_bed = catted.sort()
if not sorted_bed.fn.endswith(".bed"):
return sorted_bed.moveto(sorted_bed.fn + ".bed")
else:
return sorted_bed
else:
// ... rest of the code ...
|
f8f0335a1a790b1ef8163a2be968b29769be80a2
|
arim/models.py
|
arim/models.py
|
from django.db import models
class Lease(models.Model):
class Meta:
db_table = 'autoreg'
mac = models.CharField(max_length=17, db_index=True)
ip = models.IntegerField(primary_key=True)
date = models.IntegerField()
|
from django.db import models
from ipaddr import IPv4Address
class Lease(models.Model):
class Meta:
db_table = 'autoreg'
mac = models.CharField(max_length=17, db_index=True)
ip = models.IntegerField(primary_key=True)
date = models.IntegerField()
def __str__(self):
return unicode(self).encode('ascii', 'replace')
def __unicode__(self):
return unicode(IPv4Address(self.ip)) + u' = ' + unicode(self.mac)
def __repr__(self):
return u'<Lease: ' + unicode(self) + u'>'
|
Add __str__, __unicode__, and __repr__
|
Add __str__, __unicode__, and __repr__
|
Python
|
bsd-3-clause
|
drkitty/arim,OSU-Net/arim,OSU-Net/arim,drkitty/arim,drkitty/arim,OSU-Net/arim
|
from django.db import models
+ from ipaddr import IPv4Address
class Lease(models.Model):
class Meta:
db_table = 'autoreg'
mac = models.CharField(max_length=17, db_index=True)
ip = models.IntegerField(primary_key=True)
date = models.IntegerField()
+ def __str__(self):
+ return unicode(self).encode('ascii', 'replace')
+
+ def __unicode__(self):
+ return unicode(IPv4Address(self.ip)) + u' = ' + unicode(self.mac)
+
+ def __repr__(self):
+ return u'<Lease: ' + unicode(self) + u'>'
+
|
Add __str__, __unicode__, and __repr__
|
## Code Before:
from django.db import models
class Lease(models.Model):
class Meta:
db_table = 'autoreg'
mac = models.CharField(max_length=17, db_index=True)
ip = models.IntegerField(primary_key=True)
date = models.IntegerField()
## Instruction:
Add __str__, __unicode__, and __repr__
## Code After:
from django.db import models
from ipaddr import IPv4Address
class Lease(models.Model):
class Meta:
db_table = 'autoreg'
mac = models.CharField(max_length=17, db_index=True)
ip = models.IntegerField(primary_key=True)
date = models.IntegerField()
def __str__(self):
return unicode(self).encode('ascii', 'replace')
def __unicode__(self):
return unicode(IPv4Address(self.ip)) + u' = ' + unicode(self.mac)
def __repr__(self):
return u'<Lease: ' + unicode(self) + u'>'
|
...
from django.db import models
from ipaddr import IPv4Address
...
date = models.IntegerField()
def __str__(self):
return unicode(self).encode('ascii', 'replace')
def __unicode__(self):
return unicode(IPv4Address(self.ip)) + u' = ' + unicode(self.mac)
def __repr__(self):
return u'<Lease: ' + unicode(self) + u'>'
...
|
257afb0046c4af30bbfe0d46c36f0ec3257051b6
|
glooey/__init__.py
|
glooey/__init__.py
|
__version__ = '0.1.0'
from .widget import *
from .root import *
from .containers import *
from .miscellaneous import *
from . import drawing
|
__version__ = '0.1.0'
from .widget import *
from .root import *
from .containers import *
from .miscellaneous import *
from . import drawing
from . import themes
|
Make the themes module available by default.
|
Make the themes module available by default.
|
Python
|
mit
|
kxgames/glooey,kxgames/glooey
|
__version__ = '0.1.0'
from .widget import *
from .root import *
from .containers import *
from .miscellaneous import *
from . import drawing
+ from . import themes
|
Make the themes module available by default.
|
## Code Before:
__version__ = '0.1.0'
from .widget import *
from .root import *
from .containers import *
from .miscellaneous import *
from . import drawing
## Instruction:
Make the themes module available by default.
## Code After:
__version__ = '0.1.0'
from .widget import *
from .root import *
from .containers import *
from .miscellaneous import *
from . import drawing
from . import themes
|
# ... existing code ...
from . import drawing
from . import themes
# ... rest of the code ...
|
89a5f257cd1fb285db78b6178e9418fbf48fdaf4
|
YouKnowShit/DownloadFilesRename.py
|
YouKnowShit/DownloadFilesRename.py
|
import requests
import bs4
import os
import urllib.request
import shutil
import re
distDir = 'F:\\utorrent\\WEST'
p = re.compile(r'(\D+\d+)\w*(.\w+)')
filenames = os.listdir(distDir)
upperfilenames = []
print(filenames)
for filenamepref in filenames:
if (filenamepref.find('_') > 0):
filenameprefit = filenamepref[filenamepref.index('_'):]
else:
filenameprefit = filenamepref
filenamepost = filenameprefit.replace('-', '').replace('_', '')\
.replace(' ', '').replace('.1080p', '').replace('.720p', '')
distname = p.search(filenamepost).group(1).upper() + p.search(filenamepost).group(2).lower()
print(distname)
os.rename(distDir + os.sep + filenamepref, distDir + os.sep + distname)
|
import os
import re
distDir = 'H:\\temp'
p = re.compile(r'(\D+\d+)\w*(.\w+)')
filenames = os.listdir(distDir)
upperfilenames = []
print(filenames)
for filenamepref in filenames:
if filenamepref.find('_') > 0:
filenameprefit = filenamepref[filenamepref.index('_'):]
else:
filenameprefit = filenamepref
filenamepost = filenameprefit.replace('-', '').replace('_', '')\
.replace(' ', '').replace('.1080p', '').replace('.720p', '')\
.replace('[thz.la]', '').replace('[Thz.la]', '')
distname = p.search(filenamepost).group(1).upper() + p.search(filenamepost).group(2).lower()
print(distname)
os.rename(distDir + os.sep + filenamepref, distDir + os.sep + distname)
|
Remove [thz.la] from file names.
|
Remove [thz.la] from file names.
|
Python
|
mit
|
jiangtianyu2009/PiSoftCake
|
- import requests
- import bs4
import os
- import urllib.request
- import shutil
import re
- distDir = 'F:\\utorrent\\WEST'
+ distDir = 'H:\\temp'
p = re.compile(r'(\D+\d+)\w*(.\w+)')
filenames = os.listdir(distDir)
upperfilenames = []
print(filenames)
for filenamepref in filenames:
- if (filenamepref.find('_') > 0):
+ if filenamepref.find('_') > 0:
filenameprefit = filenamepref[filenamepref.index('_'):]
else:
filenameprefit = filenamepref
filenamepost = filenameprefit.replace('-', '').replace('_', '')\
- .replace(' ', '').replace('.1080p', '').replace('.720p', '')
+ .replace(' ', '').replace('.1080p', '').replace('.720p', '')\
+ .replace('[thz.la]', '').replace('[Thz.la]', '')
distname = p.search(filenamepost).group(1).upper() + p.search(filenamepost).group(2).lower()
print(distname)
os.rename(distDir + os.sep + filenamepref, distDir + os.sep + distname)
+
|
Remove [thz.la] from file names.
|
## Code Before:
import requests
import bs4
import os
import urllib.request
import shutil
import re
distDir = 'F:\\utorrent\\WEST'
p = re.compile(r'(\D+\d+)\w*(.\w+)')
filenames = os.listdir(distDir)
upperfilenames = []
print(filenames)
for filenamepref in filenames:
if (filenamepref.find('_') > 0):
filenameprefit = filenamepref[filenamepref.index('_'):]
else:
filenameprefit = filenamepref
filenamepost = filenameprefit.replace('-', '').replace('_', '')\
.replace(' ', '').replace('.1080p', '').replace('.720p', '')
distname = p.search(filenamepost).group(1).upper() + p.search(filenamepost).group(2).lower()
print(distname)
os.rename(distDir + os.sep + filenamepref, distDir + os.sep + distname)
## Instruction:
Remove [thz.la] from file names.
## Code After:
import os
import re
distDir = 'H:\\temp'
p = re.compile(r'(\D+\d+)\w*(.\w+)')
filenames = os.listdir(distDir)
upperfilenames = []
print(filenames)
for filenamepref in filenames:
if filenamepref.find('_') > 0:
filenameprefit = filenamepref[filenamepref.index('_'):]
else:
filenameprefit = filenamepref
filenamepost = filenameprefit.replace('-', '').replace('_', '')\
.replace(' ', '').replace('.1080p', '').replace('.720p', '')\
.replace('[thz.la]', '').replace('[Thz.la]', '')
distname = p.search(filenamepost).group(1).upper() + p.search(filenamepost).group(2).lower()
print(distname)
os.rename(distDir + os.sep + filenamepref, distDir + os.sep + distname)
|
...
import os
import re
...
distDir = 'H:\\temp'
p = re.compile(r'(\D+\d+)\w*(.\w+)')
...
for filenamepref in filenames:
if filenamepref.find('_') > 0:
filenameprefit = filenamepref[filenamepref.index('_'):]
...
filenamepost = filenameprefit.replace('-', '').replace('_', '')\
.replace(' ', '').replace('.1080p', '').replace('.720p', '')\
.replace('[thz.la]', '').replace('[Thz.la]', '')
distname = p.search(filenamepost).group(1).upper() + p.search(filenamepost).group(2).lower()
...
|
207871f4f057d88f67bad0c371f880664dcee062
|
pydirections/route_requester.py
|
pydirections/route_requester.py
|
ACCEPTABLE_MODES = set(["driving", "walking", "bicycling", "transit"])
class DirectionsRequest(object):
def __init__(self, mode="driving", **kwargs):
self.mode = mode
self.origin = kwargs['origin']
self.destination = kwargs['destination']
def set_api_key(self, key):
self.api_key = key
return self
|
ACCEPTABLE_MODES = set(["driving", "walking", "bicycling", "transit"])
class DirectionsRequest(object):
def __init__(self, **kwargs):
self.mode = "driving"
self.origin = kwargs['origin']
self.destination = kwargs['destination']
def set_api_key(self, key):
self.api_key = key
return self
|
Build custom route requester class
|
Build custom route requester class
|
Python
|
apache-2.0
|
apranav19/pydirections
|
ACCEPTABLE_MODES = set(["driving", "walking", "bicycling", "transit"])
class DirectionsRequest(object):
- def __init__(self, mode="driving", **kwargs):
+ def __init__(self, **kwargs):
- self.mode = mode
+ self.mode = "driving"
self.origin = kwargs['origin']
self.destination = kwargs['destination']
+
def set_api_key(self, key):
self.api_key = key
return self
|
Build custom route requester class
|
## Code Before:
ACCEPTABLE_MODES = set(["driving", "walking", "bicycling", "transit"])
class DirectionsRequest(object):
def __init__(self, mode="driving", **kwargs):
self.mode = mode
self.origin = kwargs['origin']
self.destination = kwargs['destination']
def set_api_key(self, key):
self.api_key = key
return self
## Instruction:
Build custom route requester class
## Code After:
ACCEPTABLE_MODES = set(["driving", "walking", "bicycling", "transit"])
class DirectionsRequest(object):
def __init__(self, **kwargs):
self.mode = "driving"
self.origin = kwargs['origin']
self.destination = kwargs['destination']
def set_api_key(self, key):
self.api_key = key
return self
|
// ... existing code ...
def __init__(self, **kwargs):
self.mode = "driving"
self.origin = kwargs['origin']
// ... modified code ...
def set_api_key(self, key):
// ... rest of the code ...
|
e386b013b4c0124c623bd99dcb1a1d01b6e6bd86
|
supriya/__init__.py
|
supriya/__init__.py
|
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction, Buffer, BufferGroup, Bus, BusGroup, Group, Server, Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat, SampleFormat, SoundFile,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate, DoneAction, Range, SynthDef, SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets, SupriyaConfiguration,
)
from abjad.tools.topleveltools import ( # noqa
graph, new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
|
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction,
Buffer,
BufferGroup,
Bus,
BusGroup,
Group,
Server,
Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat,
SampleFormat,
SoundFile,
play,
render,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate,
DoneAction,
Envelope,
Range,
SynthDef,
SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets,
Profiler,
SupriyaConfiguration,
)
from supriya.tools.wrappertools import ( # noqa
Say,
)
from abjad.tools.topleveltools import ( # noqa
graph,
new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
|
Add play, render and Say to toplevel namespace.
|
Add play, render and Say to toplevel namespace.
|
Python
|
mit
|
Pulgama/supriya,Pulgama/supriya,Pulgama/supriya,josiah-wolf-oberholtzer/supriya,Pulgama/supriya
|
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
- AddAction, Buffer, BufferGroup, Bus, BusGroup, Group, Server, Synth,
+ AddAction,
+ Buffer,
+ BufferGroup,
+ Bus,
+ BusGroup,
+ Group,
+ Server,
+ Synth,
)
from supriya.tools.soundfiletools import ( # noqa
- HeaderFormat, SampleFormat, SoundFile,
+ HeaderFormat,
+ SampleFormat,
+ SoundFile,
+ play,
+ render,
)
from supriya.tools.synthdeftools import ( # noqa
- CalculationRate, DoneAction, Range, SynthDef, SynthDefBuilder,
+ CalculationRate,
+ DoneAction,
+ Envelope,
+ Range,
+ SynthDef,
+ SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
+ Assets,
+ Profiler,
- Assets, SupriyaConfiguration,
+ SupriyaConfiguration,
+ )
+ from supriya.tools.wrappertools import ( # noqa
+ Say,
)
from abjad.tools.topleveltools import ( # noqa
- graph, new,
+ graph,
+ new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
|
Add play, render and Say to toplevel namespace.
|
## Code Before:
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction, Buffer, BufferGroup, Bus, BusGroup, Group, Server, Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat, SampleFormat, SoundFile,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate, DoneAction, Range, SynthDef, SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets, SupriyaConfiguration,
)
from abjad.tools.topleveltools import ( # noqa
graph, new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
## Instruction:
Add play, render and Say to toplevel namespace.
## Code After:
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction,
Buffer,
BufferGroup,
Bus,
BusGroup,
Group,
Server,
Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat,
SampleFormat,
SoundFile,
play,
render,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate,
DoneAction,
Envelope,
Range,
SynthDef,
SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets,
Profiler,
SupriyaConfiguration,
)
from supriya.tools.wrappertools import ( # noqa
Say,
)
from abjad.tools.topleveltools import ( # noqa
graph,
new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
|
# ... existing code ...
from supriya.tools.servertools import ( # noqa
AddAction,
Buffer,
BufferGroup,
Bus,
BusGroup,
Group,
Server,
Synth,
)
# ... modified code ...
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat,
SampleFormat,
SoundFile,
play,
render,
)
...
from supriya.tools.synthdeftools import ( # noqa
CalculationRate,
DoneAction,
Envelope,
Range,
SynthDef,
SynthDefBuilder,
)
...
from supriya.tools.systemtools import ( # noqa
Assets,
Profiler,
SupriyaConfiguration,
)
from supriya.tools.wrappertools import ( # noqa
Say,
)
...
from abjad.tools.topleveltools import ( # noqa
graph,
new,
)
# ... rest of the code ...
|
0575b4345fc21ca537a95866ff2a24d25128c698
|
readthedocs/config/find.py
|
readthedocs/config/find.py
|
"""Helper functions to search files."""
from __future__ import division, print_function, unicode_literals
import os
import re
def find_all(path, filename_regex):
"""Find all files in ``path`` that match ``filename_regex`` regex."""
path = os.path.abspath(path)
for root, dirs, files in os.walk(path, topdown=True):
dirs.sort()
for filename in files:
if re.match(filename_regex, filename):
yield os.path.abspath(os.path.join(root, filename))
def find_one(path, filename_regex):
"""Find the first file in ``path`` that match ``filename_regex`` regex."""
for _path in find_all(path, filename_regex):
return _path
return ''
|
"""Helper functions to search files."""
from __future__ import division, print_function, unicode_literals
import os
import re
def find_one(path, filename_regex):
"""Find the first file in ``path`` that match ``filename_regex`` regex."""
_path = os.path.abspath(path)
for filename in os.listdir(_path):
if re.match(filename_regex, filename):
return os.path.join(_path, filename)
return ''
|
Remove logic for iterating directories to search for config file
|
Remove logic for iterating directories to search for config file
|
Python
|
mit
|
rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org
|
"""Helper functions to search files."""
from __future__ import division, print_function, unicode_literals
import os
import re
- def find_all(path, filename_regex):
- """Find all files in ``path`` that match ``filename_regex`` regex."""
- path = os.path.abspath(path)
- for root, dirs, files in os.walk(path, topdown=True):
- dirs.sort()
- for filename in files:
- if re.match(filename_regex, filename):
- yield os.path.abspath(os.path.join(root, filename))
-
-
def find_one(path, filename_regex):
"""Find the first file in ``path`` that match ``filename_regex`` regex."""
- for _path in find_all(path, filename_regex):
- return _path
+ _path = os.path.abspath(path)
+ for filename in os.listdir(_path):
+ if re.match(filename_regex, filename):
+ return os.path.join(_path, filename)
+
return ''
|
Remove logic for iterating directories to search for config file
|
## Code Before:
"""Helper functions to search files."""
from __future__ import division, print_function, unicode_literals
import os
import re
def find_all(path, filename_regex):
"""Find all files in ``path`` that match ``filename_regex`` regex."""
path = os.path.abspath(path)
for root, dirs, files in os.walk(path, topdown=True):
dirs.sort()
for filename in files:
if re.match(filename_regex, filename):
yield os.path.abspath(os.path.join(root, filename))
def find_one(path, filename_regex):
"""Find the first file in ``path`` that match ``filename_regex`` regex."""
for _path in find_all(path, filename_regex):
return _path
return ''
## Instruction:
Remove logic for iterating directories to search for config file
## Code After:
"""Helper functions to search files."""
from __future__ import division, print_function, unicode_literals
import os
import re
def find_one(path, filename_regex):
"""Find the first file in ``path`` that match ``filename_regex`` regex."""
_path = os.path.abspath(path)
for filename in os.listdir(_path):
if re.match(filename_regex, filename):
return os.path.join(_path, filename)
return ''
|
# ... existing code ...
def find_one(path, filename_regex):
# ... modified code ...
"""Find the first file in ``path`` that match ``filename_regex`` regex."""
_path = os.path.abspath(path)
for filename in os.listdir(_path):
if re.match(filename_regex, filename):
return os.path.join(_path, filename)
return ''
# ... rest of the code ...
|
9e77d9a40ae13cff09051c9975361dca9259b426
|
gala/__init__.py
|
gala/__init__.py
|
from __future__ import absolute_import
import sys, logging
if sys.version_info[:2] < (2,6):
logging.warning('Gala has not been tested on Python versions prior to 2.6'+
' (%d.%d detected).'%sys.version_info[:2])
__author__ = 'Juan Nunez-Iglesias <[email protected]>, '+\
'Ryan Kennedy <[email protected]>'
del sys, logging
__all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify',
'stack_np', 'app_logger', 'option_manager', 'features', 'filter']
__version__ = '0.3dev'
|
from __future__ import absolute_import
import sys, logging
if sys.version_info[:2] < (2,6):
logging.warning('Gala has not been tested on Python versions prior to 2.6'+
' (%d.%d detected).'%sys.version_info[:2])
__author__ = 'Juan Nunez-Iglesias <[email protected]>, '+\
'Ryan Kennedy <[email protected]>'
del sys, logging
__all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify',
'stack_np', 'app_logger', 'option_manager', 'features', 'filter']
__version__ = '0.3dev'
|
Update email in module init
|
Update email in module init
|
Python
|
bsd-3-clause
|
jni/gala,janelia-flyem/gala
|
from __future__ import absolute_import
import sys, logging
if sys.version_info[:2] < (2,6):
logging.warning('Gala has not been tested on Python versions prior to 2.6'+
' (%d.%d detected).'%sys.version_info[:2])
- __author__ = 'Juan Nunez-Iglesias <[email protected]>, '+\
+ __author__ = 'Juan Nunez-Iglesias <[email protected]>, '+\
'Ryan Kennedy <[email protected]>'
del sys, logging
__all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify',
'stack_np', 'app_logger', 'option_manager', 'features', 'filter']
__version__ = '0.3dev'
|
Update email in module init
|
## Code Before:
from __future__ import absolute_import
import sys, logging
if sys.version_info[:2] < (2,6):
logging.warning('Gala has not been tested on Python versions prior to 2.6'+
' (%d.%d detected).'%sys.version_info[:2])
__author__ = 'Juan Nunez-Iglesias <[email protected]>, '+\
'Ryan Kennedy <[email protected]>'
del sys, logging
__all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify',
'stack_np', 'app_logger', 'option_manager', 'features', 'filter']
__version__ = '0.3dev'
## Instruction:
Update email in module init
## Code After:
from __future__ import absolute_import
import sys, logging
if sys.version_info[:2] < (2,6):
logging.warning('Gala has not been tested on Python versions prior to 2.6'+
' (%d.%d detected).'%sys.version_info[:2])
__author__ = 'Juan Nunez-Iglesias <[email protected]>, '+\
'Ryan Kennedy <[email protected]>'
del sys, logging
__all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify',
'stack_np', 'app_logger', 'option_manager', 'features', 'filter']
__version__ = '0.3dev'
|
...
__author__ = 'Juan Nunez-Iglesias <[email protected]>, '+\
'Ryan Kennedy <[email protected]>'
...
|
83ed5ca9bc388dbe9b2d82510842a99b3a2e5ce7
|
src/personalisation/middleware.py
|
src/personalisation/middleware.py
|
from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
|
from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
if not request.session.get('segments'):
request.session['segments'] = []
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
|
Create empty 'segments' object in session if none exists
|
Create empty 'segments' object in session if none exists
|
Python
|
mit
|
LabD/wagtail-personalisation,LabD/wagtail-personalisation,LabD/wagtail-personalisation
|
from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
+ if not request.session.get('segments'):
+ request.session['segments'] = []
+
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
|
Create empty 'segments' object in session if none exists
|
## Code Before:
from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
## Instruction:
Create empty 'segments' object in session if none exists
## Code After:
from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
if not request.session.get('segments'):
request.session['segments'] = []
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
|
...
if not request.session.get('segments'):
request.session['segments'] = []
print(request.session['segments'])
...
|
d2444e557e097f375ee830ebf382d68b702b80da
|
src/ansible/forms.py
|
src/ansible/forms.py
|
from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea)
|
from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
|
Set Textarea width and height
|
Set Textarea width and height
|
Python
|
bsd-3-clause
|
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
|
from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
- playbook = forms.CharField(widget=forms.Textarea)
+ playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
|
Set Textarea width and height
|
## Code Before:
from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea)
## Instruction:
Set Textarea width and height
## Code After:
from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
|
# ... existing code ...
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
# ... rest of the code ...
|
373f0f4637103d526c75cae304740e621ad3c39c
|
resize.py
|
resize.py
|
import cv2
import sys
import numpy as np
def resize(src, w_ratio, h_ratio):
height = src.shape[0]
width = src.shape[1]
dst = cv2.resize(src,(width/100*w_ratio,height/100*h_ratio))
return dst
if __name__ == '__main__':
param = sys.argv
if (len(param) != 4):
print ("Usage: $ python " + param[0] + " sample.jpg wide_ratio height_ratio")
quit()
# open image file
try:
input_img = cv2.imread(param[1])
except:
print ('faild to load %s' % param[1])
quit()
if input_img is None:
print ('faild to load %s' % param[1])
quit()
w_ratio = int(param[2])
h_ratio = int(param[3])
output_img = resize(input_img, w_ratio, h_ratio)
cv2.imwrite(param[1], output_img)
|
import cv2
import sys
def resize(src, w_ratio, h_ratio):
height = src.shape[0]
width = src.shape[1]
dst = cv2.resize(src,((int)(width/100*w_ratio),(int)(height/100*h_ratio)))
return dst
if __name__ == '__main__':
param = sys.argv
if (len(param) != 4):
print ("Usage: $ python " + param[0] + " sample.jpg wide_ratio height_ratio")
quit()
# open image file
try:
input_img = cv2.imread(param[1])
except:
print ('faild to load %s' % param[1])
quit()
if input_img is None:
print ('faild to load %s' % param[1])
quit()
w_ratio = int(param[2])
h_ratio = int(param[3])
output_img = resize(input_img, w_ratio, h_ratio)
cv2.imwrite(param[1], output_img)
|
Fix bug and delete unused library
|
Fix bug and delete unused library
|
Python
|
mit
|
karaage0703/python-image-processing,karaage0703/python-image-processing
|
import cv2
import sys
- import numpy as np
def resize(src, w_ratio, h_ratio):
height = src.shape[0]
width = src.shape[1]
- dst = cv2.resize(src,(width/100*w_ratio,height/100*h_ratio))
+ dst = cv2.resize(src,((int)(width/100*w_ratio),(int)(height/100*h_ratio)))
return dst
if __name__ == '__main__':
param = sys.argv
if (len(param) != 4):
print ("Usage: $ python " + param[0] + " sample.jpg wide_ratio height_ratio")
quit()
# open image file
try:
input_img = cv2.imread(param[1])
except:
print ('faild to load %s' % param[1])
quit()
if input_img is None:
print ('faild to load %s' % param[1])
quit()
w_ratio = int(param[2])
h_ratio = int(param[3])
output_img = resize(input_img, w_ratio, h_ratio)
cv2.imwrite(param[1], output_img)
|
Fix bug and delete unused library
|
## Code Before:
import cv2
import sys
import numpy as np
def resize(src, w_ratio, h_ratio):
height = src.shape[0]
width = src.shape[1]
dst = cv2.resize(src,(width/100*w_ratio,height/100*h_ratio))
return dst
if __name__ == '__main__':
param = sys.argv
if (len(param) != 4):
print ("Usage: $ python " + param[0] + " sample.jpg wide_ratio height_ratio")
quit()
# open image file
try:
input_img = cv2.imread(param[1])
except:
print ('faild to load %s' % param[1])
quit()
if input_img is None:
print ('faild to load %s' % param[1])
quit()
w_ratio = int(param[2])
h_ratio = int(param[3])
output_img = resize(input_img, w_ratio, h_ratio)
cv2.imwrite(param[1], output_img)
## Instruction:
Fix bug and delete unused library
## Code After:
import cv2
import sys
def resize(src, w_ratio, h_ratio):
height = src.shape[0]
width = src.shape[1]
dst = cv2.resize(src,((int)(width/100*w_ratio),(int)(height/100*h_ratio)))
return dst
if __name__ == '__main__':
param = sys.argv
if (len(param) != 4):
print ("Usage: $ python " + param[0] + " sample.jpg wide_ratio height_ratio")
quit()
# open image file
try:
input_img = cv2.imread(param[1])
except:
print ('faild to load %s' % param[1])
quit()
if input_img is None:
print ('faild to load %s' % param[1])
quit()
w_ratio = int(param[2])
h_ratio = int(param[3])
output_img = resize(input_img, w_ratio, h_ratio)
cv2.imwrite(param[1], output_img)
|
// ... existing code ...
import sys
// ... modified code ...
width = src.shape[1]
dst = cv2.resize(src,((int)(width/100*w_ratio),(int)(height/100*h_ratio)))
return dst
// ... rest of the code ...
|
50367a2d73c395a85bb7dae058f9435be6ad7c36
|
vtimshow/__init__.py
|
vtimshow/__init__.py
|
import logging
import os
import vitables
_defaults = dict(
AUTHOR = "Keith F Prussing",
AUTHOR_EMAIL = "[email protected]",
LICENSE = "MIT",
PLUGIN_CLASS = "VtImageViewer",
PLUGIN_NAME = "Image Viewer",
COMMENT = "Display data sets as images",
VERSION = "{VERSION!s}",
UID = "image_viewer"
)
_defaults["FOLDER"], _defaults["MODULE_NAME"] = os.path.split(
os.path.dirname(__file__)
)
_defaults["LOGGER"] = logging.getLogger(_defaults["MODULE_NAME"])
_defaults["LOGGER"].addHandler(logging.NullHandler())
__docformat__ = "restructuredtext"
__version__ = _defaults["VERSION"]
plugin_class = _defaults["PLUGIN_CLASS"]
plugin_name = _defaults["PLUGIN_NAME"]
comment = _defaults["COMMENT"]
from vtimshow.vtimageviewer import VtImageViewer
|
import logging
import os
import vitables
_defaults = dict(
AUTHOR = "Keith F Prussing",
AUTHOR_EMAIL = "[email protected]",
LICENSE = "MIT",
PLUGIN_CLASS = "VtImageViewer",
PLUGIN_NAME = "Image Viewer",
COMMENT = "Display data sets as images",
VERSION = "{VERSION!s}",
UID = "image_viewer"
)
_defaults["FOLDER"], _defaults["MODULE_NAME"] = os.path.split(
os.path.dirname(__file__)
)
_defaults["LOGGER"] = logging.getLogger(_defaults["MODULE_NAME"])
_defaults["LOGGER"].addHandler(logging.NullHandler())
__docformat__ = "restructuredtext"
__version__ = _defaults["VERSION"]
plugin_class = _defaults["PLUGIN_CLASS"]
plugin_name = _defaults["PLUGIN_NAME"]
comment = _defaults["COMMENT"]
from vtimshow.vtimageviewer import VtImageViewer
def _setup_logger(name):
"""
Add the GUI's logging window as a stream handler.
By default, the stream logger is removed during the invocation of
``vitables``. The logging window in the GUI is a stream handler for
the ``vitables`` logger _only_. This method will add the logging
window in the GUI as a stream handler for the named logger. The
method checks to see if ``vitables`` is an active application. If
it is not, nothing is done.
"""
logger = logging.getLogger(name)
app = vitables.utils.getApp()
if app is not None:
stream = logging.StreamHandler(app.gui.logger)
stream.setFormatter(
logging.Formatter(vitables.vtgui._GUI_LOG_FORMAT)
)
logger.addHandler(stream)
return
_setup_logger(_defaults["MODULE_NAME"])
|
Add method to log to console
|
Add method to log to console
Add a method to set the GUI logging window to be the stream handler for
my plug in.
|
Python
|
mit
|
kprussing/vtimshow
|
import logging
import os
import vitables
_defaults = dict(
AUTHOR = "Keith F Prussing",
AUTHOR_EMAIL = "[email protected]",
LICENSE = "MIT",
PLUGIN_CLASS = "VtImageViewer",
PLUGIN_NAME = "Image Viewer",
COMMENT = "Display data sets as images",
VERSION = "{VERSION!s}",
UID = "image_viewer"
)
_defaults["FOLDER"], _defaults["MODULE_NAME"] = os.path.split(
os.path.dirname(__file__)
)
_defaults["LOGGER"] = logging.getLogger(_defaults["MODULE_NAME"])
_defaults["LOGGER"].addHandler(logging.NullHandler())
__docformat__ = "restructuredtext"
__version__ = _defaults["VERSION"]
plugin_class = _defaults["PLUGIN_CLASS"]
plugin_name = _defaults["PLUGIN_NAME"]
comment = _defaults["COMMENT"]
from vtimshow.vtimageviewer import VtImageViewer
+ def _setup_logger(name):
+ """
+ Add the GUI's logging window as a stream handler.
+ By default, the stream logger is removed during the invocation of
+ ``vitables``. The logging window in the GUI is a stream handler for
+ the ``vitables`` logger _only_. This method will add the logging
+ window in the GUI as a stream handler for the named logger. The
+ method checks to see if ``vitables`` is an active application. If
+ it is not, nothing is done.
+
+ """
+ logger = logging.getLogger(name)
+ app = vitables.utils.getApp()
+ if app is not None:
+ stream = logging.StreamHandler(app.gui.logger)
+ stream.setFormatter(
+ logging.Formatter(vitables.vtgui._GUI_LOG_FORMAT)
+ )
+ logger.addHandler(stream)
+
+ return
+
+ _setup_logger(_defaults["MODULE_NAME"])
+
+
|
Add method to log to console
|
## Code Before:
import logging
import os
import vitables
_defaults = dict(
AUTHOR = "Keith F Prussing",
AUTHOR_EMAIL = "[email protected]",
LICENSE = "MIT",
PLUGIN_CLASS = "VtImageViewer",
PLUGIN_NAME = "Image Viewer",
COMMENT = "Display data sets as images",
VERSION = "{VERSION!s}",
UID = "image_viewer"
)
_defaults["FOLDER"], _defaults["MODULE_NAME"] = os.path.split(
os.path.dirname(__file__)
)
_defaults["LOGGER"] = logging.getLogger(_defaults["MODULE_NAME"])
_defaults["LOGGER"].addHandler(logging.NullHandler())
__docformat__ = "restructuredtext"
__version__ = _defaults["VERSION"]
plugin_class = _defaults["PLUGIN_CLASS"]
plugin_name = _defaults["PLUGIN_NAME"]
comment = _defaults["COMMENT"]
from vtimshow.vtimageviewer import VtImageViewer
## Instruction:
Add method to log to console
## Code After:
import logging
import os
import vitables
_defaults = dict(
AUTHOR = "Keith F Prussing",
AUTHOR_EMAIL = "[email protected]",
LICENSE = "MIT",
PLUGIN_CLASS = "VtImageViewer",
PLUGIN_NAME = "Image Viewer",
COMMENT = "Display data sets as images",
VERSION = "{VERSION!s}",
UID = "image_viewer"
)
_defaults["FOLDER"], _defaults["MODULE_NAME"] = os.path.split(
os.path.dirname(__file__)
)
_defaults["LOGGER"] = logging.getLogger(_defaults["MODULE_NAME"])
_defaults["LOGGER"].addHandler(logging.NullHandler())
__docformat__ = "restructuredtext"
__version__ = _defaults["VERSION"]
plugin_class = _defaults["PLUGIN_CLASS"]
plugin_name = _defaults["PLUGIN_NAME"]
comment = _defaults["COMMENT"]
from vtimshow.vtimageviewer import VtImageViewer
def _setup_logger(name):
"""
Add the GUI's logging window as a stream handler.
By default, the stream logger is removed during the invocation of
``vitables``. The logging window in the GUI is a stream handler for
the ``vitables`` logger _only_. This method will add the logging
window in the GUI as a stream handler for the named logger. The
method checks to see if ``vitables`` is an active application. If
it is not, nothing is done.
"""
logger = logging.getLogger(name)
app = vitables.utils.getApp()
if app is not None:
stream = logging.StreamHandler(app.gui.logger)
stream.setFormatter(
logging.Formatter(vitables.vtgui._GUI_LOG_FORMAT)
)
logger.addHandler(stream)
return
_setup_logger(_defaults["MODULE_NAME"])
|
...
def _setup_logger(name):
"""
Add the GUI's logging window as a stream handler.
By default, the stream logger is removed during the invocation of
``vitables``. The logging window in the GUI is a stream handler for
the ``vitables`` logger _only_. This method will add the logging
window in the GUI as a stream handler for the named logger. The
method checks to see if ``vitables`` is an active application. If
it is not, nothing is done.
"""
logger = logging.getLogger(name)
app = vitables.utils.getApp()
if app is not None:
stream = logging.StreamHandler(app.gui.logger)
stream.setFormatter(
logging.Formatter(vitables.vtgui._GUI_LOG_FORMAT)
)
logger.addHandler(stream)
return
_setup_logger(_defaults["MODULE_NAME"])
...
|
25cd8afdfede8a522f8d0f08ee4678a2e9c46a4b
|
curious/commands/__init__.py
|
curious/commands/__init__.py
|
import functools
from curious.commands.command import Command
def command(*args, **kwargs):
"""
A decorator to mark a function as a command.
This will put a `factory` attribute on the function, which can later be called to create the Command instance.
All arguments are passed to the Command class.
"""
def __inner(func):
factory = functools.partial(Command, func, *args, **kwargs)
func.factory = factory
return func
return __inner
def event(func):
"""
Marks a function as an event.
:param func: Either the function, or the name to give to the event.
"""
if isinstance(func, str):
def __innr(f):
f.event = func
return f
return __innr
else:
func.event = func.__name__[3:]
return func
|
import functools
from curious.commands.command import Command
def command(*args, klass: type=Command, **kwargs):
"""
A decorator to mark a function as a command.
This will put a `factory` attribute on the function, which can later be called to create the Command instance.
All arguments are passed to the Command class.
:param klass: The command class type to wrap the object in.
"""
def __inner(func):
factory = functools.partial(klass, func, *args, **kwargs)
func.factory = factory
return func
return __inner
def event(func):
"""
Marks a function as an event.
:param func: Either the function, or the name to give to the event.
"""
if isinstance(func, str):
def __innr(f):
f.event = func
return f
return __innr
else:
func.event = func.__name__[3:]
return func
|
Allow changing what object is returned from Command instances.
|
Allow changing what object is returned from Command instances.
|
Python
|
mit
|
SunDwarf/curious
|
import functools
from curious.commands.command import Command
- def command(*args, **kwargs):
+ def command(*args, klass: type=Command, **kwargs):
"""
A decorator to mark a function as a command.
This will put a `factory` attribute on the function, which can later be called to create the Command instance.
All arguments are passed to the Command class.
+
+ :param klass: The command class type to wrap the object in.
"""
def __inner(func):
- factory = functools.partial(Command, func, *args, **kwargs)
+ factory = functools.partial(klass, func, *args, **kwargs)
func.factory = factory
return func
return __inner
def event(func):
"""
Marks a function as an event.
:param func: Either the function, or the name to give to the event.
"""
if isinstance(func, str):
def __innr(f):
f.event = func
return f
return __innr
else:
func.event = func.__name__[3:]
return func
|
Allow changing what object is returned from Command instances.
|
## Code Before:
import functools
from curious.commands.command import Command
def command(*args, **kwargs):
"""
A decorator to mark a function as a command.
This will put a `factory` attribute on the function, which can later be called to create the Command instance.
All arguments are passed to the Command class.
"""
def __inner(func):
factory = functools.partial(Command, func, *args, **kwargs)
func.factory = factory
return func
return __inner
def event(func):
"""
Marks a function as an event.
:param func: Either the function, or the name to give to the event.
"""
if isinstance(func, str):
def __innr(f):
f.event = func
return f
return __innr
else:
func.event = func.__name__[3:]
return func
## Instruction:
Allow changing what object is returned from Command instances.
## Code After:
import functools
from curious.commands.command import Command
def command(*args, klass: type=Command, **kwargs):
"""
A decorator to mark a function as a command.
This will put a `factory` attribute on the function, which can later be called to create the Command instance.
All arguments are passed to the Command class.
:param klass: The command class type to wrap the object in.
"""
def __inner(func):
factory = functools.partial(klass, func, *args, **kwargs)
func.factory = factory
return func
return __inner
def event(func):
"""
Marks a function as an event.
:param func: Either the function, or the name to give to the event.
"""
if isinstance(func, str):
def __innr(f):
f.event = func
return f
return __innr
else:
func.event = func.__name__[3:]
return func
|
// ... existing code ...
def command(*args, klass: type=Command, **kwargs):
"""
// ... modified code ...
All arguments are passed to the Command class.
:param klass: The command class type to wrap the object in.
"""
...
def __inner(func):
factory = functools.partial(klass, func, *args, **kwargs)
func.factory = factory
// ... rest of the code ...
|
dc9070c14892114b9e05e84cc9195d0fb58f859d
|
api_bouncer/serializers.py
|
api_bouncer/serializers.py
|
import uuid
import jsonschema
from rest_framework import serializers
from .models import (
Api,
Consumer,
ConsumerKey,
Plugin,
)
from .schemas import plugins
class ApiSerializer(serializers.ModelSerializer):
class Meta:
model = Api
fields = '__all__'
class ConsumerSerializer(serializers.ModelSerializer):
class Meta:
model = Consumer
fields = '__all__'
class ConsumerKeySerializer(serializers.ModelSerializer):
class Meta:
model = ConsumerKey
fields = '__all__'
extra_kwargs = {
'key': {
'required': False,
'allow_null': True,
'allow_blank': True,
},
}
def validate_key(self, value):
"""Verify if no key is given and generate one"""
if not value:
value = str(uuid.uuid4()).replace('-', '')
return value
class PluginSerializer(serializers.ModelSerializer):
class Meta:
model = Plugin
fields = '__all__'
extra_kwargs = {
'config': {
'default': {},
}
}
def validate(self, data):
name = data.get('name')
if not name or name not in plugins:
raise serializers.ValidationError('Invalid plugin name')
plugin_schema = plugins[name]
try:
jsonschema.validate(data['config'], plugin_schema)
except jsonschema.ValidationError as e:
raise serializers.ValidationError({'config': e})
return data
|
import uuid
import jsonschema
from rest_framework import serializers
from .models import (
Api,
Consumer,
ConsumerKey,
Plugin,
)
from .schemas import plugins
class ConsumerSerializer(serializers.ModelSerializer):
class Meta:
model = Consumer
fields = '__all__'
class ConsumerKeySerializer(serializers.ModelSerializer):
consumer = serializers.SlugRelatedField(
many=False,
read_only=False,
slug_field='username',
queryset=Consumer.objects.all()
)
class Meta:
model = ConsumerKey
fields = '__all__'
extra_kwargs = {
'key': {
'required': False,
'allow_null': True,
'allow_blank': True,
},
}
def validate_key(self, value):
"""Verify if no key is given and generate one"""
if not value:
value = str(uuid.uuid4()).replace('-', '')
return value
class PluginSerializer(serializers.ModelSerializer):
api = serializers.SlugRelatedField(
many=False,
read_only=True,
slug_field='name'
)
class Meta:
model = Plugin
fields = '__all__'
extra_kwargs = {
'config': {
'default': {},
}
}
def validate(self, data):
name = data.get('name')
if not name or name not in plugins:
raise serializers.ValidationError('Invalid plugin name')
plugin_schema = plugins[name]
try:
jsonschema.validate(data['config'], plugin_schema)
except jsonschema.ValidationError as e:
raise serializers.ValidationError({'config': e})
return data
class ApiSerializer(serializers.ModelSerializer):
plugins = PluginSerializer(
many=True,
read_only=False,
)
class Meta:
model = Api
fields = '__all__'
|
Use SlugRelatedField for foreign keys for better readability
|
Use SlugRelatedField for foreign keys for better readability
|
Python
|
apache-2.0
|
menecio/django-api-bouncer
|
import uuid
import jsonschema
from rest_framework import serializers
from .models import (
Api,
Consumer,
ConsumerKey,
Plugin,
)
from .schemas import plugins
- class ApiSerializer(serializers.ModelSerializer):
- class Meta:
- model = Api
- fields = '__all__'
-
-
class ConsumerSerializer(serializers.ModelSerializer):
class Meta:
model = Consumer
fields = '__all__'
class ConsumerKeySerializer(serializers.ModelSerializer):
+ consumer = serializers.SlugRelatedField(
+ many=False,
+ read_only=False,
+ slug_field='username',
+ queryset=Consumer.objects.all()
+ )
+
class Meta:
model = ConsumerKey
fields = '__all__'
extra_kwargs = {
'key': {
'required': False,
'allow_null': True,
'allow_blank': True,
},
}
def validate_key(self, value):
"""Verify if no key is given and generate one"""
if not value:
value = str(uuid.uuid4()).replace('-', '')
return value
class PluginSerializer(serializers.ModelSerializer):
+ api = serializers.SlugRelatedField(
+ many=False,
+ read_only=True,
+ slug_field='name'
+ )
+
class Meta:
model = Plugin
fields = '__all__'
extra_kwargs = {
'config': {
'default': {},
}
}
def validate(self, data):
name = data.get('name')
if not name or name not in plugins:
raise serializers.ValidationError('Invalid plugin name')
plugin_schema = plugins[name]
try:
jsonschema.validate(data['config'], plugin_schema)
except jsonschema.ValidationError as e:
raise serializers.ValidationError({'config': e})
return data
+
+ class ApiSerializer(serializers.ModelSerializer):
+ plugins = PluginSerializer(
+ many=True,
+ read_only=False,
+ )
+
+ class Meta:
+ model = Api
+ fields = '__all__'
+
|
Use SlugRelatedField for foreign keys for better readability
|
## Code Before:
import uuid
import jsonschema
from rest_framework import serializers
from .models import (
Api,
Consumer,
ConsumerKey,
Plugin,
)
from .schemas import plugins
class ApiSerializer(serializers.ModelSerializer):
class Meta:
model = Api
fields = '__all__'
class ConsumerSerializer(serializers.ModelSerializer):
class Meta:
model = Consumer
fields = '__all__'
class ConsumerKeySerializer(serializers.ModelSerializer):
class Meta:
model = ConsumerKey
fields = '__all__'
extra_kwargs = {
'key': {
'required': False,
'allow_null': True,
'allow_blank': True,
},
}
def validate_key(self, value):
"""Verify if no key is given and generate one"""
if not value:
value = str(uuid.uuid4()).replace('-', '')
return value
class PluginSerializer(serializers.ModelSerializer):
class Meta:
model = Plugin
fields = '__all__'
extra_kwargs = {
'config': {
'default': {},
}
}
def validate(self, data):
name = data.get('name')
if not name or name not in plugins:
raise serializers.ValidationError('Invalid plugin name')
plugin_schema = plugins[name]
try:
jsonschema.validate(data['config'], plugin_schema)
except jsonschema.ValidationError as e:
raise serializers.ValidationError({'config': e})
return data
## Instruction:
Use SlugRelatedField for foreign keys for better readability
## Code After:
import uuid
import jsonschema
from rest_framework import serializers
from .models import (
Api,
Consumer,
ConsumerKey,
Plugin,
)
from .schemas import plugins
class ConsumerSerializer(serializers.ModelSerializer):
class Meta:
model = Consumer
fields = '__all__'
class ConsumerKeySerializer(serializers.ModelSerializer):
consumer = serializers.SlugRelatedField(
many=False,
read_only=False,
slug_field='username',
queryset=Consumer.objects.all()
)
class Meta:
model = ConsumerKey
fields = '__all__'
extra_kwargs = {
'key': {
'required': False,
'allow_null': True,
'allow_blank': True,
},
}
def validate_key(self, value):
"""Verify if no key is given and generate one"""
if not value:
value = str(uuid.uuid4()).replace('-', '')
return value
class PluginSerializer(serializers.ModelSerializer):
api = serializers.SlugRelatedField(
many=False,
read_only=True,
slug_field='name'
)
class Meta:
model = Plugin
fields = '__all__'
extra_kwargs = {
'config': {
'default': {},
}
}
def validate(self, data):
name = data.get('name')
if not name or name not in plugins:
raise serializers.ValidationError('Invalid plugin name')
plugin_schema = plugins[name]
try:
jsonschema.validate(data['config'], plugin_schema)
except jsonschema.ValidationError as e:
raise serializers.ValidationError({'config': e})
return data
class ApiSerializer(serializers.ModelSerializer):
plugins = PluginSerializer(
many=True,
read_only=False,
)
class Meta:
model = Api
fields = '__all__'
|
# ... existing code ...
class ConsumerSerializer(serializers.ModelSerializer):
# ... modified code ...
class ConsumerKeySerializer(serializers.ModelSerializer):
consumer = serializers.SlugRelatedField(
many=False,
read_only=False,
slug_field='username',
queryset=Consumer.objects.all()
)
class Meta:
...
class PluginSerializer(serializers.ModelSerializer):
api = serializers.SlugRelatedField(
many=False,
read_only=True,
slug_field='name'
)
class Meta:
...
return data
class ApiSerializer(serializers.ModelSerializer):
plugins = PluginSerializer(
many=True,
read_only=False,
)
class Meta:
model = Api
fields = '__all__'
# ... rest of the code ...
|
5f935bb952a616c3fe9ca24fa862621dfc1bda24
|
guv/hubs/watchers.py
|
guv/hubs/watchers.py
|
from guv.hubs.abc import AbstractListener
class FdListener(AbstractListener):
"""Default implementation of :cls:`AbstractListener`
"""
pass
class PollFdListener(AbstractListener):
def __init__(self, evtype, fd, cb):
"""
:param cb: Callable
:param args: tuple of arguments to be passed to cb
"""
super().__init__(evtype, fd)
self.cb = cb
class UvFdListener(AbstractListener):
def __init__(self, evtype, fd, handle):
"""
:param handle: underlying pyuv Handle object
:type handle: pyuv.Handle
"""
super().__init__(evtype, fd)
self.handle = handle
|
from guv.hubs.abc import AbstractListener
class PollFdListener(AbstractListener):
def __init__(self, evtype, fd, cb):
"""
:param cb: Callable
:param args: tuple of arguments to be passed to cb
"""
super().__init__(evtype, fd)
self.cb = cb
class UvFdListener(AbstractListener):
def __init__(self, evtype, fd, handle):
"""
:param handle: underlying pyuv Handle object
:type handle: pyuv.Handle
"""
super().__init__(evtype, fd)
self.handle = handle
|
Remove unneeded default Listener implementation
|
Remove unneeded default Listener implementation
|
Python
|
mit
|
veegee/guv,veegee/guv
|
from guv.hubs.abc import AbstractListener
-
-
- class FdListener(AbstractListener):
- """Default implementation of :cls:`AbstractListener`
- """
- pass
class PollFdListener(AbstractListener):
def __init__(self, evtype, fd, cb):
"""
:param cb: Callable
:param args: tuple of arguments to be passed to cb
"""
super().__init__(evtype, fd)
self.cb = cb
class UvFdListener(AbstractListener):
def __init__(self, evtype, fd, handle):
"""
:param handle: underlying pyuv Handle object
:type handle: pyuv.Handle
"""
super().__init__(evtype, fd)
self.handle = handle
|
Remove unneeded default Listener implementation
|
## Code Before:
from guv.hubs.abc import AbstractListener
class FdListener(AbstractListener):
"""Default implementation of :cls:`AbstractListener`
"""
pass
class PollFdListener(AbstractListener):
def __init__(self, evtype, fd, cb):
"""
:param cb: Callable
:param args: tuple of arguments to be passed to cb
"""
super().__init__(evtype, fd)
self.cb = cb
class UvFdListener(AbstractListener):
def __init__(self, evtype, fd, handle):
"""
:param handle: underlying pyuv Handle object
:type handle: pyuv.Handle
"""
super().__init__(evtype, fd)
self.handle = handle
## Instruction:
Remove unneeded default Listener implementation
## Code After:
from guv.hubs.abc import AbstractListener
class PollFdListener(AbstractListener):
def __init__(self, evtype, fd, cb):
"""
:param cb: Callable
:param args: tuple of arguments to be passed to cb
"""
super().__init__(evtype, fd)
self.cb = cb
class UvFdListener(AbstractListener):
def __init__(self, evtype, fd, handle):
"""
:param handle: underlying pyuv Handle object
:type handle: pyuv.Handle
"""
super().__init__(evtype, fd)
self.handle = handle
|
# ... existing code ...
from guv.hubs.abc import AbstractListener
# ... rest of the code ...
|
f668f6066864b1efe3863cdb43b8fee4e08a312b
|
test/test_mk_dirs.py
|
test/test_mk_dirs.py
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_makevers import create_update_dir
import os
def test_mk_dirs(create_update_dir):
"""Test that ensures that downlaods directory is created properly"""
assert not os.path.isdir(Launcher.updatedir)
launch = Launcher('MUST_HAVE_SOMETHING', 'urlurlurl')
launch._reset_update_files()
assert os.path.isdir(Launcher.updatedir)
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_makevers import create_update_dir
import os
def test_mk_dirs(create_update_dir):
"""Test that ensures that downlaods directory is created properly"""
assert not os.path.isdir(Launcher.updatedir)
launch = Launcher('MUST_HAVE_SOMETHING', 'urlurlurl')
launch._reset_update_files()
assert os.path.isdir(Launcher.updatedir)
os.rmdir(Launcher.updatedir)
|
Remove Launcher.updatedir after mkdirs test
|
Remove Launcher.updatedir after mkdirs test
Should go into fixture later
|
Python
|
lgpl-2.1
|
rlee287/pyautoupdate,rlee287/pyautoupdate
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_makevers import create_update_dir
import os
def test_mk_dirs(create_update_dir):
"""Test that ensures that downlaods directory is created properly"""
assert not os.path.isdir(Launcher.updatedir)
launch = Launcher('MUST_HAVE_SOMETHING', 'urlurlurl')
launch._reset_update_files()
assert os.path.isdir(Launcher.updatedir)
+ os.rmdir(Launcher.updatedir)
|
Remove Launcher.updatedir after mkdirs test
|
## Code Before:
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_makevers import create_update_dir
import os
def test_mk_dirs(create_update_dir):
"""Test that ensures that downlaods directory is created properly"""
assert not os.path.isdir(Launcher.updatedir)
launch = Launcher('MUST_HAVE_SOMETHING', 'urlurlurl')
launch._reset_update_files()
assert os.path.isdir(Launcher.updatedir)
## Instruction:
Remove Launcher.updatedir after mkdirs test
## Code After:
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_makevers import create_update_dir
import os
def test_mk_dirs(create_update_dir):
"""Test that ensures that downlaods directory is created properly"""
assert not os.path.isdir(Launcher.updatedir)
launch = Launcher('MUST_HAVE_SOMETHING', 'urlurlurl')
launch._reset_update_files()
assert os.path.isdir(Launcher.updatedir)
os.rmdir(Launcher.updatedir)
|
...
assert os.path.isdir(Launcher.updatedir)
os.rmdir(Launcher.updatedir)
...
|
9454bfa12e36cdab9bf803cf169c1d979bb27381
|
cmus_notify/notifications.py
|
cmus_notify/notifications.py
|
"""Contains code related to notifications."""
import notify2
from .constants import (DEFAULT_ICON_PATH,
DEFAULT_TIMEOUT,
ICONS_BY_STATUS)
from .formatters import format_notification_message
def send_notification(arguments, information):
"""Send the notification to the OS with a Python library.
:param arguments: The parsed arguments
:param information: The various song informations
"""
notify2.init(arguments['application_name'])
title, text = format_notification_message(information,
title=arguments['title'],
body=arguments['body'])
notification = notify2.Notification(
title,
text,
ICONS_BY_STATUS.get('icon_path', DEFAULT_ICON_PATH)
)
notification.set_urgency(arguments.get('urgency', notify2.URGENCY_LOW))
notification.timeout = arguments.get('timeout', DEFAULT_TIMEOUT)
notification.show()
|
"""Contains code related to notifications."""
from .constants import (DEFAULT_ICON_PATH,
DEFAULT_TIMEOUT,
ICONS_BY_STATUS)
from .formatters import format_notification_message
def send_notification(arguments, information):
"""Send the notification to the OS with a Python library.
:param arguments: The parsed arguments
:param information: The various song informations
"""
import notify2
notify2.init(arguments['application_name'])
title, text = format_notification_message(information,
title=arguments['title'],
body=arguments['body'])
notification = notify2.Notification(
title,
text,
ICONS_BY_STATUS.get('icon_path', DEFAULT_ICON_PATH)
)
notification.set_urgency(arguments.get('urgency', notify2.URGENCY_LOW))
notification.timeout = arguments.get('timeout', DEFAULT_TIMEOUT)
notification.show()
|
Fix notify2 being imported with the module
|
Fix notify2 being imported with the module
|
Python
|
mit
|
AntoineGagne/cmus-notify
|
"""Contains code related to notifications."""
-
- import notify2
from .constants import (DEFAULT_ICON_PATH,
DEFAULT_TIMEOUT,
ICONS_BY_STATUS)
from .formatters import format_notification_message
def send_notification(arguments, information):
"""Send the notification to the OS with a Python library.
:param arguments: The parsed arguments
:param information: The various song informations
"""
+ import notify2
+
notify2.init(arguments['application_name'])
title, text = format_notification_message(information,
title=arguments['title'],
body=arguments['body'])
notification = notify2.Notification(
title,
text,
ICONS_BY_STATUS.get('icon_path', DEFAULT_ICON_PATH)
)
notification.set_urgency(arguments.get('urgency', notify2.URGENCY_LOW))
notification.timeout = arguments.get('timeout', DEFAULT_TIMEOUT)
notification.show()
|
Fix notify2 being imported with the module
|
## Code Before:
"""Contains code related to notifications."""
import notify2
from .constants import (DEFAULT_ICON_PATH,
DEFAULT_TIMEOUT,
ICONS_BY_STATUS)
from .formatters import format_notification_message
def send_notification(arguments, information):
"""Send the notification to the OS with a Python library.
:param arguments: The parsed arguments
:param information: The various song informations
"""
notify2.init(arguments['application_name'])
title, text = format_notification_message(information,
title=arguments['title'],
body=arguments['body'])
notification = notify2.Notification(
title,
text,
ICONS_BY_STATUS.get('icon_path', DEFAULT_ICON_PATH)
)
notification.set_urgency(arguments.get('urgency', notify2.URGENCY_LOW))
notification.timeout = arguments.get('timeout', DEFAULT_TIMEOUT)
notification.show()
## Instruction:
Fix notify2 being imported with the module
## Code After:
"""Contains code related to notifications."""
from .constants import (DEFAULT_ICON_PATH,
DEFAULT_TIMEOUT,
ICONS_BY_STATUS)
from .formatters import format_notification_message
def send_notification(arguments, information):
"""Send the notification to the OS with a Python library.
:param arguments: The parsed arguments
:param information: The various song informations
"""
import notify2
notify2.init(arguments['application_name'])
title, text = format_notification_message(information,
title=arguments['title'],
body=arguments['body'])
notification = notify2.Notification(
title,
text,
ICONS_BY_STATUS.get('icon_path', DEFAULT_ICON_PATH)
)
notification.set_urgency(arguments.get('urgency', notify2.URGENCY_LOW))
notification.timeout = arguments.get('timeout', DEFAULT_TIMEOUT)
notification.show()
|
...
"""Contains code related to notifications."""
...
"""
import notify2
notify2.init(arguments['application_name'])
...
|
c33b876c664178de92099b6553a6030789bdaaa4
|
app/v2/templates/get_templates.py
|
app/v2/templates/get_templates.py
|
from flask import jsonify, request
from jsonschema.exceptions import ValidationError
from app import api_user
from app.dao import templates_dao
from app.schema_validation import validate
from app.v2.templates import v2_templates_blueprint
from app.v2.templates.templates_schemas import get_all_template_request
@v2_templates_blueprint.route("/", methods=['GET'])
def get_templates():
validate(request.args.to_dict(), get_all_template_request)
templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id)
print(templates)
return jsonify(
templates=[template.serialize() for template in templates]
), 200
|
from flask import jsonify, request
from jsonschema.exceptions import ValidationError
from app import api_user
from app.dao import templates_dao
from app.schema_validation import validate
from app.v2.templates import v2_templates_blueprint
from app.v2.templates.templates_schemas import get_all_template_request
@v2_templates_blueprint.route("/", methods=['GET'])
def get_templates():
validate(request.args.to_dict(), get_all_template_request)
templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id)
return jsonify(
templates=[template.serialize() for template in templates]
), 200
|
Remove get all template print
|
Remove get all template print
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
from flask import jsonify, request
from jsonschema.exceptions import ValidationError
from app import api_user
from app.dao import templates_dao
from app.schema_validation import validate
from app.v2.templates import v2_templates_blueprint
from app.v2.templates.templates_schemas import get_all_template_request
@v2_templates_blueprint.route("/", methods=['GET'])
def get_templates():
validate(request.args.to_dict(), get_all_template_request)
templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id)
- print(templates)
-
return jsonify(
templates=[template.serialize() for template in templates]
), 200
|
Remove get all template print
|
## Code Before:
from flask import jsonify, request
from jsonschema.exceptions import ValidationError
from app import api_user
from app.dao import templates_dao
from app.schema_validation import validate
from app.v2.templates import v2_templates_blueprint
from app.v2.templates.templates_schemas import get_all_template_request
@v2_templates_blueprint.route("/", methods=['GET'])
def get_templates():
validate(request.args.to_dict(), get_all_template_request)
templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id)
print(templates)
return jsonify(
templates=[template.serialize() for template in templates]
), 200
## Instruction:
Remove get all template print
## Code After:
from flask import jsonify, request
from jsonschema.exceptions import ValidationError
from app import api_user
from app.dao import templates_dao
from app.schema_validation import validate
from app.v2.templates import v2_templates_blueprint
from app.v2.templates.templates_schemas import get_all_template_request
@v2_templates_blueprint.route("/", methods=['GET'])
def get_templates():
validate(request.args.to_dict(), get_all_template_request)
templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id)
return jsonify(
templates=[template.serialize() for template in templates]
), 200
|
...
return jsonify(
...
|
8e7a92bce03ca472bc78bb9df5e2c9cf063c29b7
|
temba/campaigns/tasks.py
|
temba/campaigns/tasks.py
|
from __future__ import unicode_literals
from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task
@task(track_started=True, name='check_campaigns_task') # pragma: no cover
def check_campaigns_task(sched_id=None):
"""
See if any event fires need to be triggered
"""
logger = check_campaigns_task.get_logger()
# get a lock
r = get_redis_connection()
key = 'check_campaigns'
# only do this if we aren't already checking campaigns
if not r.get(key):
with r.lock(key, timeout=3600):
# for each that needs to be fired
for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('event', 'event.org'):
try:
push_task(fire.event.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
except: # pragma: no cover
logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
|
from __future__ import unicode_literals
from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task
@task(track_started=True, name='check_campaigns_task') # pragma: no cover
def check_campaigns_task(sched_id=None):
"""
See if any event fires need to be triggered
"""
logger = check_campaigns_task.get_logger()
# get a lock
r = get_redis_connection()
key = 'check_campaigns'
# only do this if we aren't already checking campaigns
if not r.get(key):
with r.lock(key, timeout=3600):
# for each that needs to be fired
for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('contact', 'contact.org'):
try:
push_task(fire.contact.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
except: # pragma: no cover
logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
|
Use correct field to get org from
|
Use correct field to get org from
|
Python
|
agpl-3.0
|
harrissoerja/rapidpro,pulilab/rapidpro,pulilab/rapidpro,reyrodrigues/EU-SMS,tsotetsi/textily-web,harrissoerja/rapidpro,tsotetsi/textily-web,pulilab/rapidpro,tsotetsi/textily-web,Thapelo-Tsotetsi/rapidpro,Thapelo-Tsotetsi/rapidpro,ewheeler/rapidpro,praekelt/rapidpro,harrissoerja/rapidpro,praekelt/rapidpro,reyrodrigues/EU-SMS,Thapelo-Tsotetsi/rapidpro,ewheeler/rapidpro,tsotetsi/textily-web,reyrodrigues/EU-SMS,ewheeler/rapidpro,pulilab/rapidpro,tsotetsi/textily-web,praekelt/rapidpro,ewheeler/rapidpro,pulilab/rapidpro,praekelt/rapidpro
|
from __future__ import unicode_literals
from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task
@task(track_started=True, name='check_campaigns_task') # pragma: no cover
def check_campaigns_task(sched_id=None):
"""
See if any event fires need to be triggered
"""
logger = check_campaigns_task.get_logger()
# get a lock
r = get_redis_connection()
key = 'check_campaigns'
# only do this if we aren't already checking campaigns
if not r.get(key):
with r.lock(key, timeout=3600):
# for each that needs to be fired
- for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('event', 'event.org'):
+ for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('contact', 'contact.org'):
try:
- push_task(fire.event.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
+ push_task(fire.contact.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
except: # pragma: no cover
logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
|
Use correct field to get org from
|
## Code Before:
from __future__ import unicode_literals
from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task
@task(track_started=True, name='check_campaigns_task') # pragma: no cover
def check_campaigns_task(sched_id=None):
"""
See if any event fires need to be triggered
"""
logger = check_campaigns_task.get_logger()
# get a lock
r = get_redis_connection()
key = 'check_campaigns'
# only do this if we aren't already checking campaigns
if not r.get(key):
with r.lock(key, timeout=3600):
# for each that needs to be fired
for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('event', 'event.org'):
try:
push_task(fire.event.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
except: # pragma: no cover
logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
## Instruction:
Use correct field to get org from
## Code After:
from __future__ import unicode_literals
from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task
@task(track_started=True, name='check_campaigns_task') # pragma: no cover
def check_campaigns_task(sched_id=None):
"""
See if any event fires need to be triggered
"""
logger = check_campaigns_task.get_logger()
# get a lock
r = get_redis_connection()
key = 'check_campaigns'
# only do this if we aren't already checking campaigns
if not r.get(key):
with r.lock(key, timeout=3600):
# for each that needs to be fired
for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('contact', 'contact.org'):
try:
push_task(fire.contact.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
except: # pragma: no cover
logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
|
// ... existing code ...
# for each that needs to be fired
for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('contact', 'contact.org'):
try:
push_task(fire.contact.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
// ... rest of the code ...
|
13350cdf5598ac0ed55e5404cf6d407300b4c1ac
|
apps/home/forms.py
|
apps/home/forms.py
|
import re
from django import forms
from apps.chat.models import Chats
from django.utils.translation import ugettext as _
class CreateChatForm(forms.Form):
pass
class JoinChatForm(forms.Form):
chat_token = forms.CharField(required=True, max_length=24, label='')
chat_token.widget = forms.TextInput({"maxlength": 24,
"pattern": "[a-z0-9]{24}",
"placeholder": _("please enter your code here..."),
"class": "chat-token"})
user_token = False
def clean_chat_token(self):
"""
Validate chat token
"""
new_chat_token = self.cleaned_data['chat_token']
match = re.search(r'[a-z0-9]{24}', new_chat_token)
if not match:
raise forms.ValidationError(_('Invalid code.'))
self.user_token = Chats.join_to_chat(new_chat_token)
if not self.user_token:
raise forms.ValidationError(_('Invalid code.'))
|
import re
from django import forms
from apps.chat.models import Chats
from django.utils.translation import ugettext as _
class CreateChatForm(forms.Form):
pass
class JoinChatForm(forms.Form):
chat_token = forms.CharField(required=True, max_length=24, label='')
chat_token.widget = forms.TextInput({"maxlength": 24,
"pattern": "[a-z0-9]{24}",
"autocomplete": "off",
"placeholder": _("please enter your code here..."),
"class": "chat-token"})
user_token = False
def clean_chat_token(self):
"""
Validate chat token
"""
new_chat_token = self.cleaned_data['chat_token']
match = re.search(r'[a-z0-9]{24}', new_chat_token)
if not match:
raise forms.ValidationError(_('Invalid code.'))
self.user_token = Chats.join_to_chat(new_chat_token)
if not self.user_token:
raise forms.ValidationError(_('Invalid code.'))
|
Set autocomplete off for chat token form field
|
Set autocomplete off for chat token form field
|
Python
|
bsd-3-clause
|
MySmile/sfchat,MySmile/sfchat,MySmile/sfchat,MySmile/sfchat
|
import re
from django import forms
from apps.chat.models import Chats
from django.utils.translation import ugettext as _
class CreateChatForm(forms.Form):
pass
class JoinChatForm(forms.Form):
chat_token = forms.CharField(required=True, max_length=24, label='')
chat_token.widget = forms.TextInput({"maxlength": 24,
"pattern": "[a-z0-9]{24}",
+ "autocomplete": "off",
"placeholder": _("please enter your code here..."),
"class": "chat-token"})
user_token = False
def clean_chat_token(self):
"""
Validate chat token
"""
new_chat_token = self.cleaned_data['chat_token']
match = re.search(r'[a-z0-9]{24}', new_chat_token)
if not match:
raise forms.ValidationError(_('Invalid code.'))
self.user_token = Chats.join_to_chat(new_chat_token)
if not self.user_token:
raise forms.ValidationError(_('Invalid code.'))
|
Set autocomplete off for chat token form field
|
## Code Before:
import re
from django import forms
from apps.chat.models import Chats
from django.utils.translation import ugettext as _
class CreateChatForm(forms.Form):
pass
class JoinChatForm(forms.Form):
chat_token = forms.CharField(required=True, max_length=24, label='')
chat_token.widget = forms.TextInput({"maxlength": 24,
"pattern": "[a-z0-9]{24}",
"placeholder": _("please enter your code here..."),
"class": "chat-token"})
user_token = False
def clean_chat_token(self):
"""
Validate chat token
"""
new_chat_token = self.cleaned_data['chat_token']
match = re.search(r'[a-z0-9]{24}', new_chat_token)
if not match:
raise forms.ValidationError(_('Invalid code.'))
self.user_token = Chats.join_to_chat(new_chat_token)
if not self.user_token:
raise forms.ValidationError(_('Invalid code.'))
## Instruction:
Set autocomplete off for chat token form field
## Code After:
import re
from django import forms
from apps.chat.models import Chats
from django.utils.translation import ugettext as _
class CreateChatForm(forms.Form):
pass
class JoinChatForm(forms.Form):
chat_token = forms.CharField(required=True, max_length=24, label='')
chat_token.widget = forms.TextInput({"maxlength": 24,
"pattern": "[a-z0-9]{24}",
"autocomplete": "off",
"placeholder": _("please enter your code here..."),
"class": "chat-token"})
user_token = False
def clean_chat_token(self):
"""
Validate chat token
"""
new_chat_token = self.cleaned_data['chat_token']
match = re.search(r'[a-z0-9]{24}', new_chat_token)
if not match:
raise forms.ValidationError(_('Invalid code.'))
self.user_token = Chats.join_to_chat(new_chat_token)
if not self.user_token:
raise forms.ValidationError(_('Invalid code.'))
|
# ... existing code ...
"pattern": "[a-z0-9]{24}",
"autocomplete": "off",
"placeholder": _("please enter your code here..."),
# ... rest of the code ...
|
a8ec60daaee52603a1c3bab879a5eee9f0fd931b
|
ddd/dataobjects/datatype.py
|
ddd/dataobjects/datatype.py
|
'''
Created on 24.04.2016
@author: killian
'''
from objects import dddobject,DataObject
from conversions import DddConversion
@dddobject('datatype')
class DddDatatype(DataObject):
def __init__(self,basetype='',conversion=None,unit='-',constant=False):
self.basetype=basetype
if not conversion:
self.conversion=DddConversion(type='binary',fraction=1)
else:
self.conversion=conversion
self.unit=unit
self.constant=constant
def getJsonDict(self,hashed=False):
tmp = DataObject.getJsonDict(self,False)
tmp.update({'basetype':self.basetype,
'unit':self.unit,
'constant':self.constant,
'conversion':self.conversion})
return tmp
def get_name(self):
return self.basetype.upper()+'_'+self.conversion.get_name()
def accept(self,visitor):
visitor.pre_order(self)
self.conversion.accept(visitor)
visitor.post_order(self)
|
'''
Created on 24.04.2016
@author: killian
'''
from objects import dddobject,DataObject
from conversions import DddConversion
@dddobject('datatype')
class DddDatatype(DataObject):
def __init__(self,basetype='',bitsize=8,signed=False,conversion=None,unit='-',constant=False):
self.basetype=basetype
self.bitsize=bitsize
self.signed=signed
if not conversion:
self.conversion=DddConversion(type='1to1')
else:
self.conversion=conversion
self.unit=unit
self.constant=constant
def getJsonDict(self,hashed=False):
tmp = DataObject.getJsonDict(self,False)
tmp.update({'basetype':self.basetype,
'bitsize':self.bitsize,
'signed':self.signed,
'unit':self.unit,
'constant':self.constant,
'conversion':self.conversion})
return tmp
def get_name(self):
return self.basetype.upper()+'_'+self.conversion.get_name()
def accept(self,visitor):
visitor.pre_order(self)
self.conversion.accept(visitor)
visitor.post_order(self)
|
Split basetype of DddDatatype into basetype,bitsize,signed
|
Split basetype of DddDatatype into basetype,bitsize,signed
|
Python
|
mit
|
toesus/ddd,Sauci/ddd,toesus/ddd,Sauci/ddd,Sauci/ddd
|
'''
Created on 24.04.2016
@author: killian
'''
from objects import dddobject,DataObject
from conversions import DddConversion
@dddobject('datatype')
class DddDatatype(DataObject):
- def __init__(self,basetype='',conversion=None,unit='-',constant=False):
+ def __init__(self,basetype='',bitsize=8,signed=False,conversion=None,unit='-',constant=False):
self.basetype=basetype
+ self.bitsize=bitsize
+ self.signed=signed
if not conversion:
- self.conversion=DddConversion(type='binary',fraction=1)
+ self.conversion=DddConversion(type='1to1')
else:
self.conversion=conversion
self.unit=unit
self.constant=constant
def getJsonDict(self,hashed=False):
tmp = DataObject.getJsonDict(self,False)
tmp.update({'basetype':self.basetype,
+ 'bitsize':self.bitsize,
+ 'signed':self.signed,
'unit':self.unit,
'constant':self.constant,
'conversion':self.conversion})
return tmp
def get_name(self):
return self.basetype.upper()+'_'+self.conversion.get_name()
def accept(self,visitor):
visitor.pre_order(self)
self.conversion.accept(visitor)
visitor.post_order(self)
|
Split basetype of DddDatatype into basetype,bitsize,signed
|
## Code Before:
'''
Created on 24.04.2016
@author: killian
'''
from objects import dddobject,DataObject
from conversions import DddConversion
@dddobject('datatype')
class DddDatatype(DataObject):
def __init__(self,basetype='',conversion=None,unit='-',constant=False):
self.basetype=basetype
if not conversion:
self.conversion=DddConversion(type='binary',fraction=1)
else:
self.conversion=conversion
self.unit=unit
self.constant=constant
def getJsonDict(self,hashed=False):
tmp = DataObject.getJsonDict(self,False)
tmp.update({'basetype':self.basetype,
'unit':self.unit,
'constant':self.constant,
'conversion':self.conversion})
return tmp
def get_name(self):
return self.basetype.upper()+'_'+self.conversion.get_name()
def accept(self,visitor):
visitor.pre_order(self)
self.conversion.accept(visitor)
visitor.post_order(self)
## Instruction:
Split basetype of DddDatatype into basetype,bitsize,signed
## Code After:
'''
Created on 24.04.2016
@author: killian
'''
from objects import dddobject,DataObject
from conversions import DddConversion
@dddobject('datatype')
class DddDatatype(DataObject):
def __init__(self,basetype='',bitsize=8,signed=False,conversion=None,unit='-',constant=False):
self.basetype=basetype
self.bitsize=bitsize
self.signed=signed
if not conversion:
self.conversion=DddConversion(type='1to1')
else:
self.conversion=conversion
self.unit=unit
self.constant=constant
def getJsonDict(self,hashed=False):
tmp = DataObject.getJsonDict(self,False)
tmp.update({'basetype':self.basetype,
'bitsize':self.bitsize,
'signed':self.signed,
'unit':self.unit,
'constant':self.constant,
'conversion':self.conversion})
return tmp
def get_name(self):
return self.basetype.upper()+'_'+self.conversion.get_name()
def accept(self,visitor):
visitor.pre_order(self)
self.conversion.accept(visitor)
visitor.post_order(self)
|
# ... existing code ...
class DddDatatype(DataObject):
def __init__(self,basetype='',bitsize=8,signed=False,conversion=None,unit='-',constant=False):
self.basetype=basetype
self.bitsize=bitsize
self.signed=signed
if not conversion:
self.conversion=DddConversion(type='1to1')
else:
# ... modified code ...
tmp.update({'basetype':self.basetype,
'bitsize':self.bitsize,
'signed':self.signed,
'unit':self.unit,
# ... rest of the code ...
|
9f8f929b8fdc0ebfdb609621f4613d31b73639b0
|
sipa/utils/link_patch.py
|
sipa/utils/link_patch.py
|
import re
from flask import request
from markdown import Markdown
from markdown.extensions import Extension
from markdown.postprocessors import Postprocessor
def absolute_path_replacer(match):
"""Correct the url in a regex match prepending the absolute path"""
assert len(match.groups()) == 2
prefix = request.script_root
if prefix.endswith("/"):
prefix = prefix[:-1]
return "{key}=\"{path}\"".format(
key=match.group(1),
path=prefix + match.group(2)
)
class LinkPostprocessor(Postprocessor):
def run(self, text):
return re.sub(
'(href|src)="(/[^"]*)"',
absolute_path_replacer,
text,
flags=re.IGNORECASE,
)
class AbsoluteLinkExtension(Extension):
""" Add the absolute link patch to Markdown. """
def extendMarkdown(self, md: Markdown):
""" Add an instance of TableProcessor to BlockParser. """
md.postprocessors.register(
LinkPostprocessor(md),
'link_patch',
50,
)
def makeExtension(*args, **kwargs):
return AbsoluteLinkExtension(*args, **kwargs)
|
import re
from flask import request
from markdown import Markdown
from markdown.extensions import Extension
from markdown.postprocessors import Postprocessor
def absolute_path_replacer(match):
"""Correct the url in a regex match prepending the absolute path"""
assert len(match.groups()) == 2
prefix = request.script_root
if prefix.endswith("/"):
prefix = prefix[:-1]
return "{key}=\"{path}\"".format(
key=match.group(1),
path=prefix + match.group(2)
)
class LinkPostprocessor(Postprocessor):
"""A postprocessor fixing absolute links in the HTML result of a markdown render.
This needs to be a postprocessor compared to a treeprocessor, because
the link may be in a pure HTML block. Those blocks however are processed by means
of the [`MarkdownInHtmlExtension`](https://python-markdown.github.io/extensions/md_in_html/),
which replaces HTML by a tag in a preprocessing step and replaces this tag by the HTML
in a postprocessing step.
Therefore, the only way to catch these links is with a postprocessor and a regex.
"""
def run(self, text):
return re.sub(
'(href|src)="(/[^"]*)"',
absolute_path_replacer,
text,
flags=re.IGNORECASE,
)
class AbsoluteLinkExtension(Extension):
""" Add the absolute link patch to Markdown. """
def extendMarkdown(self, md: Markdown):
""" Add an instance of TableProcessor to BlockParser. """
# see https://python-markdown.github.io/extensions/api/#registries for what's happening here
md.postprocessors.register(
LinkPostprocessor(md),
'link_patch',
# we need to run after `raw_html` (prio=30). See `LinkPostprocessor` docstring.
20,
)
def makeExtension(*args, **kwargs):
return AbsoluteLinkExtension(*args, **kwargs)
|
Fix priority of link postprocessor
|
Fix priority of link postprocessor
Fixes #424
|
Python
|
mit
|
agdsn/sipa,agdsn/sipa,agdsn/sipa,agdsn/sipa
|
import re
from flask import request
from markdown import Markdown
from markdown.extensions import Extension
from markdown.postprocessors import Postprocessor
def absolute_path_replacer(match):
"""Correct the url in a regex match prepending the absolute path"""
assert len(match.groups()) == 2
prefix = request.script_root
if prefix.endswith("/"):
prefix = prefix[:-1]
return "{key}=\"{path}\"".format(
key=match.group(1),
path=prefix + match.group(2)
)
class LinkPostprocessor(Postprocessor):
+ """A postprocessor fixing absolute links in the HTML result of a markdown render.
+
+ This needs to be a postprocessor compared to a treeprocessor, because
+ the link may be in a pure HTML block. Those blocks however are processed by means
+ of the [`MarkdownInHtmlExtension`](https://python-markdown.github.io/extensions/md_in_html/),
+ which replaces HTML by a tag in a preprocessing step and replaces this tag by the HTML
+ in a postprocessing step.
+ Therefore, the only way to catch these links is with a postprocessor and a regex.
+ """
def run(self, text):
return re.sub(
'(href|src)="(/[^"]*)"',
absolute_path_replacer,
text,
flags=re.IGNORECASE,
)
class AbsoluteLinkExtension(Extension):
""" Add the absolute link patch to Markdown. """
def extendMarkdown(self, md: Markdown):
""" Add an instance of TableProcessor to BlockParser. """
+ # see https://python-markdown.github.io/extensions/api/#registries for what's happening here
md.postprocessors.register(
LinkPostprocessor(md),
'link_patch',
+ # we need to run after `raw_html` (prio=30). See `LinkPostprocessor` docstring.
- 50,
+ 20,
)
def makeExtension(*args, **kwargs):
return AbsoluteLinkExtension(*args, **kwargs)
|
Fix priority of link postprocessor
|
## Code Before:
import re
from flask import request
from markdown import Markdown
from markdown.extensions import Extension
from markdown.postprocessors import Postprocessor
def absolute_path_replacer(match):
"""Correct the url in a regex match prepending the absolute path"""
assert len(match.groups()) == 2
prefix = request.script_root
if prefix.endswith("/"):
prefix = prefix[:-1]
return "{key}=\"{path}\"".format(
key=match.group(1),
path=prefix + match.group(2)
)
class LinkPostprocessor(Postprocessor):
def run(self, text):
return re.sub(
'(href|src)="(/[^"]*)"',
absolute_path_replacer,
text,
flags=re.IGNORECASE,
)
class AbsoluteLinkExtension(Extension):
""" Add the absolute link patch to Markdown. """
def extendMarkdown(self, md: Markdown):
""" Add an instance of TableProcessor to BlockParser. """
md.postprocessors.register(
LinkPostprocessor(md),
'link_patch',
50,
)
def makeExtension(*args, **kwargs):
return AbsoluteLinkExtension(*args, **kwargs)
## Instruction:
Fix priority of link postprocessor
## Code After:
import re
from flask import request
from markdown import Markdown
from markdown.extensions import Extension
from markdown.postprocessors import Postprocessor
def absolute_path_replacer(match):
"""Correct the url in a regex match prepending the absolute path"""
assert len(match.groups()) == 2
prefix = request.script_root
if prefix.endswith("/"):
prefix = prefix[:-1]
return "{key}=\"{path}\"".format(
key=match.group(1),
path=prefix + match.group(2)
)
class LinkPostprocessor(Postprocessor):
"""A postprocessor fixing absolute links in the HTML result of a markdown render.
This needs to be a postprocessor compared to a treeprocessor, because
the link may be in a pure HTML block. Those blocks however are processed by means
of the [`MarkdownInHtmlExtension`](https://python-markdown.github.io/extensions/md_in_html/),
which replaces HTML by a tag in a preprocessing step and replaces this tag by the HTML
in a postprocessing step.
Therefore, the only way to catch these links is with a postprocessor and a regex.
"""
def run(self, text):
return re.sub(
'(href|src)="(/[^"]*)"',
absolute_path_replacer,
text,
flags=re.IGNORECASE,
)
class AbsoluteLinkExtension(Extension):
""" Add the absolute link patch to Markdown. """
def extendMarkdown(self, md: Markdown):
""" Add an instance of TableProcessor to BlockParser. """
# see https://python-markdown.github.io/extensions/api/#registries for what's happening here
md.postprocessors.register(
LinkPostprocessor(md),
'link_patch',
# we need to run after `raw_html` (prio=30). See `LinkPostprocessor` docstring.
20,
)
def makeExtension(*args, **kwargs):
return AbsoluteLinkExtension(*args, **kwargs)
|
# ... existing code ...
class LinkPostprocessor(Postprocessor):
"""A postprocessor fixing absolute links in the HTML result of a markdown render.
This needs to be a postprocessor compared to a treeprocessor, because
the link may be in a pure HTML block. Those blocks however are processed by means
of the [`MarkdownInHtmlExtension`](https://python-markdown.github.io/extensions/md_in_html/),
which replaces HTML by a tag in a preprocessing step and replaces this tag by the HTML
in a postprocessing step.
Therefore, the only way to catch these links is with a postprocessor and a regex.
"""
def run(self, text):
# ... modified code ...
""" Add an instance of TableProcessor to BlockParser. """
# see https://python-markdown.github.io/extensions/api/#registries for what's happening here
md.postprocessors.register(
...
'link_patch',
# we need to run after `raw_html` (prio=30). See `LinkPostprocessor` docstring.
20,
)
# ... rest of the code ...
|
cf44260d057e289a089c1c3c440e5f64366facfa
|
scraping/urls/scrape_fish.py
|
scraping/urls/scrape_fish.py
|
import pandas as pd
from subprocess import check_output
import sys
fish_df = pd.read_csv(sys.argv[1],names=['fish'])
url_dict = {}
for fish in fish_df.fish:
output = check_output(['node','scrape_image_urls.js',fish,sys.argv[2]])
splits = str(output).replace('\\n','').split(' url: ')
urls = [s.split(', width')[0][1:-1] for s in splits[1:]]
url_dict[fish] = urls
url_df = pd.DataFrame(url_dict)
url_df.to_csv(sys.argv[3], sep='|', index=False)
|
import pandas as pd
from subprocess import check_output
import sys
fish_df = pd.read_csv(sys.argv[1],names=['fish'])
dfs = []
for fish in fish_df.fish:
output = check_output(['node','scrape_image_urls.js',fish + ' fish',sys.argv[2]])
splits = str(output).replace('\\n','').split(' url: ')
urls = [s.split(', width')[0][1:-1] for s in splits[1:]]
dfs.append(pd.DataFrame({'fish': fish, 'url': urls}))
out_df = pd.concat(dfs)
out_df.to_csv(sys.argv[3], sep='|', index=False)
|
Update scrape fish to handle different size url sets and to specify to google that we want fish
|
Update scrape fish to handle different size url sets and to specify to google that we want fish
|
Python
|
mit
|
matthew-sochor/fish.io.ai,matthew-sochor/fish.io.ai,matthew-sochor/fish.io.ai,matthew-sochor/fish.io.ai
|
import pandas as pd
from subprocess import check_output
import sys
fish_df = pd.read_csv(sys.argv[1],names=['fish'])
- url_dict = {}
+ dfs = []
for fish in fish_df.fish:
- output = check_output(['node','scrape_image_urls.js',fish,sys.argv[2]])
+ output = check_output(['node','scrape_image_urls.js',fish + ' fish',sys.argv[2]])
splits = str(output).replace('\\n','').split(' url: ')
urls = [s.split(', width')[0][1:-1] for s in splits[1:]]
- url_dict[fish] = urls
+ dfs.append(pd.DataFrame({'fish': fish, 'url': urls}))
- url_df = pd.DataFrame(url_dict)
+ out_df = pd.concat(dfs)
- url_df.to_csv(sys.argv[3], sep='|', index=False)
+ out_df.to_csv(sys.argv[3], sep='|', index=False)
+
|
Update scrape fish to handle different size url sets and to specify to google that we want fish
|
## Code Before:
import pandas as pd
from subprocess import check_output
import sys
fish_df = pd.read_csv(sys.argv[1],names=['fish'])
url_dict = {}
for fish in fish_df.fish:
output = check_output(['node','scrape_image_urls.js',fish,sys.argv[2]])
splits = str(output).replace('\\n','').split(' url: ')
urls = [s.split(', width')[0][1:-1] for s in splits[1:]]
url_dict[fish] = urls
url_df = pd.DataFrame(url_dict)
url_df.to_csv(sys.argv[3], sep='|', index=False)
## Instruction:
Update scrape fish to handle different size url sets and to specify to google that we want fish
## Code After:
import pandas as pd
from subprocess import check_output
import sys
fish_df = pd.read_csv(sys.argv[1],names=['fish'])
dfs = []
for fish in fish_df.fish:
output = check_output(['node','scrape_image_urls.js',fish + ' fish',sys.argv[2]])
splits = str(output).replace('\\n','').split(' url: ')
urls = [s.split(', width')[0][1:-1] for s in splits[1:]]
dfs.append(pd.DataFrame({'fish': fish, 'url': urls}))
out_df = pd.concat(dfs)
out_df.to_csv(sys.argv[3], sep='|', index=False)
|
// ... existing code ...
dfs = []
for fish in fish_df.fish:
output = check_output(['node','scrape_image_urls.js',fish + ' fish',sys.argv[2]])
splits = str(output).replace('\\n','').split(' url: ')
// ... modified code ...
urls = [s.split(', width')[0][1:-1] for s in splits[1:]]
dfs.append(pd.DataFrame({'fish': fish, 'url': urls}))
out_df = pd.concat(dfs)
out_df.to_csv(sys.argv[3], sep='|', index=False)
// ... rest of the code ...
|
c9215a00bfe8d1edaf2840f6cd4b3ae8061c26f5
|
allauth_uwum/provider.py
|
allauth_uwum/provider.py
|
"""The UWUM (Unified WeGovNow User Management) provider."""
from allauth.socialaccount import app_settings
from allauth.socialaccount.providers import registry
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class UWUMAccount(ProviderAccount):
"""The UWUM provider account."""
pass
class UWUMProvider(OAuth2Provider):
"""The UWUM OAuth2 provider."""
id = 'uwum'
name = 'UWUM'
settings = app_settings.PROVIDERS.get(id, {})
account_class = UWUMAccount
def get_default_scope(self):
"""Get the default UWUM scope."""
return ['authentication', 'notify_email']
def extract_uid(self, data):
"""Extract the unique user (UWUM member) identification number."""
member = data.get('member', {})
return str(member.get('id'))
def extract_common_fields(self, data):
"""Extract the common fields for the user (UWUM member)."""
member = data.get('member', {})
return {'username': member.get('name'), 'email': member.get('email')}
registry.register(UWUMProvider)
|
"""The UWUM (Unified WeGovNow User Management) provider."""
from allauth.socialaccount import app_settings
from allauth.socialaccount.providers import registry
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class UWUMAccount(ProviderAccount):
"""The UWUM provider account."""
pass
class UWUMProvider(OAuth2Provider):
"""The UWUM OAuth2 provider."""
id = 'uwum'
name = 'UWUM'
settings = app_settings.PROVIDERS.get(id, {})
account_class = UWUMAccount
def get_default_scope(self):
"""Get the default UWUM scope."""
default_scope = ['authentication']
if app_settings.QUERY_EMAIL:
default_scope.append('notify_email')
return default_scope
def extract_uid(self, data):
"""Extract the unique user (UWUM member) identification number."""
member = data.get('member', {})
return str(member.get('id'))
def extract_common_fields(self, data):
"""Extract the common fields for the user (UWUM member)."""
member = data.get('member', {})
return {'username': member.get('name'), 'email': member.get('email')}
registry.register(UWUMProvider)
|
Set "notify_email" in default scope only if settings allow it
|
Set "notify_email" in default scope only if settings allow it
|
Python
|
mit
|
ExCiteS/django-allauth-uwum
|
"""The UWUM (Unified WeGovNow User Management) provider."""
from allauth.socialaccount import app_settings
from allauth.socialaccount.providers import registry
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class UWUMAccount(ProviderAccount):
"""The UWUM provider account."""
pass
class UWUMProvider(OAuth2Provider):
"""The UWUM OAuth2 provider."""
id = 'uwum'
name = 'UWUM'
settings = app_settings.PROVIDERS.get(id, {})
account_class = UWUMAccount
def get_default_scope(self):
"""Get the default UWUM scope."""
- return ['authentication', 'notify_email']
+ default_scope = ['authentication']
+
+ if app_settings.QUERY_EMAIL:
+ default_scope.append('notify_email')
+
+ return default_scope
def extract_uid(self, data):
"""Extract the unique user (UWUM member) identification number."""
member = data.get('member', {})
return str(member.get('id'))
def extract_common_fields(self, data):
"""Extract the common fields for the user (UWUM member)."""
member = data.get('member', {})
return {'username': member.get('name'), 'email': member.get('email')}
registry.register(UWUMProvider)
|
Set "notify_email" in default scope only if settings allow it
|
## Code Before:
"""The UWUM (Unified WeGovNow User Management) provider."""
from allauth.socialaccount import app_settings
from allauth.socialaccount.providers import registry
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class UWUMAccount(ProviderAccount):
"""The UWUM provider account."""
pass
class UWUMProvider(OAuth2Provider):
"""The UWUM OAuth2 provider."""
id = 'uwum'
name = 'UWUM'
settings = app_settings.PROVIDERS.get(id, {})
account_class = UWUMAccount
def get_default_scope(self):
"""Get the default UWUM scope."""
return ['authentication', 'notify_email']
def extract_uid(self, data):
"""Extract the unique user (UWUM member) identification number."""
member = data.get('member', {})
return str(member.get('id'))
def extract_common_fields(self, data):
"""Extract the common fields for the user (UWUM member)."""
member = data.get('member', {})
return {'username': member.get('name'), 'email': member.get('email')}
registry.register(UWUMProvider)
## Instruction:
Set "notify_email" in default scope only if settings allow it
## Code After:
"""The UWUM (Unified WeGovNow User Management) provider."""
from allauth.socialaccount import app_settings
from allauth.socialaccount.providers import registry
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class UWUMAccount(ProviderAccount):
"""The UWUM provider account."""
pass
class UWUMProvider(OAuth2Provider):
"""The UWUM OAuth2 provider."""
id = 'uwum'
name = 'UWUM'
settings = app_settings.PROVIDERS.get(id, {})
account_class = UWUMAccount
def get_default_scope(self):
"""Get the default UWUM scope."""
default_scope = ['authentication']
if app_settings.QUERY_EMAIL:
default_scope.append('notify_email')
return default_scope
def extract_uid(self, data):
"""Extract the unique user (UWUM member) identification number."""
member = data.get('member', {})
return str(member.get('id'))
def extract_common_fields(self, data):
"""Extract the common fields for the user (UWUM member)."""
member = data.get('member', {})
return {'username': member.get('name'), 'email': member.get('email')}
registry.register(UWUMProvider)
|
// ... existing code ...
"""Get the default UWUM scope."""
default_scope = ['authentication']
if app_settings.QUERY_EMAIL:
default_scope.append('notify_email')
return default_scope
// ... rest of the code ...
|
3771d3165d4873592f53d8b2401806297fe2989f
|
door/models.py
|
door/models.py
|
from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField(default=timezone.now)
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField(default=timezone.now)
closed = models.DateTimeField(default=timezone.now)
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
|
from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField()
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField()
closed = models.DateTimeField()
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
|
Remove default datetime in door
|
Remove default datetime in door
|
Python
|
mit
|
hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website
|
from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
- datetime = models.DateTimeField(default=timezone.now)
+ datetime = models.DateTimeField()
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
- opened = models.DateTimeField(default=timezone.now)
+ opened = models.DateTimeField()
- closed = models.DateTimeField(default=timezone.now)
+ closed = models.DateTimeField()
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
|
Remove default datetime in door
|
## Code Before:
from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField(default=timezone.now)
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField(default=timezone.now)
closed = models.DateTimeField(default=timezone.now)
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
## Instruction:
Remove default datetime in door
## Code After:
from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField()
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField()
closed = models.DateTimeField()
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
|
// ... existing code ...
class DoorStatus(models.Model):
datetime = models.DateTimeField()
status = models.BooleanField(default=False)
// ... modified code ...
class OpenData(models.Model):
opened = models.DateTimeField()
closed = models.DateTimeField()
total = models.IntegerField(default=0)
// ... rest of the code ...
|
4b46ecb6304527b38d0c2f8951b996f8d28f0bff
|
config/freetype2/__init__.py
|
config/freetype2/__init__.py
|
import os
from SCons.Script import *
def configure(conf):
env = conf.env
conf.CBCheckHome('freetype2',
inc_suffix=['/include', '/include/freetype2'])
if not 'FREETYPE2_INCLUDE' in os.environ:
try:
env.ParseConfig('pkg-config freetype2 --cflags')
except OSError:
pass
if env['PLATFORM'] == 'darwin' or int(env.get('cross_osx', 0)):
if not conf.CheckOSXFramework('CoreServices'):
raise Exception('Need CoreServices framework')
conf.CBRequireCHeader('ft2build.h')
conf.CBRequireLib('freetype')
conf.CBConfig('zlib')
conf.CBCheckLib('png')
return True
def generate(env):
env.CBAddConfigTest('freetype2', configure)
env.CBLoadTools('osx zlib')
def exists():
return 1
|
import os
from SCons.Script import *
def configure(conf):
env = conf.env
conf.CBCheckHome('freetype2',
inc_suffix=['/include', '/include/freetype2'])
if not 'FREETYPE2_INCLUDE' in os.environ:
try:
env.ParseConfig('pkg-config freetype2 --cflags')
except OSError:
try:
env.ParseConfig('freetype-config --cflags')
except OSError:
pass
if env['PLATFORM'] == 'darwin' or int(env.get('cross_osx', 0)):
if not conf.CheckOSXFramework('CoreServices'):
raise Exception('Need CoreServices framework')
conf.CBRequireCHeader('ft2build.h')
conf.CBRequireLib('freetype')
conf.CBConfig('zlib')
conf.CBCheckLib('png')
return True
def generate(env):
env.CBAddConfigTest('freetype2', configure)
env.CBLoadTools('osx zlib')
def exists():
return 1
|
Add fallback to freetype-config for compatibility.
|
Add fallback to freetype-config for compatibility.
|
Python
|
lgpl-2.1
|
CauldronDevelopmentLLC/cbang,CauldronDevelopmentLLC/cbang,CauldronDevelopmentLLC/cbang,CauldronDevelopmentLLC/cbang
|
import os
from SCons.Script import *
def configure(conf):
env = conf.env
conf.CBCheckHome('freetype2',
inc_suffix=['/include', '/include/freetype2'])
if not 'FREETYPE2_INCLUDE' in os.environ:
try:
env.ParseConfig('pkg-config freetype2 --cflags')
except OSError:
+ try:
+ env.ParseConfig('freetype-config --cflags')
+ except OSError:
- pass
+ pass
+
if env['PLATFORM'] == 'darwin' or int(env.get('cross_osx', 0)):
if not conf.CheckOSXFramework('CoreServices'):
raise Exception('Need CoreServices framework')
conf.CBRequireCHeader('ft2build.h')
conf.CBRequireLib('freetype')
conf.CBConfig('zlib')
conf.CBCheckLib('png')
return True
def generate(env):
env.CBAddConfigTest('freetype2', configure)
env.CBLoadTools('osx zlib')
def exists():
return 1
|
Add fallback to freetype-config for compatibility.
|
## Code Before:
import os
from SCons.Script import *
def configure(conf):
env = conf.env
conf.CBCheckHome('freetype2',
inc_suffix=['/include', '/include/freetype2'])
if not 'FREETYPE2_INCLUDE' in os.environ:
try:
env.ParseConfig('pkg-config freetype2 --cflags')
except OSError:
pass
if env['PLATFORM'] == 'darwin' or int(env.get('cross_osx', 0)):
if not conf.CheckOSXFramework('CoreServices'):
raise Exception('Need CoreServices framework')
conf.CBRequireCHeader('ft2build.h')
conf.CBRequireLib('freetype')
conf.CBConfig('zlib')
conf.CBCheckLib('png')
return True
def generate(env):
env.CBAddConfigTest('freetype2', configure)
env.CBLoadTools('osx zlib')
def exists():
return 1
## Instruction:
Add fallback to freetype-config for compatibility.
## Code After:
import os
from SCons.Script import *
def configure(conf):
env = conf.env
conf.CBCheckHome('freetype2',
inc_suffix=['/include', '/include/freetype2'])
if not 'FREETYPE2_INCLUDE' in os.environ:
try:
env.ParseConfig('pkg-config freetype2 --cflags')
except OSError:
try:
env.ParseConfig('freetype-config --cflags')
except OSError:
pass
if env['PLATFORM'] == 'darwin' or int(env.get('cross_osx', 0)):
if not conf.CheckOSXFramework('CoreServices'):
raise Exception('Need CoreServices framework')
conf.CBRequireCHeader('ft2build.h')
conf.CBRequireLib('freetype')
conf.CBConfig('zlib')
conf.CBCheckLib('png')
return True
def generate(env):
env.CBAddConfigTest('freetype2', configure)
env.CBLoadTools('osx zlib')
def exists():
return 1
|
// ... existing code ...
except OSError:
try:
env.ParseConfig('freetype-config --cflags')
except OSError:
pass
// ... rest of the code ...
|
e2c92e8b6e8fb10addc73986914014b278598470
|
spotpy/examples/spot_setup_standardnormal.py
|
spotpy/examples/spot_setup_standardnormal.py
|
'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Rosenbrock function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction
|
'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Standard Normal function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction
|
Fix docstring in standardnormal example
|
Fix docstring in standardnormal example
|
Python
|
mit
|
bees4ever/spotpy,bees4ever/spotpy,bees4ever/spotpy,thouska/spotpy,thouska/spotpy,thouska/spotpy
|
'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
- This example implements the Rosenbrock function into SPOT.
+ This example implements the Standard Normal function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction
|
Fix docstring in standardnormal example
|
## Code Before:
'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Rosenbrock function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction
## Instruction:
Fix docstring in standardnormal example
## Code After:
'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Standard Normal function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction
|
# ... existing code ...
This example implements the Standard Normal function into SPOT.
'''
# ... rest of the code ...
|
1b5fc874924958664797ba2f1e73835b4cbcef57
|
mfr/__init__.py
|
mfr/__init__.py
|
"""The mfr core module."""
# -*- coding: utf-8 -*-
import os
__version__ = '0.1.0-alpha'
__author__ = 'Center for Open Science'
from mfr.core import (render, detect, FileHandler, get_file_extension,
register_filehandler, export, get_file_exporters,
config, collect_static
)
from mfr._config import Config
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
|
"""The mfr core module."""
# -*- coding: utf-8 -*-
import os
__version__ = '0.1.0-alpha'
__author__ = 'Center for Open Science'
from mfr.core import (
render,
detect,
FileHandler,
get_file_extension,
register_filehandler,
export,
get_file_exporters,
config,
collect_static,
RenderResult,
)
from mfr._config import Config
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
|
Add RenderResult to mfr namespace
|
Add RenderResult to mfr namespace
|
Python
|
apache-2.0
|
TomBaxter/modular-file-renderer,chrisseto/modular-file-renderer,mfraezz/modular-file-renderer,haoyuchen1992/modular-file-renderer,haoyuchen1992/modular-file-renderer,erinspace/modular-file-renderer,rdhyee/modular-file-renderer,CenterForOpenScience/modular-file-renderer,icereval/modular-file-renderer,TomBaxter/modular-file-renderer,erinspace/modular-file-renderer,Johnetordoff/modular-file-renderer,TomBaxter/modular-file-renderer,AddisonSchiller/modular-file-renderer,AddisonSchiller/modular-file-renderer,icereval/modular-file-renderer,Johnetordoff/modular-file-renderer,haoyuchen1992/modular-file-renderer,felliott/modular-file-renderer,Johnetordoff/modular-file-renderer,chrisseto/modular-file-renderer,felliott/modular-file-renderer,icereval/modular-file-renderer,CenterForOpenScience/modular-file-renderer,felliott/modular-file-renderer,haoyuchen1992/modular-file-renderer,mfraezz/modular-file-renderer,mfraezz/modular-file-renderer,mfraezz/modular-file-renderer,chrisseto/modular-file-renderer,AddisonSchiller/modular-file-renderer,rdhyee/modular-file-renderer,AddisonSchiller/modular-file-renderer,CenterForOpenScience/modular-file-renderer,erinspace/modular-file-renderer,rdhyee/modular-file-renderer,CenterForOpenScience/modular-file-renderer,Johnetordoff/modular-file-renderer,felliott/modular-file-renderer,TomBaxter/modular-file-renderer,rdhyee/modular-file-renderer
|
"""The mfr core module."""
# -*- coding: utf-8 -*-
import os
__version__ = '0.1.0-alpha'
__author__ = 'Center for Open Science'
- from mfr.core import (render, detect, FileHandler, get_file_extension,
- register_filehandler, export, get_file_exporters,
+ from mfr.core import (
+ render,
+ detect,
+ FileHandler,
+ get_file_extension,
+ register_filehandler,
+ export,
+ get_file_exporters,
+ config,
- config, collect_static
+ collect_static,
+ RenderResult,
)
from mfr._config import Config
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
|
Add RenderResult to mfr namespace
|
## Code Before:
"""The mfr core module."""
# -*- coding: utf-8 -*-
import os
__version__ = '0.1.0-alpha'
__author__ = 'Center for Open Science'
from mfr.core import (render, detect, FileHandler, get_file_extension,
register_filehandler, export, get_file_exporters,
config, collect_static
)
from mfr._config import Config
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
## Instruction:
Add RenderResult to mfr namespace
## Code After:
"""The mfr core module."""
# -*- coding: utf-8 -*-
import os
__version__ = '0.1.0-alpha'
__author__ = 'Center for Open Science'
from mfr.core import (
render,
detect,
FileHandler,
get_file_extension,
register_filehandler,
export,
get_file_exporters,
config,
collect_static,
RenderResult,
)
from mfr._config import Config
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
|
// ... existing code ...
from mfr.core import (
render,
detect,
FileHandler,
get_file_extension,
register_filehandler,
export,
get_file_exporters,
config,
collect_static,
RenderResult,
)
// ... rest of the code ...
|
209c0d0201b76a0f2db7d8b507b2eaa2df03fcae
|
lib/stats.py
|
lib/stats.py
|
from numpy import exp
from scipy.stats import rv_continuous
from scipy.special import gamma
class grw_gen(rv_continuous):
"""
Generalized Reverse Weibull distribution.
PDF:
a/gamma(g) * x^(a*g-1) * exp(-x^a)
for x,a,g >= 0
"""
def _pdf(self,x,a,g):
return a/gamma(g) * pow(x,a*g-1) * exp(-pow(x,a))
def _fitstart(self,data):
return (2.0,1.0,0.0,0.02)
grw = grw_gen(a=0.0, name='grw', shapes='a,g')
|
import numpy as np
from scipy.stats import gengamma, norm
"""
Set default starting parameters for fitting a generalized gamma distribution.
These parameters are sensible for ATLAS v_n distributions.
Order: (a, c, loc, scale) where a,c are shape params.
"""
gengamma._fitstart = lambda data: (1.0, 2.0, 0.0, 0.1)
def fit_file(fname,dist='gengamma',**kwargs):
"""
Fit a distribution to each column of a data file.
Arguments
---------
fname -- file name or object containing data columns to fit
dist -- distribution to fit, either 'gengamma' (default) or 'norm'
kwargs -- for np.loadtxt, except 'unpack' or 'ndmin' are ignored
Returns
-------
iterable of MLE parameters:
params_0, ... , params_N
for each column, where params are tuples of the form
(*shapes, loc, scale)
as produced by scipy.stats.rv_continuous.fit
"""
# remove 'unpack' and 'ndmin' kwargs if set
for key in ['unpack','ndmin']:
try:
del kwargs[key]
except KeyError:
pass
# read file
cols = np.loadtxt(fname,unpack=True,ndmin=2,**kwargs)
# set fitting distribution
try:
dist = eval(dist)
except NameError:
raise ValueError('invalid distribution: ' + dist)
return (dist.fit(c) for c in cols)
|
Replace custom GRW dist with scipy gengamma. Implement file fitting function.
|
Replace custom GRW dist with scipy gengamma. Implement file fitting function.
|
Python
|
mit
|
jbernhard/ebe-analysis
|
+ import numpy as np
- from numpy import exp
- from scipy.stats import rv_continuous
- from scipy.special import gamma
+ from scipy.stats import gengamma, norm
- class grw_gen(rv_continuous):
+ """
+ Set default starting parameters for fitting a generalized gamma distribution.
+
+ These parameters are sensible for ATLAS v_n distributions.
+
+ Order: (a, c, loc, scale) where a,c are shape params.
+ """
+ gengamma._fitstart = lambda data: (1.0, 2.0, 0.0, 0.1)
+
+
+ def fit_file(fname,dist='gengamma',**kwargs):
"""
- Generalized Reverse Weibull distribution.
+ Fit a distribution to each column of a data file.
- PDF:
-
- a/gamma(g) * x^(a*g-1) * exp(-x^a)
+ Arguments
+ ---------
+ fname -- file name or object containing data columns to fit
+ dist -- distribution to fit, either 'gengamma' (default) or 'norm'
+ kwargs -- for np.loadtxt, except 'unpack' or 'ndmin' are ignored
- for x,a,g >= 0
+ Returns
+ -------
+ iterable of MLE parameters:
+
+ params_0, ... , params_N
+
+ for each column, where params are tuples of the form
+
+ (*shapes, loc, scale)
+
+ as produced by scipy.stats.rv_continuous.fit
"""
- def _pdf(self,x,a,g):
- return a/gamma(g) * pow(x,a*g-1) * exp(-pow(x,a))
+ # remove 'unpack' and 'ndmin' kwargs if set
+ for key in ['unpack','ndmin']:
+ try:
+ del kwargs[key]
+ except KeyError:
+ pass
- def _fitstart(self,data):
- return (2.0,1.0,0.0,0.02)
+ # read file
+ cols = np.loadtxt(fname,unpack=True,ndmin=2,**kwargs)
- grw = grw_gen(a=0.0, name='grw', shapes='a,g')
+ # set fitting distribution
+ try:
+ dist = eval(dist)
+ except NameError:
+ raise ValueError('invalid distribution: ' + dist)
+ return (dist.fit(c) for c in cols)
+
|
Replace custom GRW dist with scipy gengamma. Implement file fitting function.
|
## Code Before:
from numpy import exp
from scipy.stats import rv_continuous
from scipy.special import gamma
class grw_gen(rv_continuous):
"""
Generalized Reverse Weibull distribution.
PDF:
a/gamma(g) * x^(a*g-1) * exp(-x^a)
for x,a,g >= 0
"""
def _pdf(self,x,a,g):
return a/gamma(g) * pow(x,a*g-1) * exp(-pow(x,a))
def _fitstart(self,data):
return (2.0,1.0,0.0,0.02)
grw = grw_gen(a=0.0, name='grw', shapes='a,g')
## Instruction:
Replace custom GRW dist with scipy gengamma. Implement file fitting function.
## Code After:
import numpy as np
from scipy.stats import gengamma, norm
"""
Set default starting parameters for fitting a generalized gamma distribution.
These parameters are sensible for ATLAS v_n distributions.
Order: (a, c, loc, scale) where a,c are shape params.
"""
gengamma._fitstart = lambda data: (1.0, 2.0, 0.0, 0.1)
def fit_file(fname,dist='gengamma',**kwargs):
"""
Fit a distribution to each column of a data file.
Arguments
---------
fname -- file name or object containing data columns to fit
dist -- distribution to fit, either 'gengamma' (default) or 'norm'
kwargs -- for np.loadtxt, except 'unpack' or 'ndmin' are ignored
Returns
-------
iterable of MLE parameters:
params_0, ... , params_N
for each column, where params are tuples of the form
(*shapes, loc, scale)
as produced by scipy.stats.rv_continuous.fit
"""
# remove 'unpack' and 'ndmin' kwargs if set
for key in ['unpack','ndmin']:
try:
del kwargs[key]
except KeyError:
pass
# read file
cols = np.loadtxt(fname,unpack=True,ndmin=2,**kwargs)
# set fitting distribution
try:
dist = eval(dist)
except NameError:
raise ValueError('invalid distribution: ' + dist)
return (dist.fit(c) for c in cols)
|
...
import numpy as np
from scipy.stats import gengamma, norm
...
"""
Set default starting parameters for fitting a generalized gamma distribution.
These parameters are sensible for ATLAS v_n distributions.
Order: (a, c, loc, scale) where a,c are shape params.
"""
gengamma._fitstart = lambda data: (1.0, 2.0, 0.0, 0.1)
def fit_file(fname,dist='gengamma',**kwargs):
"""
Fit a distribution to each column of a data file.
Arguments
---------
fname -- file name or object containing data columns to fit
dist -- distribution to fit, either 'gengamma' (default) or 'norm'
kwargs -- for np.loadtxt, except 'unpack' or 'ndmin' are ignored
Returns
-------
iterable of MLE parameters:
params_0, ... , params_N
for each column, where params are tuples of the form
(*shapes, loc, scale)
as produced by scipy.stats.rv_continuous.fit
...
# remove 'unpack' and 'ndmin' kwargs if set
for key in ['unpack','ndmin']:
try:
del kwargs[key]
except KeyError:
pass
# read file
cols = np.loadtxt(fname,unpack=True,ndmin=2,**kwargs)
# set fitting distribution
try:
dist = eval(dist)
except NameError:
raise ValueError('invalid distribution: ' + dist)
return (dist.fit(c) for c in cols)
...
|
cbdfc1b1cb4162256538576cabe2b6832aa83bca
|
django_mysqlpool/__init__.py
|
django_mysqlpool/__init__.py
|
from functools import wraps
from django.db import connection
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
finally:
connection.close()
return wrapper
|
from functools import wraps
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
from django.db import connection
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
finally:
connection.close()
return wrapper
|
Fix circular import when used with other add-ons that import django.db
|
Fix circular import when used with other add-ons that import django.db
eg sorl_thumbnail:
Traceback (most recent call last):
File "/home/rpatterson/src/work/retrans/src/ReTransDjango/bin/manage", line 40, in <module>
sys.exit(manage.main())
File "/home/rpatterson/src/work/retrans/src/ReTransDjango/retrans/manage.py", line 15, in main
execute_manager(settings)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/__init__.py", line 438, in execute_manager
utility.execute()
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/__init__.py", line 379, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/base.py", line 191, in run_from_argv
self.execute(*args, **options.__dict__)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/base.py", line 209, in execute
translation.activate('en-us')
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/__init__.py", line 100, in activate
return _trans.activate(language)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/trans_real.py", line 202, in activate
_active.value = translation(language)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/trans_real.py", line 185, in translation
default_translation = _fetch(settings.LANGUAGE_CODE)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/trans_real.py", line 162, in _fetch
app = import_module(appname)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/importlib.py", line 35, in import_module
__import__(name)
File "/opt/src/eggs/sorl_thumbnail-11.12-py2.7.egg/sorl/thumbnail/__init__.py", line 1, in <module>
from sorl.thumbnail.fields import ImageField
File "/opt/src/eggs/sorl_thumbnail-11.12-py2.7.egg/sorl/thumbnail/fields.py", line 2, in <module>
from django.db import models
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/db/__init__.py", line 78, in <module>
connection = connections[DEFAULT_DB_ALIAS]
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/db/utils.py", line 94, in __getitem__
backend = load_backend(db['ENGINE'])
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/db/utils.py", line 47, in load_backend
if backend_name not in available_backends:
django.core.exceptions.ImproperlyConfigured: 'django_mysqlpool.backends.mysqlpool' isn't an available database backend.
Try using django.db.backends.XXX, where XXX is one of:
'dummy', 'mysql', 'oracle', 'postgresql', 'postgresql_psycopg2', 'sqlite3'
Error was: cannot import name connection
|
Python
|
mit
|
smartfile/django-mysqlpool
|
from functools import wraps
- from django.db import connection
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
+ from django.db import connection
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
finally:
connection.close()
return wrapper
|
Fix circular import when used with other add-ons that import django.db
|
## Code Before:
from functools import wraps
from django.db import connection
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
finally:
connection.close()
return wrapper
## Instruction:
Fix circular import when used with other add-ons that import django.db
## Code After:
from functools import wraps
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
from django.db import connection
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
finally:
connection.close()
return wrapper
|
// ... existing code ...
from functools import wraps
// ... modified code ...
"Ensures the database connection is closed when the function returns."
from django.db import connection
@wraps(f)
// ... rest of the code ...
|
924bee7b0a8b11aa0f1506584966533924d29478
|
django_hash_filter/templatetags/hash_filter.py
|
django_hash_filter/templatetags/hash_filter.py
|
from django import template
from django.template.defaultfilters import stringfilter
from django.template.base import TemplateSyntaxError
import hashlib
from django_hash_filter.templatetags import get_available_hashes
register = template.Library()
@register.filter
@stringfilter
def hash(value, arg):
"""
Returns a hex-digest of the passed in value for the hash algorithm given.
"""
arg = str(arg).lower()
if not arg in get_available_hashes():
raise TemplateSyntaxError("The %s hash algorithm does not exist." % arg)
try:
f = getattr(hashlib, arg)
hashed = f(value).hexdigest()
except Exception:
raise ValueError("The %s hash algorithm cannot produce a hex digest. Ensure that OpenSSL is properly installed." % arg)
return hashed
|
from django import template
from django.template.defaultfilters import stringfilter
from django.template.base import TemplateSyntaxError
import hashlib
from django_hash_filter.templatetags import get_available_hashes
register = template.Library()
@register.filter
@stringfilter
def hash(value, arg):
"""
Returns a hex-digest of the passed in value for the hash algorithm given.
"""
arg = str(arg).lower()
if not arg in get_available_hashes():
raise TemplateSyntaxError("The %s hash algorithm does not exist. Supported algorithms are: %" % (arg, get_available_hashes()))
try:
f = getattr(hashlib, arg)
hashed = f(value).hexdigest()
except Exception:
raise ValueError("The %s hash algorithm cannot produce a hex digest. Ensure that OpenSSL is properly installed." % arg)
return hashed
|
Add helpful text to template error
|
Add helpful text to template error
|
Python
|
mit
|
andrewjsledge/django-hash-filter
|
from django import template
from django.template.defaultfilters import stringfilter
from django.template.base import TemplateSyntaxError
import hashlib
from django_hash_filter.templatetags import get_available_hashes
register = template.Library()
@register.filter
@stringfilter
def hash(value, arg):
"""
Returns a hex-digest of the passed in value for the hash algorithm given.
"""
arg = str(arg).lower()
if not arg in get_available_hashes():
- raise TemplateSyntaxError("The %s hash algorithm does not exist." % arg)
+ raise TemplateSyntaxError("The %s hash algorithm does not exist. Supported algorithms are: %" % (arg, get_available_hashes()))
try:
f = getattr(hashlib, arg)
hashed = f(value).hexdigest()
except Exception:
raise ValueError("The %s hash algorithm cannot produce a hex digest. Ensure that OpenSSL is properly installed." % arg)
return hashed
+
|
Add helpful text to template error
|
## Code Before:
from django import template
from django.template.defaultfilters import stringfilter
from django.template.base import TemplateSyntaxError
import hashlib
from django_hash_filter.templatetags import get_available_hashes
register = template.Library()
@register.filter
@stringfilter
def hash(value, arg):
"""
Returns a hex-digest of the passed in value for the hash algorithm given.
"""
arg = str(arg).lower()
if not arg in get_available_hashes():
raise TemplateSyntaxError("The %s hash algorithm does not exist." % arg)
try:
f = getattr(hashlib, arg)
hashed = f(value).hexdigest()
except Exception:
raise ValueError("The %s hash algorithm cannot produce a hex digest. Ensure that OpenSSL is properly installed." % arg)
return hashed
## Instruction:
Add helpful text to template error
## Code After:
from django import template
from django.template.defaultfilters import stringfilter
from django.template.base import TemplateSyntaxError
import hashlib
from django_hash_filter.templatetags import get_available_hashes
register = template.Library()
@register.filter
@stringfilter
def hash(value, arg):
"""
Returns a hex-digest of the passed in value for the hash algorithm given.
"""
arg = str(arg).lower()
if not arg in get_available_hashes():
raise TemplateSyntaxError("The %s hash algorithm does not exist. Supported algorithms are: %" % (arg, get_available_hashes()))
try:
f = getattr(hashlib, arg)
hashed = f(value).hexdigest()
except Exception:
raise ValueError("The %s hash algorithm cannot produce a hex digest. Ensure that OpenSSL is properly installed." % arg)
return hashed
|
// ... existing code ...
if not arg in get_available_hashes():
raise TemplateSyntaxError("The %s hash algorithm does not exist. Supported algorithms are: %" % (arg, get_available_hashes()))
try:
// ... rest of the code ...
|
02e4a051e6e463d06195e9efe6a25c84cc046b55
|
tests/base.py
|
tests/base.py
|
import unittest
from app import create_app, db
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = {
"username": "brian",
"password": "password"
}
with self.app.app_context():
db.create_all()
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
|
import unittest
import json
from app import create_app, db
from app.models import User
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = json.dumps({
"username": "brian",
"password": "password"
})
with self.app.app_context():
db.create_all()
def set_headers(self):
""" Set headers for Authorization and Content Type. """
self.client.post("/auth/register",
data=self.user,
content_type='application/json')
response = self.client.post( "/auth/login",
data=self.user,
content_type='application/json')
payload = json.loads(response.data.decode())
# get the token from the reponse body
self.token = payload['token']
return dict({
'Authorization': self.token,
'Content-Type': 'application/json',
})
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
|
Add authorization and content-type headers to request for tests
|
[CHORE] Add authorization and content-type headers to request for tests
|
Python
|
mit
|
brayoh/bucket-list-api
|
import unittest
+ import json
from app import create_app, db
+ from app.models import User
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
- self.user = {
+ self.user = json.dumps({
"username": "brian",
"password": "password"
- }
+ })
with self.app.app_context():
db.create_all()
+
+ def set_headers(self):
+ """ Set headers for Authorization and Content Type. """
+ self.client.post("/auth/register",
+ data=self.user,
+ content_type='application/json')
+
+ response = self.client.post( "/auth/login",
+ data=self.user,
+ content_type='application/json')
+
+ payload = json.loads(response.data.decode())
+
+ # get the token from the reponse body
+ self.token = payload['token']
+
+ return dict({
+ 'Authorization': self.token,
+ 'Content-Type': 'application/json',
+ })
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
|
Add authorization and content-type headers to request for tests
|
## Code Before:
import unittest
from app import create_app, db
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = {
"username": "brian",
"password": "password"
}
with self.app.app_context():
db.create_all()
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
## Instruction:
Add authorization and content-type headers to request for tests
## Code After:
import unittest
import json
from app import create_app, db
from app.models import User
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = json.dumps({
"username": "brian",
"password": "password"
})
with self.app.app_context():
db.create_all()
def set_headers(self):
""" Set headers for Authorization and Content Type. """
self.client.post("/auth/register",
data=self.user,
content_type='application/json')
response = self.client.post( "/auth/login",
data=self.user,
content_type='application/json')
payload = json.loads(response.data.decode())
# get the token from the reponse body
self.token = payload['token']
return dict({
'Authorization': self.token,
'Content-Type': 'application/json',
})
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
|
// ... existing code ...
import unittest
import json
from app import create_app, db
from app.models import User
// ... modified code ...
self.client = self.app.test_client()
self.user = json.dumps({
"username": "brian",
...
"password": "password"
})
...
db.create_all()
def set_headers(self):
""" Set headers for Authorization and Content Type. """
self.client.post("/auth/register",
data=self.user,
content_type='application/json')
response = self.client.post( "/auth/login",
data=self.user,
content_type='application/json')
payload = json.loads(response.data.decode())
# get the token from the reponse body
self.token = payload['token']
return dict({
'Authorization': self.token,
'Content-Type': 'application/json',
})
// ... rest of the code ...
|
4dabc48455ebb8f22d37cd964ceb16373f784362
|
mothermayi/colors.py
|
mothermayi/colors.py
|
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(text):
return GREEN + text + ENDC
def red(text):
return RED + text + ENDC
|
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(text):
return GREEN + text + ENDC
def red(text):
return RED + text + ENDC
def yellow(text):
return YELLOW + text + ENDC
|
Add a function for getting yellow
|
Add a function for getting yellow
|
Python
|
mit
|
EliRibble/mothermayi
|
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(text):
return GREEN + text + ENDC
def red(text):
return RED + text + ENDC
+ def yellow(text):
+ return YELLOW + text + ENDC
+
|
Add a function for getting yellow
|
## Code Before:
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(text):
return GREEN + text + ENDC
def red(text):
return RED + text + ENDC
## Instruction:
Add a function for getting yellow
## Code After:
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(text):
return GREEN + text + ENDC
def red(text):
return RED + text + ENDC
def yellow(text):
return YELLOW + text + ENDC
|
...
return RED + text + ENDC
def yellow(text):
return YELLOW + text + ENDC
...
|
8c3782e676e27bf6b3512ea390ad789698ba331c
|
memegen/routes/_cache.py
|
memegen/routes/_cache.py
|
import logging
import yorm
from yorm.types import List, Object
log = logging.getLogger(__name__)
@yorm.attr(items=List.of_type(Object))
@yorm.sync("data/images/cache.yml")
class Cache:
SIZE = 9
def __init__(self):
self.items = []
def add(self, **kwargs):
if kwargs['key'] == 'custom':
return
log.info("Caching: %s", kwargs)
self.items.insert(0, kwargs)
while len(self.items) > self.SIZE:
self.items.pop()
yorm.save(self)
def get(self, index):
log.info("Getting cache index: %s", index)
try:
data = self.items[index]
except IndexError:
data = {}
log.info("Retrieved cache: %s", data)
return data
|
import logging
import yorm
from yorm.types import List, Object
log = logging.getLogger(__name__)
@yorm.attr(items=List.of_type(Object))
@yorm.sync("data/images/cache.yml")
class Cache:
SIZE = 9
def __init__(self):
self.items = []
def add(self, **kwargs):
if kwargs['key'] == 'custom' or kwargs in self.items:
return
log.info("Caching: %s", kwargs)
self.items.insert(0, kwargs)
while len(self.items) > self.SIZE:
self.items.pop()
yorm.save(self)
def get(self, index):
log.info("Getting cache index: %s", index)
try:
data = self.items[index]
except IndexError:
data = {}
log.info("Retrieved cache: %s", data)
return data
|
Disable caching of identical images
|
Disable caching of identical images
|
Python
|
mit
|
DanLindeman/memegen,DanLindeman/memegen,DanLindeman/memegen,DanLindeman/memegen
|
import logging
import yorm
from yorm.types import List, Object
log = logging.getLogger(__name__)
@yorm.attr(items=List.of_type(Object))
@yorm.sync("data/images/cache.yml")
class Cache:
SIZE = 9
def __init__(self):
self.items = []
def add(self, **kwargs):
- if kwargs['key'] == 'custom':
+ if kwargs['key'] == 'custom' or kwargs in self.items:
return
log.info("Caching: %s", kwargs)
self.items.insert(0, kwargs)
while len(self.items) > self.SIZE:
self.items.pop()
yorm.save(self)
def get(self, index):
log.info("Getting cache index: %s", index)
try:
data = self.items[index]
except IndexError:
data = {}
log.info("Retrieved cache: %s", data)
return data
|
Disable caching of identical images
|
## Code Before:
import logging
import yorm
from yorm.types import List, Object
log = logging.getLogger(__name__)
@yorm.attr(items=List.of_type(Object))
@yorm.sync("data/images/cache.yml")
class Cache:
SIZE = 9
def __init__(self):
self.items = []
def add(self, **kwargs):
if kwargs['key'] == 'custom':
return
log.info("Caching: %s", kwargs)
self.items.insert(0, kwargs)
while len(self.items) > self.SIZE:
self.items.pop()
yorm.save(self)
def get(self, index):
log.info("Getting cache index: %s", index)
try:
data = self.items[index]
except IndexError:
data = {}
log.info("Retrieved cache: %s", data)
return data
## Instruction:
Disable caching of identical images
## Code After:
import logging
import yorm
from yorm.types import List, Object
log = logging.getLogger(__name__)
@yorm.attr(items=List.of_type(Object))
@yorm.sync("data/images/cache.yml")
class Cache:
SIZE = 9
def __init__(self):
self.items = []
def add(self, **kwargs):
if kwargs['key'] == 'custom' or kwargs in self.items:
return
log.info("Caching: %s", kwargs)
self.items.insert(0, kwargs)
while len(self.items) > self.SIZE:
self.items.pop()
yorm.save(self)
def get(self, index):
log.info("Getting cache index: %s", index)
try:
data = self.items[index]
except IndexError:
data = {}
log.info("Retrieved cache: %s", data)
return data
|
// ... existing code ...
def add(self, **kwargs):
if kwargs['key'] == 'custom' or kwargs in self.items:
return
// ... rest of the code ...
|
945aba9548b92f57fc25f9996bfa9c3811e64deb
|
server/resources.py
|
server/resources.py
|
from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
db_lectures = Lecture.query.filter(Lecture.id == lecture_id).all()
if not db_lectures:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
lecture = db_lectures[0]
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
|
from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
|
Change single Lecture query to use first() in stead of all()
|
Change single Lecture query to use first() in stead of all()
|
Python
|
mit
|
MACSIFS/IFS,MACSIFS/IFS,MACSIFS/IFS,MACSIFS/IFS
|
from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
- db_lectures = Lecture.query.filter(Lecture.id == lecture_id).all()
+ lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
- if not db_lectures:
+ if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
- lecture = db_lectures[0]
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
|
Change single Lecture query to use first() instead of all()
|
## Code Before:
from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
db_lectures = Lecture.query.filter(Lecture.id == lecture_id).all()
if not db_lectures:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
lecture = db_lectures[0]
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
## Instruction:
Change single Lecture query to use first() instead of all()
## Code After:
from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
|
// ... existing code ...
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
// ... modified code ...
content = args.data
// ... rest of the code ...
|
ee494fd205c58029960d4a5702f59418c8110eb3
|
django_iceberg/context_processors.py
|
django_iceberg/context_processors.py
|
import logging
logger = logging.getLogger(__name__)
from django_iceberg.auth_utils import init_iceberg, get_conf_class
def iceberg_settings(request):
"""
Defines some template variables in context
"""
conf = get_conf_class(request)
if not conf:
ICEBERG_API_URL_FULL = "https://api.iceberg.technology"
ICEBERG_CORS = "https://api.iceberg.technology/cors/"
iceberg_env = 'prod'
else:
iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod')
ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL
ICEBERG_CORS = conf.ICEBERG_CORS
res = {
"ICEBERG_ENV": iceberg_env,
"ICEBERG_API_URL": ICEBERG_API_URL_FULL,
"ICEBERG_CORS": ICEBERG_CORS,
}
if request.user.is_authenticated():
res['access_token'] = init_iceberg(request).access_token
else:
res['ICEBERG_ENV'] = None
res['access_token'] = "anonymous"
return res
|
import logging
logger = logging.getLogger(__name__)
from django_iceberg.auth_utils import init_iceberg, get_conf_class
def iceberg_settings(request):
"""
Defines some template variables in context
"""
conf = get_conf_class(request)
if not conf:
ICEBERG_API_URL_FULL = "https://api.iceberg.technology"
ICEBERG_CORS = "https://api.iceberg.technology/cors/"
iceberg_env = 'prod'
else:
iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod')
ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL
ICEBERG_CORS = conf.ICEBERG_CORS
res = {
"ICEBERG_ENV": iceberg_env,
"ICEBERG_API_URL": ICEBERG_API_URL_FULL,
"ICEBERG_CORS": ICEBERG_CORS,
}
if request.user.is_authenticated():
res['access_token'] = init_iceberg(request).access_token
res['username'] = init_iceberg(request).username
else:
res['ICEBERG_ENV'] = None
res['access_token'] = "anonymous"
return res
|
Add username to context in iceberg_settings context processor
|
Add username to context in iceberg_settings context processor
|
Python
|
mit
|
izberg-marketplace/django-izberg,izberg-marketplace/django-izberg,Iceberg-Marketplace/django-iceberg,Iceberg-Marketplace/django-iceberg
|
import logging
logger = logging.getLogger(__name__)
from django_iceberg.auth_utils import init_iceberg, get_conf_class
def iceberg_settings(request):
"""
Defines some template variables in context
"""
conf = get_conf_class(request)
if not conf:
ICEBERG_API_URL_FULL = "https://api.iceberg.technology"
ICEBERG_CORS = "https://api.iceberg.technology/cors/"
iceberg_env = 'prod'
else:
iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod')
ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL
ICEBERG_CORS = conf.ICEBERG_CORS
res = {
"ICEBERG_ENV": iceberg_env,
"ICEBERG_API_URL": ICEBERG_API_URL_FULL,
"ICEBERG_CORS": ICEBERG_CORS,
}
if request.user.is_authenticated():
res['access_token'] = init_iceberg(request).access_token
+ res['username'] = init_iceberg(request).username
else:
res['ICEBERG_ENV'] = None
res['access_token'] = "anonymous"
return res
|
Add username to context in iceberg_settings context processor
|
## Code Before:
import logging
logger = logging.getLogger(__name__)
from django_iceberg.auth_utils import init_iceberg, get_conf_class
def iceberg_settings(request):
"""
Defines some template variables in context
"""
conf = get_conf_class(request)
if not conf:
ICEBERG_API_URL_FULL = "https://api.iceberg.technology"
ICEBERG_CORS = "https://api.iceberg.technology/cors/"
iceberg_env = 'prod'
else:
iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod')
ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL
ICEBERG_CORS = conf.ICEBERG_CORS
res = {
"ICEBERG_ENV": iceberg_env,
"ICEBERG_API_URL": ICEBERG_API_URL_FULL,
"ICEBERG_CORS": ICEBERG_CORS,
}
if request.user.is_authenticated():
res['access_token'] = init_iceberg(request).access_token
else:
res['ICEBERG_ENV'] = None
res['access_token'] = "anonymous"
return res
## Instruction:
Add username to context in iceberg_settings context processor
## Code After:
import logging
logger = logging.getLogger(__name__)
from django_iceberg.auth_utils import init_iceberg, get_conf_class
def iceberg_settings(request):
"""
Defines some template variables in context
"""
conf = get_conf_class(request)
if not conf:
ICEBERG_API_URL_FULL = "https://api.iceberg.technology"
ICEBERG_CORS = "https://api.iceberg.technology/cors/"
iceberg_env = 'prod'
else:
iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod')
ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL
ICEBERG_CORS = conf.ICEBERG_CORS
res = {
"ICEBERG_ENV": iceberg_env,
"ICEBERG_API_URL": ICEBERG_API_URL_FULL,
"ICEBERG_CORS": ICEBERG_CORS,
}
if request.user.is_authenticated():
res['access_token'] = init_iceberg(request).access_token
res['username'] = init_iceberg(request).username
else:
res['ICEBERG_ENV'] = None
res['access_token'] = "anonymous"
return res
|
# ... existing code ...
res['access_token'] = init_iceberg(request).access_token
res['username'] = init_iceberg(request).username
else:
# ... rest of the code ...
|
2da3f9cf12c340322f512585711ebc02097c72a1
|
tests/views/test_calls_for_comments_page.py
|
tests/views/test_calls_for_comments_page.py
|
from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, CallForCommentData
import urllib.request, urllib.error, urllib.parse
class TestCallsForCommentsPage(PMGLiveServerTestCase):
def setUp(self):
super(TestCallsForCommentsPage, self).setUp()
self.fx = dbfixture.data(CallForCommentData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestCallsForCommentsPage, self).tearDown()
def test_calls_for_comments(self):
"""
Test calls for comments page (/calls-for-comments/)
"""
call_for_comment = self.fx.CallForCommentData.arts_call_for_comment_one
self.make_request("/calls-for-comments/")
self.assertIn(call_for_comment.title, self.html)
self.assertTrue(False)
|
from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, CallForCommentData
import urllib.request, urllib.error, urllib.parse
class TestCallsForCommentsPage(PMGLiveServerTestCase):
def setUp(self):
super(TestCallsForCommentsPage, self).setUp()
self.fx = dbfixture.data(CallForCommentData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestCallsForCommentsPage, self).tearDown()
def test_calls_for_comments(self):
"""
Test calls for comments page (/calls-for-comments/)
"""
call_for_comment = self.fx.CallForCommentData.arts_call_for_comment_one
self.make_request("/calls-for-comments/")
self.assertIn(call_for_comment.title, self.html)
|
Remove false assertion from test
|
Remove false assertion from test
|
Python
|
apache-2.0
|
Code4SA/pmg-cms-2,Code4SA/pmg-cms-2,Code4SA/pmg-cms-2
|
from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, CallForCommentData
import urllib.request, urllib.error, urllib.parse
class TestCallsForCommentsPage(PMGLiveServerTestCase):
def setUp(self):
super(TestCallsForCommentsPage, self).setUp()
self.fx = dbfixture.data(CallForCommentData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestCallsForCommentsPage, self).tearDown()
def test_calls_for_comments(self):
"""
Test calls for comments page (/calls-for-comments/)
"""
call_for_comment = self.fx.CallForCommentData.arts_call_for_comment_one
self.make_request("/calls-for-comments/")
self.assertIn(call_for_comment.title, self.html)
- self.assertTrue(False)
|
Remove false assertion from test
|
## Code Before:
from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, CallForCommentData
import urllib.request, urllib.error, urllib.parse
class TestCallsForCommentsPage(PMGLiveServerTestCase):
def setUp(self):
super(TestCallsForCommentsPage, self).setUp()
self.fx = dbfixture.data(CallForCommentData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestCallsForCommentsPage, self).tearDown()
def test_calls_for_comments(self):
"""
Test calls for comments page (/calls-for-comments/)
"""
call_for_comment = self.fx.CallForCommentData.arts_call_for_comment_one
self.make_request("/calls-for-comments/")
self.assertIn(call_for_comment.title, self.html)
self.assertTrue(False)
## Instruction:
Remove false assertion from test
## Code After:
from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, CallForCommentData
import urllib.request, urllib.error, urllib.parse
class TestCallsForCommentsPage(PMGLiveServerTestCase):
def setUp(self):
super(TestCallsForCommentsPage, self).setUp()
self.fx = dbfixture.data(CallForCommentData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestCallsForCommentsPage, self).tearDown()
def test_calls_for_comments(self):
"""
Test calls for comments page (/calls-for-comments/)
"""
call_for_comment = self.fx.CallForCommentData.arts_call_for_comment_one
self.make_request("/calls-for-comments/")
self.assertIn(call_for_comment.title, self.html)
|
# ... existing code ...
self.assertIn(call_for_comment.title, self.html)
# ... rest of the code ...
|
cf550ac3a00531f2f964fbbb7e27c37071983d26
|
utils/aiohttp_wrap.py
|
utils/aiohttp_wrap.py
|
import aiohttp
async def aio_get(url: str):
async with aiohttp.ClientSession() as session:
<<<<<<< HEAD
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
=======
async with session.get(url) as r:
if r.status == 200:
return r
else:
return None
>>>>>>> parent of 6b6d243... progress on DDG cog & aiohttp wrapper
|
import aiohttp
async def aio_get_text(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
|
Revert "Revert "progress on DDG cog & aiohttp wrapper""
|
Revert "Revert "progress on DDG cog & aiohttp wrapper""
This reverts commit 85d3b1203d9861f986356e593a2b79d96c38c1b3.
|
Python
|
mit
|
Naught0/qtbot
|
import aiohttp
- async def aio_get(url: str):
+ async def aio_get_text(url, headers=None):
async with aiohttp.ClientSession() as session:
- <<<<<<< HEAD
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
- =======
- async with session.get(url) as r:
- if r.status == 200:
- return r
- else:
- return None
- >>>>>>> parent of 6b6d243... progress on DDG cog & aiohttp wrapper
|
Revert "Revert "progress on DDG cog & aiohttp wrapper""
|
## Code Before:
import aiohttp
async def aio_get(url: str):
async with aiohttp.ClientSession() as session:
<<<<<<< HEAD
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
=======
async with session.get(url) as r:
if r.status == 200:
return r
else:
return None
>>>>>>> parent of 6b6d243... progress on DDG cog & aiohttp wrapper
## Instruction:
Revert "Revert "progress on DDG cog & aiohttp wrapper""
## Code After:
import aiohttp
async def aio_get_text(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
|
...
async def aio_get_text(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
...
return None
...
|
7cc968f90407745b84bd2f663e5f64b9c0923605
|
project/manage.py
|
project/manage.py
|
import os
import sys
import environ
if __name__ == "__main__":
if os.path.isfile('.env'):
environ.Env.read_env('.env')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
import os
import sys
import environ
ROOT_DIR = environ.Path(__file__) - 1
if __name__ == "__main__":
if os.path.isfile(str(ROOT_DIR + '.env')):
environ.Env.read_env(str(ROOT_DIR + '.env'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Use full path in case the working dir is not the same
|
Use full path in case the working dir is not the same
|
Python
|
mit
|
hacklab-fi/asylum,hacklab-fi/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,rambo/asylum,jautero/asylum,rambo/asylum,rambo/asylum,rambo/asylum,jautero/asylum,HelsinkiHacklab/asylum,HelsinkiHacklab/asylum
|
import os
import sys
import environ
+ ROOT_DIR = environ.Path(__file__) - 1
if __name__ == "__main__":
- if os.path.isfile('.env'):
+ if os.path.isfile(str(ROOT_DIR + '.env')):
- environ.Env.read_env('.env')
+ environ.Env.read_env(str(ROOT_DIR + '.env'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Use full path in case the working dir is not the same
|
## Code Before:
import os
import sys
import environ
if __name__ == "__main__":
if os.path.isfile('.env'):
environ.Env.read_env('.env')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
## Instruction:
Use full path in case the working dir is not the same
## Code After:
import os
import sys
import environ
ROOT_DIR = environ.Path(__file__) - 1
if __name__ == "__main__":
if os.path.isfile(str(ROOT_DIR + '.env')):
environ.Env.read_env(str(ROOT_DIR + '.env'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
# ... existing code ...
import environ
ROOT_DIR = environ.Path(__file__) - 1
# ... modified code ...
if __name__ == "__main__":
if os.path.isfile(str(ROOT_DIR + '.env')):
environ.Env.read_env(str(ROOT_DIR + '.env'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
# ... rest of the code ...
|
6bc11ea44c07cddd567a5039b9442a95e9ce04fe
|
comics/crawler/utils/lxmlparser.py
|
comics/crawler/utils/lxmlparser.py
|
from lxml.html import parse, fromstring
class LxmlParser(object):
def __init__(self, url=None, string=None):
if url:
self.root = parse(url).getroot()
self.root.make_links_absolute(url)
elif string:
self.root = fromstring(string)
def text(self, selector):
return self.select(selector).text_content()
def src(self, selector):
return self.select(selector).get('src')
def alt(self, selector):
return self.select(selector).get('alt')
def title(self, selector):
return self.select(selector).get('title')
def remove(self, selector):
for element in self.root.cssselect(selector):
element.drop_tree()
def select(self, selector):
elements = self.root.cssselect(selector)
if len(elements) == 0:
raise DoesNotExist('Noting matched the selector: %s' % selector)
elif len(elements) > 1:
raise MultipleElementsReturned('Selector matched %d elements: %s' %
(len(elements), selector))
return elements[0]
class DoesNotExist(Exception):
pass
class MultipleElementsReturned(Exception):
pass
|
from lxml.html import parse, fromstring
class LxmlParser(object):
def __init__(self, url=None, string=None):
if url is not None:
self.root = parse(url).getroot()
self.root.make_links_absolute(url)
elif string is not None:
self.root = fromstring(string)
else:
raise LxmlParserException()
def text(self, selector):
return self.select(selector).text_content()
def src(self, selector):
return self.select(selector).get('src')
def alt(self, selector):
return self.select(selector).get('alt')
def title(self, selector):
return self.select(selector).get('title')
def remove(self, selector):
for element in self.root.cssselect(selector):
element.drop_tree()
def select(self, selector):
elements = self.root.cssselect(selector)
if len(elements) == 0:
raise DoesNotExist('Noting matched the selector: %s' % selector)
elif len(elements) > 1:
raise MultipleElementsReturned('Selector matched %d elements: %s' %
(len(elements), selector))
return elements[0]
class LxmlParserException(Exception):
pass
class DoesNotExist(LxmlParserException):
pass
class MultipleElementsReturned(LxmlParserException):
pass
|
Update exception handling in LxmlParser
|
Update exception handling in LxmlParser
|
Python
|
agpl-3.0
|
datagutten/comics,klette/comics,klette/comics,jodal/comics,jodal/comics,klette/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,datagutten/comics
|
from lxml.html import parse, fromstring
class LxmlParser(object):
def __init__(self, url=None, string=None):
- if url:
+ if url is not None:
self.root = parse(url).getroot()
self.root.make_links_absolute(url)
- elif string:
+ elif string is not None:
self.root = fromstring(string)
+ else:
+ raise LxmlParserException()
def text(self, selector):
return self.select(selector).text_content()
def src(self, selector):
return self.select(selector).get('src')
def alt(self, selector):
return self.select(selector).get('alt')
def title(self, selector):
return self.select(selector).get('title')
def remove(self, selector):
for element in self.root.cssselect(selector):
element.drop_tree()
def select(self, selector):
elements = self.root.cssselect(selector)
if len(elements) == 0:
raise DoesNotExist('Noting matched the selector: %s' % selector)
elif len(elements) > 1:
raise MultipleElementsReturned('Selector matched %d elements: %s' %
(len(elements), selector))
return elements[0]
- class DoesNotExist(Exception):
+ class LxmlParserException(Exception):
pass
- class MultipleElementsReturned(Exception):
+ class DoesNotExist(LxmlParserException):
pass
+ class MultipleElementsReturned(LxmlParserException):
+ pass
+
|
Update exception handling in LxmlParser
|
## Code Before:
from lxml.html import parse, fromstring
class LxmlParser(object):
def __init__(self, url=None, string=None):
if url:
self.root = parse(url).getroot()
self.root.make_links_absolute(url)
elif string:
self.root = fromstring(string)
def text(self, selector):
return self.select(selector).text_content()
def src(self, selector):
return self.select(selector).get('src')
def alt(self, selector):
return self.select(selector).get('alt')
def title(self, selector):
return self.select(selector).get('title')
def remove(self, selector):
for element in self.root.cssselect(selector):
element.drop_tree()
def select(self, selector):
elements = self.root.cssselect(selector)
if len(elements) == 0:
raise DoesNotExist('Noting matched the selector: %s' % selector)
elif len(elements) > 1:
raise MultipleElementsReturned('Selector matched %d elements: %s' %
(len(elements), selector))
return elements[0]
class DoesNotExist(Exception):
pass
class MultipleElementsReturned(Exception):
pass
## Instruction:
Update exception handling in LxmlParser
## Code After:
from lxml.html import parse, fromstring
class LxmlParser(object):
def __init__(self, url=None, string=None):
if url is not None:
self.root = parse(url).getroot()
self.root.make_links_absolute(url)
elif string is not None:
self.root = fromstring(string)
else:
raise LxmlParserException()
def text(self, selector):
return self.select(selector).text_content()
def src(self, selector):
return self.select(selector).get('src')
def alt(self, selector):
return self.select(selector).get('alt')
def title(self, selector):
return self.select(selector).get('title')
def remove(self, selector):
for element in self.root.cssselect(selector):
element.drop_tree()
def select(self, selector):
elements = self.root.cssselect(selector)
if len(elements) == 0:
raise DoesNotExist('Noting matched the selector: %s' % selector)
elif len(elements) > 1:
raise MultipleElementsReturned('Selector matched %d elements: %s' %
(len(elements), selector))
return elements[0]
class LxmlParserException(Exception):
pass
class DoesNotExist(LxmlParserException):
pass
class MultipleElementsReturned(LxmlParserException):
pass
|
...
def __init__(self, url=None, string=None):
if url is not None:
self.root = parse(url).getroot()
...
self.root.make_links_absolute(url)
elif string is not None:
self.root = fromstring(string)
else:
raise LxmlParserException()
...
class LxmlParserException(Exception):
pass
...
class DoesNotExist(LxmlParserException):
pass
class MultipleElementsReturned(LxmlParserException):
pass
...
|
04cca2c87cc8e56ecd84e1b3125a7a7b8c67b026
|
norc_utils/backup.py
|
norc_utils/backup.py
|
import os
from norc.settings import (NORC_LOG_DIR, BACKUP_SYSTEM,
AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
|
import os
from norc.settings import NORC_LOG_DIR, BACKUP_SYSTEM
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
from norc.settings import (AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
|
Move AWS_ setting imports under the check for AmazonS3 so Norc doesn't break without them.
|
Move AWS_ setting imports under the check for AmazonS3 so Norc doesn't break without them.
|
Python
|
bsd-3-clause
|
darrellsilver/norc,darrellsilver/norc
|
import os
- from norc.settings import (NORC_LOG_DIR, BACKUP_SYSTEM,
+ from norc.settings import NORC_LOG_DIR, BACKUP_SYSTEM
- AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
+ from norc.settings import (AWS_ACCESS_KEY_ID,
+ AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
|
Move AWS_ setting imports under the check for AmazonS3 so Norc doesn't break without them.
|
## Code Before:
import os
from norc.settings import (NORC_LOG_DIR, BACKUP_SYSTEM,
AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
## Instruction:
Move AWS_ setting imports under the check for AmazonS3 so Norc doesn't break without them.
## Code After:
import os
from norc.settings import NORC_LOG_DIR, BACKUP_SYSTEM
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
from norc.settings import (AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
|
// ... existing code ...
from norc.settings import NORC_LOG_DIR, BACKUP_SYSTEM
// ... modified code ...
from norc.norc_utils.aws import set_s3_key
from norc.settings import (AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
// ... rest of the code ...
|
09195f50e328d3aee4cc60f0702d8605ea520eb3
|
tests/sentry/utils/models/tests.py
|
tests/sentry/utils/models/tests.py
|
from __future__ import absolute_import
from django.db import models
from sentry.utils.models import Model
from sentry.testutils import TestCase
# There's a good chance this model wont get created in the db, so avoid
# assuming it exists in these tests.
class DummyModel(Model):
foo = models.CharField(max_length=32)
class ModelTest(TestCase):
def test_foo_hasnt_changed_on_init(self):
inst = DummyModel(id=1, foo='bar')
self.assertFalse(inst.has_changed('foo'))
def test_foo_has_changes_before_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
def test_foo_hasnt_changed_after_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
|
from __future__ import absolute_import
from django.db import models
from sentry.utils.models import Model
from sentry.testutils import TestCase
# There's a good chance this model wont get created in the db, so avoid
# assuming it exists in these tests.
class DummyModel(Model):
foo = models.CharField(max_length=32)
class ModelTest(TestCase):
def test_foo_hasnt_changed_on_init(self):
inst = DummyModel(id=1, foo='bar')
self.assertFalse(inst.has_changed('foo'))
def test_foo_has_changes_before_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
def test_foo_hasnt_changed_after_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
self.assertFalse(inst.has_changed('foo'))
|
Add missing assertion in test
|
Add missing assertion in test
|
Python
|
bsd-3-clause
|
NickPresta/sentry,jokey2k/sentry,1tush/sentry,zenefits/sentry,SilentCircle/sentry,wujuguang/sentry,ifduyue/sentry,Kryz/sentry,JamesMura/sentry,Natim/sentry,NickPresta/sentry,BuildingLink/sentry,rdio/sentry,BuildingLink/sentry,ngonzalvez/sentry,JamesMura/sentry,mvaled/sentry,JackDanger/sentry,SilentCircle/sentry,ifduyue/sentry,beeftornado/sentry,hongliang5623/sentry,nicholasserra/sentry,NickPresta/sentry,camilonova/sentry,fotinakis/sentry,jokey2k/sentry,fuziontech/sentry,BuildingLink/sentry,korealerts1/sentry,looker/sentry,wong2/sentry,ifduyue/sentry,BayanGroup/sentry,gencer/sentry,beni55/sentry,mvaled/sentry,SilentCircle/sentry,wujuguang/sentry,imankulov/sentry,jean/sentry,JTCunning/sentry,1tush/sentry,looker/sentry,songyi199111/sentry,zenefits/sentry,jean/sentry,1tush/sentry,ngonzalvez/sentry,TedaLIEz/sentry,daevaorn/sentry,drcapulet/sentry,NickPresta/sentry,BayanGroup/sentry,BuildingLink/sentry,JackDanger/sentry,camilonova/sentry,vperron/sentry,beeftornado/sentry,gencer/sentry,kevinastone/sentry,nicholasserra/sentry,mitsuhiko/sentry,Natim/sentry,looker/sentry,mvaled/sentry,camilonova/sentry,ewdurbin/sentry,korealerts1/sentry,songyi199111/sentry,Kryz/sentry,alexm92/sentry,Natim/sentry,daevaorn/sentry,argonemyth/sentry,ifduyue/sentry,rdio/sentry,kevinastone/sentry,gencer/sentry,jean/sentry,daevaorn/sentry,argonemyth/sentry,ewdurbin/sentry,imankulov/sentry,wujuguang/sentry,fotinakis/sentry,argonemyth/sentry,imankulov/sentry,TedaLIEz/sentry,vperron/sentry,drcapulet/sentry,songyi199111/sentry,BayanGroup/sentry,daevaorn/sentry,kevinlondon/sentry,gg7/sentry,beni55/sentry,gencer/sentry,vperron/sentry,rdio/sentry,pauloschilling/sentry,mvaled/sentry,mvaled/sentry,llonchj/sentry,boneyao/sentry,Kryz/sentry,gg7/sentry,BuildingLink/sentry,jean/sentry,looker/sentry,pauloschilling/sentry,jean/sentry,mitsuhiko/sentry,zenefits/sentry,zenefits/sentry,alexm92/sentry,pauloschilling/sentry,alexm92/sentry,hongliang5623/sentry,mvaled/sentry,wong2/sentry,JTCunning/sentry,beeftornado/sentr
y,beni55/sentry,JamesMura/sentry,jokey2k/sentry,fuziontech/sentry,korealerts1/sentry,JamesMura/sentry,JTCunning/sentry,llonchj/sentry,JamesMura/sentry,JackDanger/sentry,drcapulet/sentry,fuziontech/sentry,kevinlondon/sentry,gg7/sentry,zenefits/sentry,boneyao/sentry,felixbuenemann/sentry,SilentCircle/sentry,kevinastone/sentry,TedaLIEz/sentry,looker/sentry,rdio/sentry,felixbuenemann/sentry,boneyao/sentry,ewdurbin/sentry,gencer/sentry,ngonzalvez/sentry,ifduyue/sentry,felixbuenemann/sentry,fotinakis/sentry,fotinakis/sentry,nicholasserra/sentry,hongliang5623/sentry,llonchj/sentry,wong2/sentry,kevinlondon/sentry
|
from __future__ import absolute_import
from django.db import models
from sentry.utils.models import Model
from sentry.testutils import TestCase
# There's a good chance this model wont get created in the db, so avoid
# assuming it exists in these tests.
class DummyModel(Model):
foo = models.CharField(max_length=32)
class ModelTest(TestCase):
def test_foo_hasnt_changed_on_init(self):
inst = DummyModel(id=1, foo='bar')
self.assertFalse(inst.has_changed('foo'))
def test_foo_has_changes_before_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
def test_foo_hasnt_changed_after_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
+ self.assertFalse(inst.has_changed('foo'))
|
Add missing assertion in test
|
## Code Before:
from __future__ import absolute_import
from django.db import models
from sentry.utils.models import Model
from sentry.testutils import TestCase
# There's a good chance this model wont get created in the db, so avoid
# assuming it exists in these tests.
class DummyModel(Model):
foo = models.CharField(max_length=32)
class ModelTest(TestCase):
def test_foo_hasnt_changed_on_init(self):
inst = DummyModel(id=1, foo='bar')
self.assertFalse(inst.has_changed('foo'))
def test_foo_has_changes_before_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
def test_foo_hasnt_changed_after_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
## Instruction:
Add missing assertion in test
## Code After:
from __future__ import absolute_import
from django.db import models
from sentry.utils.models import Model
from sentry.testutils import TestCase
# There's a good chance this model wont get created in the db, so avoid
# assuming it exists in these tests.
class DummyModel(Model):
foo = models.CharField(max_length=32)
class ModelTest(TestCase):
def test_foo_hasnt_changed_on_init(self):
inst = DummyModel(id=1, foo='bar')
self.assertFalse(inst.has_changed('foo'))
def test_foo_has_changes_before_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
def test_foo_hasnt_changed_after_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
self.assertFalse(inst.has_changed('foo'))
|
// ... existing code ...
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
self.assertFalse(inst.has_changed('foo'))
// ... rest of the code ...
|
a7d8d2f95acbf801c0cc8b0f2a8cc008f6cb34c0
|
rouver/types.py
|
rouver/types.py
|
from __future__ import annotations
from collections.abc import Iterable, Mapping
from typing import Any, Callable, Dict, Tuple
from typing_extensions import TypeAlias
from werkzeug.wrappers import Request
# (name, value)
Header: TypeAlias = Tuple[str, str]
WSGIEnvironment: TypeAlias = Dict[str, Any]
# (body) -> None
StartResponseReturnType: TypeAlias = Callable[[bytes], object]
# (status: str, headers: List[Headers], exc_info) -> response
StartResponse: TypeAlias = Callable[..., StartResponseReturnType]
WSGIResponse: TypeAlias = Iterable[bytes]
WSGIApplication: TypeAlias = Callable[
[WSGIEnvironment, StartResponse], WSGIResponse
]
# (method, path, callback)
RouteDescription: TypeAlias = Tuple[str, str, WSGIApplication]
# (request, previous_args, path_part) -> result
RouteTemplateHandler: TypeAlias = Callable[
[Request, Tuple[Any, ...], str], Any
]
BadArgumentsDict: TypeAlias = Mapping[str, str]
|
from __future__ import annotations
from typing import Any, Callable, Dict, Iterable, Mapping, Tuple
from typing_extensions import TypeAlias
from werkzeug.wrappers import Request
# (name, value)
Header: TypeAlias = Tuple[str, str]
WSGIEnvironment: TypeAlias = Dict[str, Any]
# (body) -> None
StartResponseReturnType: TypeAlias = Callable[[bytes], object]
# (status: str, headers: List[Headers], exc_info) -> response
StartResponse: TypeAlias = Callable[..., StartResponseReturnType]
WSGIResponse: TypeAlias = Iterable[bytes]
WSGIApplication: TypeAlias = Callable[
[WSGIEnvironment, StartResponse], WSGIResponse
]
# (method, path, callback)
RouteDescription: TypeAlias = Tuple[str, str, WSGIApplication]
# (request, previous_args, path_part) -> result
RouteTemplateHandler: TypeAlias = Callable[
[Request, Tuple[Any, ...], str], Any
]
BadArgumentsDict: TypeAlias = Mapping[str, str]
|
Fix imports on Python <= 3.8
|
Fix imports on Python <= 3.8
|
Python
|
mit
|
srittau/rouver
|
from __future__ import annotations
- from collections.abc import Iterable, Mapping
- from typing import Any, Callable, Dict, Tuple
+ from typing import Any, Callable, Dict, Iterable, Mapping, Tuple
from typing_extensions import TypeAlias
from werkzeug.wrappers import Request
# (name, value)
Header: TypeAlias = Tuple[str, str]
WSGIEnvironment: TypeAlias = Dict[str, Any]
# (body) -> None
StartResponseReturnType: TypeAlias = Callable[[bytes], object]
# (status: str, headers: List[Headers], exc_info) -> response
StartResponse: TypeAlias = Callable[..., StartResponseReturnType]
WSGIResponse: TypeAlias = Iterable[bytes]
WSGIApplication: TypeAlias = Callable[
[WSGIEnvironment, StartResponse], WSGIResponse
]
# (method, path, callback)
RouteDescription: TypeAlias = Tuple[str, str, WSGIApplication]
# (request, previous_args, path_part) -> result
RouteTemplateHandler: TypeAlias = Callable[
[Request, Tuple[Any, ...], str], Any
]
BadArgumentsDict: TypeAlias = Mapping[str, str]
|
Fix imports on Python <= 3.8
|
## Code Before:
from __future__ import annotations
from collections.abc import Iterable, Mapping
from typing import Any, Callable, Dict, Tuple
from typing_extensions import TypeAlias
from werkzeug.wrappers import Request
# (name, value)
Header: TypeAlias = Tuple[str, str]
WSGIEnvironment: TypeAlias = Dict[str, Any]
# (body) -> None
StartResponseReturnType: TypeAlias = Callable[[bytes], object]
# (status: str, headers: List[Headers], exc_info) -> response
StartResponse: TypeAlias = Callable[..., StartResponseReturnType]
WSGIResponse: TypeAlias = Iterable[bytes]
WSGIApplication: TypeAlias = Callable[
[WSGIEnvironment, StartResponse], WSGIResponse
]
# (method, path, callback)
RouteDescription: TypeAlias = Tuple[str, str, WSGIApplication]
# (request, previous_args, path_part) -> result
RouteTemplateHandler: TypeAlias = Callable[
[Request, Tuple[Any, ...], str], Any
]
BadArgumentsDict: TypeAlias = Mapping[str, str]
## Instruction:
Fix imports on Python <= 3.8
## Code After:
from __future__ import annotations
from typing import Any, Callable, Dict, Iterable, Mapping, Tuple
from typing_extensions import TypeAlias
from werkzeug.wrappers import Request
# (name, value)
Header: TypeAlias = Tuple[str, str]
WSGIEnvironment: TypeAlias = Dict[str, Any]
# (body) -> None
StartResponseReturnType: TypeAlias = Callable[[bytes], object]
# (status: str, headers: List[Headers], exc_info) -> response
StartResponse: TypeAlias = Callable[..., StartResponseReturnType]
WSGIResponse: TypeAlias = Iterable[bytes]
WSGIApplication: TypeAlias = Callable[
[WSGIEnvironment, StartResponse], WSGIResponse
]
# (method, path, callback)
RouteDescription: TypeAlias = Tuple[str, str, WSGIApplication]
# (request, previous_args, path_part) -> result
RouteTemplateHandler: TypeAlias = Callable[
[Request, Tuple[Any, ...], str], Any
]
BadArgumentsDict: TypeAlias = Mapping[str, str]
|
# ... existing code ...
from typing import Any, Callable, Dict, Iterable, Mapping, Tuple
# ... rest of the code ...
|
c32e87894d4baf404d5b300459fc68a6d9d973c8
|
zun/db/__init__.py
|
zun/db/__init__.py
|
from oslo_db import options
from zun.common import paths
import zun.conf
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')
options.set_defaults(zun.conf.CONF)
options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION, 'zun.sqlite')
|
from oslo_db import options
from zun.common import paths
import zun.conf
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')
options.set_defaults(zun.conf.CONF)
options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION)
|
Remove the duplicated config sqlite_db
|
Remove the duplicated config sqlite_db
The config sqlite_db has been removed from oslo.db. See here:
https://review.openstack.org/#/c/449437/
Change-Id: I9197b08aeb7baabf2d3fdd4cf4bd06b57a6782ff
|
Python
|
apache-2.0
|
kevin-zhaoshuai/zun,kevin-zhaoshuai/zun,kevin-zhaoshuai/zun
|
from oslo_db import options
from zun.common import paths
import zun.conf
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')
options.set_defaults(zun.conf.CONF)
- options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION, 'zun.sqlite')
+ options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION)
|
Remove the duplicated config sqlite_db
|
## Code Before:
from oslo_db import options
from zun.common import paths
import zun.conf
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')
options.set_defaults(zun.conf.CONF)
options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION, 'zun.sqlite')
## Instruction:
Remove the duplicated config sqlite_db
## Code After:
from oslo_db import options
from zun.common import paths
import zun.conf
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')
options.set_defaults(zun.conf.CONF)
options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION)
|
// ... existing code ...
options.set_defaults(zun.conf.CONF)
options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION)
// ... rest of the code ...
|
662287761b8549a86d3fb8c05ec37d47491da120
|
flatblocks/urls.py
|
flatblocks/urls.py
|
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import re_path
from flatblocks.views import edit
urlpatterns = [
re_path("^edit/(?P<pk>\d+)/$", staff_member_required(edit), name="flatblocks-edit"),
]
|
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import re_path
from flatblocks.views import edit
urlpatterns = [
re_path(
r"^edit/(?P<pk>\d+)/$",
staff_member_required(edit),
name="flatblocks-edit",
),
]
|
Use raw string notation for regular expression.
|
Use raw string notation for regular expression.
|
Python
|
bsd-3-clause
|
funkybob/django-flatblocks,funkybob/django-flatblocks
|
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import re_path
from flatblocks.views import edit
urlpatterns = [
- re_path("^edit/(?P<pk>\d+)/$", staff_member_required(edit), name="flatblocks-edit"),
+ re_path(
+ r"^edit/(?P<pk>\d+)/$",
+ staff_member_required(edit),
+ name="flatblocks-edit",
+ ),
]
|
Use raw string notation for regular expression.
|
## Code Before:
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import re_path
from flatblocks.views import edit
urlpatterns = [
re_path("^edit/(?P<pk>\d+)/$", staff_member_required(edit), name="flatblocks-edit"),
]
## Instruction:
Use raw string notation for regular expression.
## Code After:
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import re_path
from flatblocks.views import edit
urlpatterns = [
re_path(
r"^edit/(?P<pk>\d+)/$",
staff_member_required(edit),
name="flatblocks-edit",
),
]
|
// ... existing code ...
urlpatterns = [
re_path(
r"^edit/(?P<pk>\d+)/$",
staff_member_required(edit),
name="flatblocks-edit",
),
]
// ... rest of the code ...
|
52b6dac7528232dfd41841f4697c7a78e2a2e675
|
www/src/Lib/_weakref.py
|
www/src/Lib/_weakref.py
|
class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
|
class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def __call__(self):
return self.obj.obj
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
|
Add method __call__ to _weaksetref.WeakSet
|
Add method __call__ to _weaksetref.WeakSet
|
Python
|
bsd-3-clause
|
olemis/brython,Lh4cKg/brython,molebot/brython,kikocorreoso/brython,Isendir/brython,Mozhuowen/brython,Isendir/brython,amrdraz/brython,Hasimir/brython,olemis/brython,firmlyjin/brython,JohnDenker/brython,olemis/brython,firmlyjin/brython,Mozhuowen/brython,jonathanverner/brython,molebot/brython,jonathanverner/brython,kevinmel2000/brython,Hasimir/brython,Lh4cKg/brython,JohnDenker/brython,firmlyjin/brython,firmlyjin/brython,Hasimir/brython,amrdraz/brython,Hasimir/brython,molebot/brython,kevinmel2000/brython,rubyinhell/brython,Mozhuowen/brython,Isendir/brython,kikocorreoso/brython,brython-dev/brython,rubyinhell/brython,Lh4cKg/brython,JohnDenker/brython,JohnDenker/brython,rubyinhell/brython,rubyinhell/brython,molebot/brython,jonathanverner/brython,kevinmel2000/brython,Lh4cKg/brython,kevinmel2000/brython,brython-dev/brython,kikocorreoso/brython,jonathanverner/brython,Mozhuowen/brython,firmlyjin/brython,amrdraz/brython,Isendir/brython,amrdraz/brython,olemis/brython,brython-dev/brython
|
class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
+ def __call__(self):
+ return self.obj.obj
+
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
|
Add method __call__ to _weaksetref.WeakSet
|
## Code Before:
class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
## Instruction:
Add method __call__ to _weaksetref.WeakSet
## Code After:
class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def __call__(self):
return self.obj.obj
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
|
# ... existing code ...
def __call__(self):
return self.obj.obj
def getweakrefcount(obj):
# ... rest of the code ...
|
9f8b0fe642e7900ac1c966ad365a5fe99456d23e
|
setup.py
|
setup.py
|
from distutils.core import setup
from jsonref import __version__
with open("README.rst") as readme:
long_description = readme.read()
classifiers = [
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.1",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
]
setup(
name="jsonref",
version=__version__,
py_modules=["jsonref", "proxytypes"],
author="Chase Sterling",
author_email="[email protected]",
classifiers=classifiers,
description="An implementation of JSON Reference for Python",
license="MIT",
long_description=long_description,
url="http://github.com/gazpachoking/jsonref",
)
|
from distutils.core import setup
from jsonref import __version__
with open("README.rst") as readme:
long_description = readme.read()
classifiers = [
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.1",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
]
setup(
name="jsonref",
version=__version__,
py_modules=["jsonref", "proxytypes"],
author="Chase Sterling",
author_email="[email protected]",
classifiers=classifiers,
description="An implementation of JSON Reference for Python",
license="MIT",
long_description=long_description,
url="http://github.com/gazpachoking/jsonref",
)
|
Include Python 3.4 and 3.5
|
Include Python 3.4 and 3.5
|
Python
|
mit
|
gazpachoking/jsonref
|
from distutils.core import setup
from jsonref import __version__
with open("README.rst") as readme:
long_description = readme.read()
classifiers = [
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.1",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
+ "Programming Language :: Python :: 3.4",
+ "Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
]
setup(
name="jsonref",
version=__version__,
py_modules=["jsonref", "proxytypes"],
author="Chase Sterling",
author_email="[email protected]",
classifiers=classifiers,
description="An implementation of JSON Reference for Python",
license="MIT",
long_description=long_description,
url="http://github.com/gazpachoking/jsonref",
)
|
Include Python 3.4 and 3.5
|
## Code Before:
from distutils.core import setup
from jsonref import __version__
with open("README.rst") as readme:
long_description = readme.read()
classifiers = [
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.1",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
]
setup(
name="jsonref",
version=__version__,
py_modules=["jsonref", "proxytypes"],
author="Chase Sterling",
author_email="[email protected]",
classifiers=classifiers,
description="An implementation of JSON Reference for Python",
license="MIT",
long_description=long_description,
url="http://github.com/gazpachoking/jsonref",
)
## Instruction:
Include Python 3.4 and 3.5
## Code After:
from distutils.core import setup
from jsonref import __version__
with open("README.rst") as readme:
long_description = readme.read()
classifiers = [
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.1",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
]
setup(
name="jsonref",
version=__version__,
py_modules=["jsonref", "proxytypes"],
author="Chase Sterling",
author_email="[email protected]",
classifiers=classifiers,
description="An implementation of JSON Reference for Python",
license="MIT",
long_description=long_description,
url="http://github.com/gazpachoking/jsonref",
)
|
// ... existing code ...
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
// ... rest of the code ...
|
8b351036f6431bd760565b23d9e887e7d8a73840
|
mysql_statsd/thread_manager.py
|
mysql_statsd/thread_manager.py
|
import Queue
import signal
import threading
import time
class ThreadManager():
"""Knows how to manage dem threads"""
quit = False
quitting = False
threads = []
def __init__(self, queue=Queue.Queue(), threads=[], config={}):
"""Program entry point"""
# Set up queue
self.queue = Queue.Queue()
self.config = config
self.threads = threads
self.register_signal_handlers()
def register_signal_handlers(self):
# Register signal handler
signal.signal(signal.SIGINT, self.signal_handler)
signal.signal(signal.SIGTERM, self.signal_handler)
def run(self):
# Main loop
self.start_threads()
while not self.quit:
time.sleep(1)
def start_threads(self):
for t in self.threads:
t.start()
def signal_handler(self, signal, frame):
""" Handle signals """
print("Caught CTRL+C / SIGKILL")
if not self.quitting:
self.quitting = True
self.stop_threads()
self.quit = True
else:
print("BE PATIENT!@#~!#!@#$~!`1111")
def stop_threads(self):
"""Stops all threads and waits for them to quit"""
print("Stopping threads")
for thread in self.threads:
thread.stop()
while threading.activeCount() > 1:
print("Waiting for %s threads" % threading.activeCount())
time.sleep(1)
print("All threads stopped")
|
import Queue
import signal
import threading
import time
class ThreadManager():
"""Knows how to manage dem threads"""
quit = False
quitting = False
threads = []
def __init__(self, threads=[]):
"""Program entry point"""
self.threads = threads
self.register_signal_handlers()
def register_signal_handlers(self):
# Register signal handler
signal.signal(signal.SIGINT, self.signal_handler)
signal.signal(signal.SIGTERM, self.signal_handler)
def run(self):
# Main loop
self.start_threads()
while not self.quit:
time.sleep(1)
def start_threads(self):
for t in self.threads:
t.start()
def signal_handler(self, signal, frame):
""" Handle signals """
print("Caught CTRL+C / SIGKILL")
if not self.quitting:
self.quitting = True
self.stop_threads()
self.quit = True
else:
print("BE PATIENT!@#~!#!@#$~!`1111")
def stop_threads(self):
"""Stops all threads and waits for them to quit"""
print("Stopping threads")
for thread in self.threads:
thread.stop()
while threading.activeCount() > 1:
print("Waiting for %s threads" % threading.activeCount())
time.sleep(1)
print("All threads stopped")
|
Remove config handling from threadmanager (was unused)
|
Remove config handling from threadmanager (was unused)
|
Python
|
bsd-3-clause
|
spilgames/mysql-statsd,medvedik/mysql-statsd,art-spilgames/mysql-statsd,db-art/mysql-statsd,medvedik/mysql-statsd,bnkr/mysql-statsd
|
import Queue
import signal
import threading
import time
class ThreadManager():
"""Knows how to manage dem threads"""
quit = False
quitting = False
threads = []
- def __init__(self, queue=Queue.Queue(), threads=[], config={}):
+ def __init__(self, threads=[]):
"""Program entry point"""
-
- # Set up queue
- self.queue = Queue.Queue()
- self.config = config
self.threads = threads
-
self.register_signal_handlers()
def register_signal_handlers(self):
# Register signal handler
signal.signal(signal.SIGINT, self.signal_handler)
signal.signal(signal.SIGTERM, self.signal_handler)
def run(self):
# Main loop
self.start_threads()
while not self.quit:
time.sleep(1)
def start_threads(self):
for t in self.threads:
t.start()
def signal_handler(self, signal, frame):
""" Handle signals """
print("Caught CTRL+C / SIGKILL")
if not self.quitting:
self.quitting = True
self.stop_threads()
self.quit = True
else:
print("BE PATIENT!@#~!#!@#$~!`1111")
def stop_threads(self):
"""Stops all threads and waits for them to quit"""
print("Stopping threads")
for thread in self.threads:
thread.stop()
while threading.activeCount() > 1:
print("Waiting for %s threads" % threading.activeCount())
time.sleep(1)
print("All threads stopped")
|
Remove config handling from threadmanager (was unused)
|
## Code Before:
import Queue
import signal
import threading
import time
class ThreadManager():
"""Knows how to manage dem threads"""
quit = False
quitting = False
threads = []
def __init__(self, queue=Queue.Queue(), threads=[], config={}):
"""Program entry point"""
# Set up queue
self.queue = Queue.Queue()
self.config = config
self.threads = threads
self.register_signal_handlers()
def register_signal_handlers(self):
# Register signal handler
signal.signal(signal.SIGINT, self.signal_handler)
signal.signal(signal.SIGTERM, self.signal_handler)
def run(self):
# Main loop
self.start_threads()
while not self.quit:
time.sleep(1)
def start_threads(self):
for t in self.threads:
t.start()
def signal_handler(self, signal, frame):
""" Handle signals """
print("Caught CTRL+C / SIGKILL")
if not self.quitting:
self.quitting = True
self.stop_threads()
self.quit = True
else:
print("BE PATIENT!@#~!#!@#$~!`1111")
def stop_threads(self):
"""Stops all threads and waits for them to quit"""
print("Stopping threads")
for thread in self.threads:
thread.stop()
while threading.activeCount() > 1:
print("Waiting for %s threads" % threading.activeCount())
time.sleep(1)
print("All threads stopped")
## Instruction:
Remove config handling from threadmanager (was unused)
## Code After:
import Queue
import signal
import threading
import time
class ThreadManager():
"""Knows how to manage dem threads"""
quit = False
quitting = False
threads = []
def __init__(self, threads=[]):
"""Program entry point"""
self.threads = threads
self.register_signal_handlers()
def register_signal_handlers(self):
# Register signal handler
signal.signal(signal.SIGINT, self.signal_handler)
signal.signal(signal.SIGTERM, self.signal_handler)
def run(self):
# Main loop
self.start_threads()
while not self.quit:
time.sleep(1)
def start_threads(self):
for t in self.threads:
t.start()
def signal_handler(self, signal, frame):
""" Handle signals """
print("Caught CTRL+C / SIGKILL")
if not self.quitting:
self.quitting = True
self.stop_threads()
self.quit = True
else:
print("BE PATIENT!@#~!#!@#$~!`1111")
def stop_threads(self):
"""Stops all threads and waits for them to quit"""
print("Stopping threads")
for thread in self.threads:
thread.stop()
while threading.activeCount() > 1:
print("Waiting for %s threads" % threading.activeCount())
time.sleep(1)
print("All threads stopped")
|
# ... existing code ...
def __init__(self, threads=[]):
"""Program entry point"""
self.threads = threads
self.register_signal_handlers()
# ... rest of the code ...
|
d6f2b132844d1923932447c0ce67c581f723f433
|
wagtail/wagtailadmin/menu.py
|
wagtail/wagtailadmin/menu.py
|
from __future__ import unicode_literals
from six import text_type
from django.utils.text import slugify
from django.utils.html import format_html
class MenuItem(object):
def __init__(self, label, url, name=None, classnames='', order=1000):
self.label = label
self.url = url
self.classnames = classnames
self.name = (name or slugify(text_type(label)))
self.order = order
def render_html(self):
return format_html(
"""<li class="menu-{0}"><a href="{1}" class="{2}">{3}</a></li>""",
self.name, self.url, self.classnames, self.label)
|
from __future__ import unicode_literals
from six import text_type
try:
# renamed util -> utils in Django 1.7; try the new name first
from django.forms.utils import flatatt
except ImportError:
from django.forms.util import flatatt
from django.utils.text import slugify
from django.utils.html import format_html
class MenuItem(object):
def __init__(self, label, url, name=None, classnames='', attrs=None, order=1000):
self.label = label
self.url = url
self.classnames = classnames
self.name = (name or slugify(text_type(label)))
self.order = order
if attrs:
self.attr_string = flatatt(attrs)
else:
self.attr_string = ""
def render_html(self):
return format_html(
"""<li class="menu-{0}"><a href="{1}" class="{2}"{3}>{4}</a></li>""",
self.name, self.url, self.classnames, self.attr_string, self.label)
|
Support passing html attributes into MenuItem
|
Support passing html attributes into MenuItem
|
Python
|
bsd-3-clause
|
JoshBarr/wagtail,m-sanders/wagtail,hamsterbacke23/wagtail,benemery/wagtail,jordij/wagtail,nutztherookie/wagtail,mixxorz/wagtail,nutztherookie/wagtail,dresiu/wagtail,serzans/wagtail,mixxorz/wagtail,bjesus/wagtail,nrsimha/wagtail,nilnvoid/wagtail,inonit/wagtail,torchbox/wagtail,wagtail/wagtail,dresiu/wagtail,davecranwell/wagtail,timorieber/wagtail,kurtrwall/wagtail,Pennebaker/wagtail,kaedroho/wagtail,kurtrwall/wagtail,thenewguy/wagtail,jnns/wagtail,nealtodd/wagtail,rsalmaso/wagtail,taedori81/wagtail,mephizzle/wagtail,stevenewey/wagtail,quru/wagtail,marctc/wagtail,tangentlabs/wagtail,Klaudit/wagtail,quru/wagtail,gogobook/wagtail,kurtrwall/wagtail,takeshineshiro/wagtail,rsalmaso/wagtail,quru/wagtail,wagtail/wagtail,thenewguy/wagtail,benjaoming/wagtail,mixxorz/wagtail,taedori81/wagtail,nrsimha/wagtail,taedori81/wagtail,nilnvoid/wagtail,zerolab/wagtail,mephizzle/wagtail,rjsproxy/wagtail,darith27/wagtail,benjaoming/wagtail,iho/wagtail,jnns/wagtail,rv816/wagtail,nealtodd/wagtail,torchbox/wagtail,serzans/wagtail,mephizzle/wagtail,WQuanfeng/wagtail,takeflight/wagtail,mjec/wagtail,thenewguy/wagtail,torchbox/wagtail,rjsproxy/wagtail,jorge-marques/wagtail,m-sanders/wagtail,iho/wagtail,benemery/wagtail,serzans/wagtail,stevenewey/wagtail,janusnic/wagtail,JoshBarr/wagtail,chimeno/wagtail,Tivix/wagtail,chimeno/wagtail,nilnvoid/wagtail,Klaudit/wagtail,chrxr/wagtail,marctc/wagtail,KimGlazebrook/wagtail-experiment,gogobook/wagtail,zerolab/wagtail,dresiu/wagtail,takeflight/wagtail,nimasmi/wagtail,nimasmi/wagtail,JoshBarr/wagtail,Pennebaker/wagtail,hanpama/wagtail,davecranwell/wagtail,iansprice/wagtail,kaedroho/wagtail,inonit/wagtail,mixxorz/wagtail,rv816/wagtail,KimGlazebrook/wagtail-experiment,stevenewey/wagtail,inonit/wagtail,jordij/wagtail,kurtw/wagtail,bjesus/wagtail,mephizzle/wagtail,jorge-marques/wagtail,torchbox/wagtail,nilnvoid/wagtail,chimeno/wagtail,gasman/wagtail,mjec/wagtail,dresiu/wagtail,hanpama/wagtail,hamsterbacke23/wagtail,rv816/wagtail,KimGlazebrook/wagtail-experiment,
tangentlabs/wagtail,mayapurmedia/wagtail,willcodefortea/wagtail,FlipperPA/wagtail,FlipperPA/wagtail,gogobook/wagtail,timorieber/wagtail,jnns/wagtail,m-sanders/wagtail,nutztherookie/wagtail,nimasmi/wagtail,hamsterbacke23/wagtail,mjec/wagtail,thenewguy/wagtail,wagtail/wagtail,kaedroho/wagtail,willcodefortea/wagtail,willcodefortea/wagtail,bjesus/wagtail,gasman/wagtail,chrxr/wagtail,gogobook/wagtail,zerolab/wagtail,rjsproxy/wagtail,wagtail/wagtail,nrsimha/wagtail,Klaudit/wagtail,iho/wagtail,mjec/wagtail,chrxr/wagtail,timorieber/wagtail,FlipperPA/wagtail,benemery/wagtail,mikedingjan/wagtail,mikedingjan/wagtail,gasman/wagtail,janusnic/wagtail,Toshakins/wagtail,WQuanfeng/wagtail,rv816/wagtail,takeflight/wagtail,WQuanfeng/wagtail,janusnic/wagtail,rjsproxy/wagtail,nutztherookie/wagtail,janusnic/wagtail,iansprice/wagtail,JoshBarr/wagtail,jnns/wagtail,takeshineshiro/wagtail,kaedroho/wagtail,willcodefortea/wagtail,taedori81/wagtail,bjesus/wagtail,jorge-marques/wagtail,Tivix/wagtail,darith27/wagtail,marctc/wagtail,mayapurmedia/wagtail,gasman/wagtail,mayapurmedia/wagtail,tangentlabs/wagtail,dresiu/wagtail,iansprice/wagtail,kaedroho/wagtail,kurtw/wagtail,inonit/wagtail,benjaoming/wagtail,Tivix/wagtail,zerolab/wagtail,stevenewey/wagtail,mayapurmedia/wagtail,davecranwell/wagtail,jorge-marques/wagtail,darith27/wagtail,chimeno/wagtail,Toshakins/wagtail,rsalmaso/wagtail,nrsimha/wagtail,gasman/wagtail,Tivix/wagtail,nealtodd/wagtail,Pennebaker/wagtail,rsalmaso/wagtail,jordij/wagtail,jorge-marques/wagtail,benemery/wagtail,iho/wagtail,hamsterbacke23/wagtail,FlipperPA/wagtail,hanpama/wagtail,takeshineshiro/wagtail,kurtw/wagtail,nimasmi/wagtail,iansprice/wagtail,kurtrwall/wagtail,nealtodd/wagtail,davecranwell/wagtail,rsalmaso/wagtail,timorieber/wagtail,Pennebaker/wagtail,Klaudit/wagtail,serzans/wagtail,m-sanders/wagtail,marctc/wagtail,taedori81/wagtail,darith27/wagtail,thenewguy/wagtail,chimeno/wagtail,zerolab/wagtail,takeflight/wagtail,chrxr/wagtail,mikedingjan/wagtail,Toshakins/wagtail,mik
edingjan/wagtail,KimGlazebrook/wagtail-experiment,quru/wagtail,Toshakins/wagtail,tangentlabs/wagtail,WQuanfeng/wagtail,kurtw/wagtail,mixxorz/wagtail,hanpama/wagtail,takeshineshiro/wagtail,benjaoming/wagtail,jordij/wagtail,wagtail/wagtail
|
from __future__ import unicode_literals
from six import text_type
+
+ try:
+ # renamed util -> utils in Django 1.7; try the new name first
+ from django.forms.utils import flatatt
+ except ImportError:
+ from django.forms.util import flatatt
from django.utils.text import slugify
from django.utils.html import format_html
class MenuItem(object):
- def __init__(self, label, url, name=None, classnames='', order=1000):
+ def __init__(self, label, url, name=None, classnames='', attrs=None, order=1000):
self.label = label
self.url = url
self.classnames = classnames
self.name = (name or slugify(text_type(label)))
self.order = order
+ if attrs:
+ self.attr_string = flatatt(attrs)
+ else:
+ self.attr_string = ""
+
def render_html(self):
return format_html(
- """<li class="menu-{0}"><a href="{1}" class="{2}">{3}</a></li>""",
+ """<li class="menu-{0}"><a href="{1}" class="{2}"{3}>{4}</a></li>""",
- self.name, self.url, self.classnames, self.label)
+ self.name, self.url, self.classnames, self.attr_string, self.label)
|
Support passing html attributes into MenuItem
|
## Code Before:
from __future__ import unicode_literals
from six import text_type
from django.utils.text import slugify
from django.utils.html import format_html
class MenuItem(object):
def __init__(self, label, url, name=None, classnames='', order=1000):
self.label = label
self.url = url
self.classnames = classnames
self.name = (name or slugify(text_type(label)))
self.order = order
def render_html(self):
return format_html(
"""<li class="menu-{0}"><a href="{1}" class="{2}">{3}</a></li>""",
self.name, self.url, self.classnames, self.label)
## Instruction:
Support passing html attributes into MenuItem
## Code After:
from __future__ import unicode_literals
from six import text_type
try:
# renamed util -> utils in Django 1.7; try the new name first
from django.forms.utils import flatatt
except ImportError:
from django.forms.util import flatatt
from django.utils.text import slugify
from django.utils.html import format_html
class MenuItem(object):
def __init__(self, label, url, name=None, classnames='', attrs=None, order=1000):
self.label = label
self.url = url
self.classnames = classnames
self.name = (name or slugify(text_type(label)))
self.order = order
if attrs:
self.attr_string = flatatt(attrs)
else:
self.attr_string = ""
def render_html(self):
return format_html(
"""<li class="menu-{0}"><a href="{1}" class="{2}"{3}>{4}</a></li>""",
self.name, self.url, self.classnames, self.attr_string, self.label)
|
# ... existing code ...
from six import text_type
try:
# renamed util -> utils in Django 1.7; try the new name first
from django.forms.utils import flatatt
except ImportError:
from django.forms.util import flatatt
# ... modified code ...
class MenuItem(object):
def __init__(self, label, url, name=None, classnames='', attrs=None, order=1000):
self.label = label
...
if attrs:
self.attr_string = flatatt(attrs)
else:
self.attr_string = ""
def render_html(self):
...
return format_html(
"""<li class="menu-{0}"><a href="{1}" class="{2}"{3}>{4}</a></li>""",
self.name, self.url, self.classnames, self.attr_string, self.label)
# ... rest of the code ...
|
451951b311ef6e2bb76348a116dc0465f735348e
|
pytest_watch/config.py
|
pytest_watch/config.py
|
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import ConfigParser
import pytest
CLI_OPTION_PREFIX = '--'
class CollectConfig(object):
"""
A pytest plugin to gets the configuration file.
"""
def __init__(self):
self.path = None
def pytest_cmdline_main(self, config):
self.path = str(config.inifile)
def merge_config(args):
collect_config = CollectConfig()
pytest.main(['--collect-only'], plugins=[collect_config])
if not collect_config.path:
return
config = ConfigParser()
config.read(collect_config.path)
if not config.has_section('pytest-watch'):
return
for cli_name in args:
if not cli_name.startswith(CLI_OPTION_PREFIX):
continue
config_name = cli_name[len(CLI_OPTION_PREFIX):]
# Let CLI options take precedence
if args[cli_name]:
continue
# Find config option
if not config.has_option('pytest-watch', config_name):
continue
# Merge config option using the expected type
if isinstance(args[cli_name], bool):
args[cli_name] = config.getboolean('pytest-watch', config_name)
else:
args[cli_name] = config.get('pytest-watch', config_name)
|
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import ConfigParser
import pytest
CLI_OPTION_PREFIX = '--'
class CollectConfig(object):
"""
A pytest plugin to gets the configuration file.
"""
def __init__(self):
self.path = None
def pytest_cmdline_main(self, config):
if config.inifile:
self.path = str(config.inifile)
def merge_config(args):
collect_config = CollectConfig()
pytest.main(['--collect-only'], plugins=[collect_config])
if not collect_config.path:
return
config = ConfigParser()
config.read(collect_config.path)
if not config.has_section('pytest-watch'):
return
for cli_name in args:
if not cli_name.startswith(CLI_OPTION_PREFIX):
continue
config_name = cli_name[len(CLI_OPTION_PREFIX):]
# Let CLI options take precedence
if args[cli_name]:
continue
# Find config option
if not config.has_option('pytest-watch', config_name):
continue
# Merge config option using the expected type
if isinstance(args[cli_name], bool):
args[cli_name] = config.getboolean('pytest-watch', config_name)
else:
args[cli_name] = config.get('pytest-watch', config_name)
|
Fix running when pytest.ini is not present.
|
Fix running when pytest.ini is not present.
|
Python
|
mit
|
joeyespo/pytest-watch
|
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import ConfigParser
import pytest
CLI_OPTION_PREFIX = '--'
class CollectConfig(object):
"""
A pytest plugin to gets the configuration file.
"""
def __init__(self):
self.path = None
def pytest_cmdline_main(self, config):
+ if config.inifile:
- self.path = str(config.inifile)
+ self.path = str(config.inifile)
def merge_config(args):
collect_config = CollectConfig()
pytest.main(['--collect-only'], plugins=[collect_config])
if not collect_config.path:
return
config = ConfigParser()
config.read(collect_config.path)
if not config.has_section('pytest-watch'):
return
for cli_name in args:
if not cli_name.startswith(CLI_OPTION_PREFIX):
continue
config_name = cli_name[len(CLI_OPTION_PREFIX):]
# Let CLI options take precedence
if args[cli_name]:
continue
# Find config option
if not config.has_option('pytest-watch', config_name):
continue
# Merge config option using the expected type
if isinstance(args[cli_name], bool):
args[cli_name] = config.getboolean('pytest-watch', config_name)
else:
args[cli_name] = config.get('pytest-watch', config_name)
|
Fix running when pytest.ini is not present.
|
## Code Before:
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import ConfigParser
import pytest
CLI_OPTION_PREFIX = '--'
class CollectConfig(object):
"""
A pytest plugin to gets the configuration file.
"""
def __init__(self):
self.path = None
def pytest_cmdline_main(self, config):
self.path = str(config.inifile)
def merge_config(args):
collect_config = CollectConfig()
pytest.main(['--collect-only'], plugins=[collect_config])
if not collect_config.path:
return
config = ConfigParser()
config.read(collect_config.path)
if not config.has_section('pytest-watch'):
return
for cli_name in args:
if not cli_name.startswith(CLI_OPTION_PREFIX):
continue
config_name = cli_name[len(CLI_OPTION_PREFIX):]
# Let CLI options take precedence
if args[cli_name]:
continue
# Find config option
if not config.has_option('pytest-watch', config_name):
continue
# Merge config option using the expected type
if isinstance(args[cli_name], bool):
args[cli_name] = config.getboolean('pytest-watch', config_name)
else:
args[cli_name] = config.get('pytest-watch', config_name)
## Instruction:
Fix running when pytest.ini is not present.
## Code After:
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import ConfigParser
import pytest
CLI_OPTION_PREFIX = '--'
class CollectConfig(object):
"""
A pytest plugin to gets the configuration file.
"""
def __init__(self):
self.path = None
def pytest_cmdline_main(self, config):
if config.inifile:
self.path = str(config.inifile)
def merge_config(args):
collect_config = CollectConfig()
pytest.main(['--collect-only'], plugins=[collect_config])
if not collect_config.path:
return
config = ConfigParser()
config.read(collect_config.path)
if not config.has_section('pytest-watch'):
return
for cli_name in args:
if not cli_name.startswith(CLI_OPTION_PREFIX):
continue
config_name = cli_name[len(CLI_OPTION_PREFIX):]
# Let CLI options take precedence
if args[cli_name]:
continue
# Find config option
if not config.has_option('pytest-watch', config_name):
continue
# Merge config option using the expected type
if isinstance(args[cli_name], bool):
args[cli_name] = config.getboolean('pytest-watch', config_name)
else:
args[cli_name] = config.get('pytest-watch', config_name)
|
...
def pytest_cmdline_main(self, config):
if config.inifile:
self.path = str(config.inifile)
...
|
6cf3baed6e5f707e5c307388018f4bb3121327f9
|
nanoservice/config.py
|
nanoservice/config.py
|
""" Read configuration for a service from a json file """
import io
import json
from .client import Client
from .error import ConfigError
def load(filepath=None, filecontent=None, clients=True):
""" Read the json file located at `filepath`
If `filecontent` is specified, its content will be json decoded
and loaded instead. The `clients` arg is a binary flag
which specifies whether the endpoints present in config (`filecontent`),
should be used to create `Client` objects.
Usage:
config.load(filepath=None, filecontent=None):
Provide either a filepath or a json string
"""
conf = {}
# Read json configuration
assert filepath or filecontent
if not filecontent:
with io.FileIO(filepath) as fh:
filecontent = fh.read().decode('utf-8')
configs = json.loads(filecontent)
if 'service.endpoint' not in configs:
raise ConfigError('Missing `service.endpoint` from config file')
# Update the conf items (Create clients if necessary)
for key, value in configs.items():
conf[key] = value
if key.endswith('.endpoint') and clients:
conf[key] = Client(value)
return conf
|
""" Read configuration for a service from a json file """
import io
import json
from .client import Client
from .error import ConfigError
class DotDict(dict):
""" Access a dictionary like an object """
def __getattr__(self, key):
return self[key]
def __setattr__(self, key, value):
self[key] = value
def load(filepath=None, filecontent=None, clients=True):
""" Read the json file located at `filepath`
If `filecontent` is specified, its content will be json decoded
and loaded instead. The `clients` arg is a binary flag
which specifies whether the endpoints present in config (`filecontent`),
should be used to create `Client` objects.
Usage:
config.load(filepath=None, filecontent=None):
Provide either a filepath or a json string
"""
conf = DotDict()
# Read json configuration
assert filepath or filecontent
if not filecontent:
with io.FileIO(filepath) as fh:
filecontent = fh.read().decode('utf-8')
configs = json.loads(filecontent)
if 'service.endpoint' not in configs:
raise ConfigError('Missing `service.endpoint` from config file')
# Update the conf items (Create clients if necessary)
for key, value in configs.items():
conf[key] = value
if key.endswith('.endpoint') and clients:
conf[key] = Client(value)
return conf
|
Access the conf like a object
|
Access the conf like a object
|
Python
|
mit
|
walkr/nanoservice
|
""" Read configuration for a service from a json file """
import io
import json
from .client import Client
from .error import ConfigError
+
+
+ class DotDict(dict):
+ """ Access a dictionary like an object """
+
+ def __getattr__(self, key):
+ return self[key]
+
+ def __setattr__(self, key, value):
+ self[key] = value
def load(filepath=None, filecontent=None, clients=True):
""" Read the json file located at `filepath`
If `filecontent` is specified, its content will be json decoded
and loaded instead. The `clients` arg is a binary flag
which specifies whether the endpoints present in config (`filecontent`),
should be used to create `Client` objects.
Usage:
config.load(filepath=None, filecontent=None):
Provide either a filepath or a json string
"""
- conf = {}
+ conf = DotDict()
# Read json configuration
assert filepath or filecontent
if not filecontent:
with io.FileIO(filepath) as fh:
filecontent = fh.read().decode('utf-8')
configs = json.loads(filecontent)
if 'service.endpoint' not in configs:
raise ConfigError('Missing `service.endpoint` from config file')
# Update the conf items (Create clients if necessary)
for key, value in configs.items():
conf[key] = value
if key.endswith('.endpoint') and clients:
conf[key] = Client(value)
return conf
|
Access the conf like a object
|
## Code Before:
""" Read configuration for a service from a json file """
import io
import json
from .client import Client
from .error import ConfigError
def load(filepath=None, filecontent=None, clients=True):
""" Read the json file located at `filepath`
If `filecontent` is specified, its content will be json decoded
and loaded instead. The `clients` arg is a binary flag
which specifies whether the endpoints present in config (`filecontent`),
should be used to create `Client` objects.
Usage:
config.load(filepath=None, filecontent=None):
Provide either a filepath or a json string
"""
conf = {}
# Read json configuration
assert filepath or filecontent
if not filecontent:
with io.FileIO(filepath) as fh:
filecontent = fh.read().decode('utf-8')
configs = json.loads(filecontent)
if 'service.endpoint' not in configs:
raise ConfigError('Missing `service.endpoint` from config file')
# Update the conf items (Create clients if necessary)
for key, value in configs.items():
conf[key] = value
if key.endswith('.endpoint') and clients:
conf[key] = Client(value)
return conf
## Instruction:
Access the conf like a object
## Code After:
""" Read configuration for a service from a json file """
import io
import json
from .client import Client
from .error import ConfigError
class DotDict(dict):
""" Access a dictionary like an object """
def __getattr__(self, key):
return self[key]
def __setattr__(self, key, value):
self[key] = value
def load(filepath=None, filecontent=None, clients=True):
""" Read the json file located at `filepath`
If `filecontent` is specified, its content will be json decoded
and loaded instead. The `clients` arg is a binary flag
which specifies whether the endpoints present in config (`filecontent`),
should be used to create `Client` objects.
Usage:
config.load(filepath=None, filecontent=None):
Provide either a filepath or a json string
"""
conf = DotDict()
# Read json configuration
assert filepath or filecontent
if not filecontent:
with io.FileIO(filepath) as fh:
filecontent = fh.read().decode('utf-8')
configs = json.loads(filecontent)
if 'service.endpoint' not in configs:
raise ConfigError('Missing `service.endpoint` from config file')
# Update the conf items (Create clients if necessary)
for key, value in configs.items():
conf[key] = value
if key.endswith('.endpoint') and clients:
conf[key] = Client(value)
return conf
|
// ... existing code ...
from .error import ConfigError
class DotDict(dict):
""" Access a dictionary like an object """
def __getattr__(self, key):
return self[key]
def __setattr__(self, key, value):
self[key] = value
// ... modified code ...
"""
conf = DotDict()
// ... rest of the code ...
|
33f7e94385a8d4fbba797fc81b2565906604c9a4
|
src/zeit/content/cp/browser/area.py
|
src/zeit/content/cp/browser/area.py
|
import zeit.content.cp.browser.blocks.teaser
import zeit.content.cp.interfaces
import zeit.edit.browser.block
import zeit.edit.browser.view
import zope.formlib.form
class ViewletManager(zeit.edit.browser.block.BlockViewletManager):
@property
def css_class(self):
classes = super(ViewletManager, self).css_class
return ' '.join(['editable-area', classes])
class EditProperties(zeit.content.cp.browser.blocks.teaser.EditLayout):
interface = zeit.content.cp.interfaces.IArea
layout_prefix = 'teaserbar' # XXX should be area
layouts = ()
form_fields = zope.formlib.form.Fields() # XXX implement me
class EditCommon(zeit.edit.browser.view.EditBox):
form_fields = zope.formlib.form.Fields(
zeit.content.cp.interfaces.IArea).select(
'supertitle', 'teaserText', 'background_color')
form_fields['background_color'].custom_widget = (
zeit.cms.browser.widget.ColorpickerWidget)
|
import zeit.content.cp.browser.blocks.teaser
import zeit.content.cp.interfaces
import zeit.edit.browser.block
import zeit.edit.browser.view
import zope.formlib.form
class ViewletManager(zeit.edit.browser.block.BlockViewletManager):
@property
def css_class(self):
classes = super(ViewletManager, self).css_class
return ' '.join(['editable-area', classes])
class EditProperties(zeit.content.cp.browser.blocks.teaser.EditLayout):
interface = zeit.content.cp.interfaces.IArea
layout_prefix = 'teaserbar' # XXX should be area
layouts = ()
class EditCommon(zeit.edit.browser.view.EditBox):
form_fields = zope.formlib.form.Fields(
zeit.content.cp.interfaces.IArea).select(
'supertitle', 'teaserText', 'background_color')
form_fields['background_color'].custom_widget = (
zeit.cms.browser.widget.ColorpickerWidget)
|
Remove field that has now the same default implementation on it's super class.
|
Remove field that has now the same default implementation on it's super class.
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.content.cp,ZeitOnline/zeit.content.cp
|
import zeit.content.cp.browser.blocks.teaser
import zeit.content.cp.interfaces
import zeit.edit.browser.block
import zeit.edit.browser.view
import zope.formlib.form
class ViewletManager(zeit.edit.browser.block.BlockViewletManager):
@property
def css_class(self):
classes = super(ViewletManager, self).css_class
return ' '.join(['editable-area', classes])
class EditProperties(zeit.content.cp.browser.blocks.teaser.EditLayout):
interface = zeit.content.cp.interfaces.IArea
layout_prefix = 'teaserbar' # XXX should be area
layouts = ()
- form_fields = zope.formlib.form.Fields() # XXX implement me
-
class EditCommon(zeit.edit.browser.view.EditBox):
form_fields = zope.formlib.form.Fields(
zeit.content.cp.interfaces.IArea).select(
'supertitle', 'teaserText', 'background_color')
form_fields['background_color'].custom_widget = (
zeit.cms.browser.widget.ColorpickerWidget)
|
Remove field that has now the same default implementation on it's super class.
|
## Code Before:
import zeit.content.cp.browser.blocks.teaser
import zeit.content.cp.interfaces
import zeit.edit.browser.block
import zeit.edit.browser.view
import zope.formlib.form
class ViewletManager(zeit.edit.browser.block.BlockViewletManager):
@property
def css_class(self):
classes = super(ViewletManager, self).css_class
return ' '.join(['editable-area', classes])
class EditProperties(zeit.content.cp.browser.blocks.teaser.EditLayout):
interface = zeit.content.cp.interfaces.IArea
layout_prefix = 'teaserbar' # XXX should be area
layouts = ()
form_fields = zope.formlib.form.Fields() # XXX implement me
class EditCommon(zeit.edit.browser.view.EditBox):
form_fields = zope.formlib.form.Fields(
zeit.content.cp.interfaces.IArea).select(
'supertitle', 'teaserText', 'background_color')
form_fields['background_color'].custom_widget = (
zeit.cms.browser.widget.ColorpickerWidget)
## Instruction:
Remove field that has now the same default implementation on it's super class.
## Code After:
import zeit.content.cp.browser.blocks.teaser
import zeit.content.cp.interfaces
import zeit.edit.browser.block
import zeit.edit.browser.view
import zope.formlib.form
class ViewletManager(zeit.edit.browser.block.BlockViewletManager):
@property
def css_class(self):
classes = super(ViewletManager, self).css_class
return ' '.join(['editable-area', classes])
class EditProperties(zeit.content.cp.browser.blocks.teaser.EditLayout):
interface = zeit.content.cp.interfaces.IArea
layout_prefix = 'teaserbar' # XXX should be area
layouts = ()
class EditCommon(zeit.edit.browser.view.EditBox):
form_fields = zope.formlib.form.Fields(
zeit.content.cp.interfaces.IArea).select(
'supertitle', 'teaserText', 'background_color')
form_fields['background_color'].custom_widget = (
zeit.cms.browser.widget.ColorpickerWidget)
|
// ... existing code ...
// ... rest of the code ...
|
590494bf9d840cb6353260392b94700656db5d47
|
fabfile/__init__.py
|
fabfile/__init__.py
|
from __future__ import with_statement
import nose
from fabric.api import abort, local, task
import tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color tests"
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag, push tag to Github, & upload new version to PyPI.
"""
tag.tag(force=force, push='yes')
upload()
|
from __future__ import with_statement
import nose
from fabric.api import abort, local, task
import tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
# Default to explicitly targeting the 'tests' folder, but only if nothing
# is being overridden.
tests = "" if args else " tests"
default_args = "-sv --with-doctest --nologcapture --with-color %s" % tests
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag, push tag to Github, & upload new version to PyPI.
"""
tag.tag(force=force, push='yes')
upload()
|
Fix super dumb mistake causing all test runs to hit tests folder.
|
Fix super dumb mistake causing all test runs to hit tests folder.
This causes integration level tests to run both test suites.
Oops!
|
Python
|
bsd-2-clause
|
cgvarela/fabric,raimon49/fabric,elijah513/fabric,StackStorm/fabric,amaniak/fabric,kmonsoor/fabric,mathiasertl/fabric,opavader/fabric,bspink/fabric,fernandezcuesta/fabric,pgroudas/fabric,likesxuqiang/fabric,xLegoz/fabric,tekapo/fabric,jaraco/fabric,bitmonk/fabric,sdelements/fabric,kxxoling/fabric,itoed/fabric,SamuelMarks/fabric,qinrong/fabric,TarasRudnyk/fabric,cmattoon/fabric,haridsv/fabric,rodrigc/fabric,ploxiln/fabric,askulkarni2/fabric,rane-hs/fabric-py3,rbramwell/fabric,tolbkni/fabric
|
from __future__ import with_statement
import nose
from fabric.api import abort, local, task
import tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
+ # Default to explicitly targeting the 'tests' folder, but only if nothing
+ # is being overridden.
+ tests = "" if args else " tests"
- default_args = "-sv --with-doctest --nologcapture --with-color tests"
+ default_args = "-sv --with-doctest --nologcapture --with-color %s" % tests
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag, push tag to Github, & upload new version to PyPI.
"""
tag.tag(force=force, push='yes')
upload()
|
Fix super dumb mistake causing all test runs to hit tests folder.
|
## Code Before:
from __future__ import with_statement
import nose
from fabric.api import abort, local, task
import tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color tests"
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag, push tag to Github, & upload new version to PyPI.
"""
tag.tag(force=force, push='yes')
upload()
## Instruction:
Fix super dumb mistake causing all test runs to hit tests folder.
## Code After:
from __future__ import with_statement
import nose
from fabric.api import abort, local, task
import tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
# Default to explicitly targeting the 'tests' folder, but only if nothing
# is being overridden.
tests = "" if args else " tests"
default_args = "-sv --with-doctest --nologcapture --with-color %s" % tests
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag, push tag to Github, & upload new version to PyPI.
"""
tag.tag(force=force, push='yes')
upload()
|
# ... existing code ...
"""
# Default to explicitly targeting the 'tests' folder, but only if nothing
# is being overridden.
tests = "" if args else " tests"
default_args = "-sv --with-doctest --nologcapture --with-color %s" % tests
default_args += (" " + args) if args else ""
# ... rest of the code ...
|
7106317db23165220754f1cf45e7a8d30a9a76db
|
dyfunconn/fc/cos.py
|
dyfunconn/fc/cos.py
|
from ..analytic_signal import analytic_signal
import numpy as np
def cos(data, fb=None, fs=None, pairs=None):
"""
"""
n_samples, n_rois = np.shape(data)
X = None
if fb is not None and fs is not None:
_, uphases, _ = analytic_signal(data, fb, fs)
X = uphases
else:
X = data
conn_mtx = np.zeros((n_rois, n_rois), dtype=np.float32)
for k in range(n_rois):
for l in range(k + 1, n_rois):
val = np.sum(np.cos(X[k, :] - X[l, : ])) / np.float32(n_samples)
val = np.abs(val)
conn_mtx[k, l] = val
return conn_mtx
|
from ..analytic_signal import analytic_signal
import numpy as np
def cos(data, fb=None, fs=None, pairs=None):
"""
"""
n_rois, n_samples = np.shape(data)
X = None
if fb is not None and fs is not None:
_, uphases, _ = analytic_signal(data, fb, fs)
X = uphases
else:
X = data
conn_mtx = np.zeros((n_rois, n_rois), dtype=np.float32)
for k in range(n_rois):
for l in range(k + 1, n_rois):
val = np.sum(np.cos(X[k, :] - X[l, :])) / np.float32(n_samples)
val = np.abs(val)
conn_mtx[k, l] = val
return conn_mtx
|
Change the order of shape.
|
Change the order of shape.
|
Python
|
bsd-3-clause
|
makism/dyfunconn
|
from ..analytic_signal import analytic_signal
import numpy as np
def cos(data, fb=None, fs=None, pairs=None):
"""
"""
- n_samples, n_rois = np.shape(data)
+ n_rois, n_samples = np.shape(data)
X = None
if fb is not None and fs is not None:
_, uphases, _ = analytic_signal(data, fb, fs)
X = uphases
else:
X = data
conn_mtx = np.zeros((n_rois, n_rois), dtype=np.float32)
for k in range(n_rois):
for l in range(k + 1, n_rois):
- val = np.sum(np.cos(X[k, :] - X[l, : ])) / np.float32(n_samples)
+ val = np.sum(np.cos(X[k, :] - X[l, :])) / np.float32(n_samples)
val = np.abs(val)
conn_mtx[k, l] = val
return conn_mtx
|
Change the order of shape.
|
## Code Before:
from ..analytic_signal import analytic_signal
import numpy as np
def cos(data, fb=None, fs=None, pairs=None):
"""
"""
n_samples, n_rois = np.shape(data)
X = None
if fb is not None and fs is not None:
_, uphases, _ = analytic_signal(data, fb, fs)
X = uphases
else:
X = data
conn_mtx = np.zeros((n_rois, n_rois), dtype=np.float32)
for k in range(n_rois):
for l in range(k + 1, n_rois):
val = np.sum(np.cos(X[k, :] - X[l, : ])) / np.float32(n_samples)
val = np.abs(val)
conn_mtx[k, l] = val
return conn_mtx
## Instruction:
Change the order of shape.
## Code After:
from ..analytic_signal import analytic_signal
import numpy as np
def cos(data, fb=None, fs=None, pairs=None):
"""
"""
n_rois, n_samples = np.shape(data)
X = None
if fb is not None and fs is not None:
_, uphases, _ = analytic_signal(data, fb, fs)
X = uphases
else:
X = data
conn_mtx = np.zeros((n_rois, n_rois), dtype=np.float32)
for k in range(n_rois):
for l in range(k + 1, n_rois):
val = np.sum(np.cos(X[k, :] - X[l, :])) / np.float32(n_samples)
val = np.abs(val)
conn_mtx[k, l] = val
return conn_mtx
|
...
"""
n_rois, n_samples = np.shape(data)
...
for l in range(k + 1, n_rois):
val = np.sum(np.cos(X[k, :] - X[l, :])) / np.float32(n_samples)
val = np.abs(val)
...
|
2e040a77b70b4a07227f5aa3cb3aee6b8c84f4e0
|
src/livedumper/common.py
|
src/livedumper/common.py
|
"Common functions that may be used everywhere"
from __future__ import print_function
import os
import sys
from distutils.util import strtobool
def yes_no_query(question):
"""Ask the user *question* for 'yes' or 'no'; ask again until user
inputs a valid option.
Returns:
'True' if user answered 'y', 'yes', 't', 'true', 'on' or '1'.
'False' if user answered 'n', 'no', 'f', 'false', 'off' or '0'.
"""
print("{} (y/n)".format(question), end=" "),
while True:
try:
return strtobool(input().lower())
except ValueError:
print("Please respond with 'y' or 'n'.")
def ask_overwrite(dest):
"""Check if file *dest* exists. If 'True', asks if the user wants
to overwrite it (just remove the file for later overwrite).
"""
msg = "File '{}' already exists. Overwrite file?".format(dest)
if os.path.exists(dest):
if yes_no_query(msg):
os.remove(dest)
else:
sys.exit("Cancelling operation...")
|
"Common functions that may be used everywhere"
from __future__ import print_function
import os
import sys
from distutils.util import strtobool
try:
input = raw_input
except NameError:
pass
def yes_no_query(question):
"""Ask the user *question* for 'yes' or 'no'; ask again until user
inputs a valid option.
Returns:
'True' if user answered 'y', 'yes', 't', 'true', 'on' or '1'.
'False' if user answered 'n', 'no', 'f', 'false', 'off' or '0'.
"""
print("{} (y/n)".format(question), end=" "),
while True:
try:
return strtobool(input().lower())
except ValueError:
print("Please respond with 'y' or 'n'.")
def ask_overwrite(dest):
"""Check if file *dest* exists. If 'True', asks if the user wants
to overwrite it (just remove the file for later overwrite).
"""
msg = "File '{}' already exists. Overwrite file?".format(dest)
if os.path.exists(dest):
if yes_no_query(msg):
os.remove(dest)
else:
sys.exit("Cancelling operation...")
|
Fix Python 2 compatibility, again
|
Fix Python 2 compatibility, again
|
Python
|
bsd-2-clause
|
m45t3r/livedumper
|
"Common functions that may be used everywhere"
from __future__ import print_function
import os
import sys
from distutils.util import strtobool
+
+ try:
+ input = raw_input
+ except NameError:
+ pass
def yes_no_query(question):
"""Ask the user *question* for 'yes' or 'no'; ask again until user
inputs a valid option.
Returns:
'True' if user answered 'y', 'yes', 't', 'true', 'on' or '1'.
'False' if user answered 'n', 'no', 'f', 'false', 'off' or '0'.
"""
print("{} (y/n)".format(question), end=" "),
while True:
try:
return strtobool(input().lower())
except ValueError:
print("Please respond with 'y' or 'n'.")
def ask_overwrite(dest):
"""Check if file *dest* exists. If 'True', asks if the user wants
to overwrite it (just remove the file for later overwrite).
"""
msg = "File '{}' already exists. Overwrite file?".format(dest)
if os.path.exists(dest):
if yes_no_query(msg):
os.remove(dest)
else:
sys.exit("Cancelling operation...")
|
Fix Python 2 compatibility, again
|
## Code Before:
"Common functions that may be used everywhere"
from __future__ import print_function
import os
import sys
from distutils.util import strtobool
def yes_no_query(question):
"""Ask the user *question* for 'yes' or 'no'; ask again until user
inputs a valid option.
Returns:
'True' if user answered 'y', 'yes', 't', 'true', 'on' or '1'.
'False' if user answered 'n', 'no', 'f', 'false', 'off' or '0'.
"""
print("{} (y/n)".format(question), end=" "),
while True:
try:
return strtobool(input().lower())
except ValueError:
print("Please respond with 'y' or 'n'.")
def ask_overwrite(dest):
"""Check if file *dest* exists. If 'True', asks if the user wants
to overwrite it (just remove the file for later overwrite).
"""
msg = "File '{}' already exists. Overwrite file?".format(dest)
if os.path.exists(dest):
if yes_no_query(msg):
os.remove(dest)
else:
sys.exit("Cancelling operation...")
## Instruction:
Fix Python 2 compatibility, again
## Code After:
"Common functions that may be used everywhere"
from __future__ import print_function
import os
import sys
from distutils.util import strtobool
try:
input = raw_input
except NameError:
pass
def yes_no_query(question):
"""Ask the user *question* for 'yes' or 'no'; ask again until user
inputs a valid option.
Returns:
'True' if user answered 'y', 'yes', 't', 'true', 'on' or '1'.
'False' if user answered 'n', 'no', 'f', 'false', 'off' or '0'.
"""
print("{} (y/n)".format(question), end=" "),
while True:
try:
return strtobool(input().lower())
except ValueError:
print("Please respond with 'y' or 'n'.")
def ask_overwrite(dest):
"""Check if file *dest* exists. If 'True', asks if the user wants
to overwrite it (just remove the file for later overwrite).
"""
msg = "File '{}' already exists. Overwrite file?".format(dest)
if os.path.exists(dest):
if yes_no_query(msg):
os.remove(dest)
else:
sys.exit("Cancelling operation...")
|
...
from distutils.util import strtobool
try:
input = raw_input
except NameError:
pass
...
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.