commit
stringlengths 40
40
| old_file
stringlengths 4
106
| new_file
stringlengths 4
106
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
2.95k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
43k
| ndiff
stringlengths 52
3.31k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| diff
stringlengths 49
3.61k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ca4dc40c14426a97c532263135b885c45dcc8e77
|
account_payment_mode/models/res_partner_bank.py
|
account_payment_mode/models/res_partner_bank.py
|
from openerp import models, fields
class ResPartnerBank(models.Model):
_inherit = 'res.partner.bank'
# TODO: It doesn't work, I don't understand why
# So I change the label of the field in the view
acc_type = fields.Char(string='Bank Account Type')
|
from openerp import models, fields
class ResPartnerBank(models.Model):
_inherit = 'res.partner.bank'
# I also have to change the label of the field in the view
# I store the field, so that we can do groupby and search on it
acc_type = fields.Char(string='Bank Account Type', store=True)
|
Store field acc_type on res.partner.bank, so that we can search and groupby on it
|
Store field acc_type on res.partner.bank, so that we can search and groupby on it
|
Python
|
agpl-3.0
|
CompassionCH/bank-payment,CompassionCH/bank-payment
|
from openerp import models, fields
class ResPartnerBank(models.Model):
_inherit = 'res.partner.bank'
- # TODO: It doesn't work, I don't understand why
- # So I change the label of the field in the view
+ # I also have to change the label of the field in the view
+ # I store the field, so that we can do groupby and search on it
- acc_type = fields.Char(string='Bank Account Type')
+ acc_type = fields.Char(string='Bank Account Type', store=True)
|
Store field acc_type on res.partner.bank, so that we can search and groupby on it
|
## Code Before:
from openerp import models, fields
class ResPartnerBank(models.Model):
_inherit = 'res.partner.bank'
# TODO: It doesn't work, I don't understand why
# So I change the label of the field in the view
acc_type = fields.Char(string='Bank Account Type')
## Instruction:
Store field acc_type on res.partner.bank, so that we can search and groupby on it
## Code After:
from openerp import models, fields
class ResPartnerBank(models.Model):
_inherit = 'res.partner.bank'
# I also have to change the label of the field in the view
# I store the field, so that we can do groupby and search on it
acc_type = fields.Char(string='Bank Account Type', store=True)
|
from openerp import models, fields
class ResPartnerBank(models.Model):
_inherit = 'res.partner.bank'
- # TODO: It doesn't work, I don't understand why
- # So I change the label of the field in the view
? ^ ^
+ # I also have to change the label of the field in the view
? ^^^^^ ^^^^^^^
+ # I store the field, so that we can do groupby and search on it
- acc_type = fields.Char(string='Bank Account Type')
+ acc_type = fields.Char(string='Bank Account Type', store=True)
? ++++++++++++
|
5683aa0d2674214050ed1ea97e528ba39e39b126
|
cosmos/cli.py
|
cosmos/cli.py
|
import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
if not os.path.exists(value):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
@click.option('--location', type=str,
help='input location name(city, country)', prompt=True)
@click.option('--filename', type=str, callback=validate_filename,
help='output file name', prompt=True)
@click.option('--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
default='roads', help='data type')
@click.option('--bbox', type=(float, float, float, float),
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
|
import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
if os.path.dirname(value) and not os.path.isdir(os.path.dirname(value)):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
@click.option('-l', '--location', type=str,
help='input location name(city, country)', prompt=True)
@click.option('-f', '--filename', type=str, callback=validate_filename,
help='output file name', prompt=True)
@click.option('-d', '--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
default='roads', help='data type')
@click.option('-b', '--bbox', type=(float, float, float, float),
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
|
Fix for checking directory and short params.
|
Fix for checking directory and short params.
|
Python
|
mit
|
astrosat/cOSMos
|
import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
- if not os.path.exists(value):
+ if os.path.dirname(value) and not os.path.isdir(os.path.dirname(value)):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
- @click.option('--location', type=str,
+ @click.option('-l', '--location', type=str,
help='input location name(city, country)', prompt=True)
- @click.option('--filename', type=str, callback=validate_filename,
+ @click.option('-f', '--filename', type=str, callback=validate_filename,
help='output file name', prompt=True)
- @click.option('--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
+ @click.option('-d', '--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
default='roads', help='data type')
- @click.option('--bbox', type=(float, float, float, float),
+ @click.option('-b', '--bbox', type=(float, float, float, float),
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
|
Fix for checking directory and short params.
|
## Code Before:
import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
if not os.path.exists(value):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
@click.option('--location', type=str,
help='input location name(city, country)', prompt=True)
@click.option('--filename', type=str, callback=validate_filename,
help='output file name', prompt=True)
@click.option('--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
default='roads', help='data type')
@click.option('--bbox', type=(float, float, float, float),
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
## Instruction:
Fix for checking directory and short params.
## Code After:
import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
if os.path.dirname(value) and not os.path.isdir(os.path.dirname(value)):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
@click.option('-l', '--location', type=str,
help='input location name(city, country)', prompt=True)
@click.option('-f', '--filename', type=str, callback=validate_filename,
help='output file name', prompt=True)
@click.option('-d', '--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
default='roads', help='data type')
@click.option('-b', '--bbox', type=(float, float, float, float),
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
|
import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
- if not os.path.exists(value):
+ if os.path.dirname(value) and not os.path.isdir(os.path.dirname(value)):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
- @click.option('--location', type=str,
+ @click.option('-l', '--location', type=str,
? ++++++
help='input location name(city, country)', prompt=True)
- @click.option('--filename', type=str, callback=validate_filename,
+ @click.option('-f', '--filename', type=str, callback=validate_filename,
? ++++++
help='output file name', prompt=True)
- @click.option('--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
+ @click.option('-d', '--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
? ++++++
default='roads', help='data type')
- @click.option('--bbox', type=(float, float, float, float),
+ @click.option('-b', '--bbox', type=(float, float, float, float),
? ++++++
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
|
a116c3eae892a73b11372225a9bdf0194db75598
|
glanerbeard/web.py
|
glanerbeard/web.py
|
import logging
from flask import (
Flask,
render_template,
abort
)
from glanerbeard.server import Server
app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')
numeric_level = getattr(logging, app.config['LOGLEVEL'].upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)
servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])
@app.route('/')
def index():
shows = [server.getShows() for server in servers]
return str(shows)
if __name__ == '__main__':
app.debug = True
app.run()
|
import logging
from flask import (
Flask,
render_template,
abort
)
from glanerbeard.server import Server
app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')
numeric_level = getattr(logging, app.config['LOGLEVEL'].upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)
servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])
@app.route('/')
def index():
shows = [server.getShows() for server in servers]
return render_template('json.html', json=shows)
|
Use a template to render json.
|
Use a template to render json.
|
Python
|
apache-2.0
|
daenney/glanerbeard
|
import logging
from flask import (
Flask,
render_template,
abort
)
from glanerbeard.server import Server
app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')
numeric_level = getattr(logging, app.config['LOGLEVEL'].upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)
servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])
@app.route('/')
def index():
shows = [server.getShows() for server in servers]
- return str(shows)
+ return render_template('json.html', json=shows)
-
- if __name__ == '__main__':
- app.debug = True
- app.run()
-
|
Use a template to render json.
|
## Code Before:
import logging
from flask import (
Flask,
render_template,
abort
)
from glanerbeard.server import Server
app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')
numeric_level = getattr(logging, app.config['LOGLEVEL'].upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)
servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])
@app.route('/')
def index():
shows = [server.getShows() for server in servers]
return str(shows)
if __name__ == '__main__':
app.debug = True
app.run()
## Instruction:
Use a template to render json.
## Code After:
import logging
from flask import (
Flask,
render_template,
abort
)
from glanerbeard.server import Server
app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')
numeric_level = getattr(logging, app.config['LOGLEVEL'].upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)
servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])
@app.route('/')
def index():
shows = [server.getShows() for server in servers]
return render_template('json.html', json=shows)
|
import logging
from flask import (
Flask,
render_template,
abort
)
from glanerbeard.server import Server
app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')
numeric_level = getattr(logging, app.config['LOGLEVEL'].upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)
servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])
@app.route('/')
def index():
shows = [server.getShows() for server in servers]
+ return render_template('json.html', json=shows)
- return str(shows)
-
-
- if __name__ == '__main__':
- app.debug = True
- app.run()
|
1a8f67ec1eaa97aebe25d7d6625a237f8e1ce151
|
example/tests/integration/test_includes.py
|
example/tests/integration/test_includes.py
|
import pytest
from django.core.urlresolvers import reverse
from example.tests.utils import load_json
pytestmark = pytest.mark.django_db
def test_included_data_on_list(multiple_entries, client):
response = client.get(reverse("entry-list") + '?include=comments&page_size=5')
included = load_json(response.content).get('included')
assert len(load_json(response.content)['data']) == len(multiple_entries), 'Incorrect entry count'
assert [x.get('type') for x in included] == ['comments'], 'List included types are incorrect'
comment_count = len([resource for resource in included if resource["type"] == "comments"])
expected_comment_count = sum([entry.comment_set.count() for entry in multiple_entries])
assert comment_count == expected_comment_count, 'List comment count is incorrect'
def test_included_data_on_detail(single_entry, client):
response = client.get(reverse("entry-detail", kwargs={'pk': single_entry.pk}) + '?include=comments')
included = load_json(response.content).get('included')
assert [x.get('type') for x in included] == ['comments'], 'Detail included types are incorrect'
comment_count = len([resource for resource in included if resource["type"] == "comments"])
assert comment_count == single_entry.comment_set.count(), 'Detail comment count is incorrect'
|
import pytest
from django.core.urlresolvers import reverse
from example.tests.utils import load_json
pytestmark = pytest.mark.django_db
def test_included_data_on_list(multiple_entries, client):
response = client.get(reverse("entry-list") + '?include=comments&page_size=5')
included = load_json(response.content).get('included')
assert len(load_json(response.content)['data']) == len(multiple_entries), 'Incorrect entry count'
assert [x.get('type') for x in included] == ['comments', 'comments'], 'List included types are incorrect'
comment_count = len([resource for resource in included if resource["type"] == "comments"])
expected_comment_count = sum([entry.comment_set.count() for entry in multiple_entries])
assert comment_count == expected_comment_count, 'List comment count is incorrect'
def test_included_data_on_detail(single_entry, client):
response = client.get(reverse("entry-detail", kwargs={'pk': single_entry.pk}) + '?include=comments')
included = load_json(response.content).get('included')
assert [x.get('type') for x in included] == ['comments'], 'Detail included types are incorrect'
comment_count = len([resource for resource in included if resource["type"] == "comments"])
expected_comment_count = single_entry.comment_set.count()
assert comment_count == expected_comment_count, 'Detail comment count is incorrect'
|
Fix for test included_data_on_list included types check
|
Fix for test included_data_on_list included types check
|
Python
|
bsd-2-clause
|
django-json-api/rest_framework_ember,scottfisk/django-rest-framework-json-api,schtibe/django-rest-framework-json-api,Instawork/django-rest-framework-json-api,martinmaillard/django-rest-framework-json-api,lukaslundgren/django-rest-framework-json-api,django-json-api/django-rest-framework-json-api,leo-naeka/django-rest-framework-json-api,django-json-api/django-rest-framework-json-api,pombredanne/django-rest-framework-json-api,leo-naeka/rest_framework_ember,abdulhaq-e/django-rest-framework-json-api
|
import pytest
from django.core.urlresolvers import reverse
from example.tests.utils import load_json
pytestmark = pytest.mark.django_db
def test_included_data_on_list(multiple_entries, client):
response = client.get(reverse("entry-list") + '?include=comments&page_size=5')
included = load_json(response.content).get('included')
assert len(load_json(response.content)['data']) == len(multiple_entries), 'Incorrect entry count'
- assert [x.get('type') for x in included] == ['comments'], 'List included types are incorrect'
+ assert [x.get('type') for x in included] == ['comments', 'comments'], 'List included types are incorrect'
comment_count = len([resource for resource in included if resource["type"] == "comments"])
expected_comment_count = sum([entry.comment_set.count() for entry in multiple_entries])
assert comment_count == expected_comment_count, 'List comment count is incorrect'
def test_included_data_on_detail(single_entry, client):
response = client.get(reverse("entry-detail", kwargs={'pk': single_entry.pk}) + '?include=comments')
included = load_json(response.content).get('included')
assert [x.get('type') for x in included] == ['comments'], 'Detail included types are incorrect'
+
comment_count = len([resource for resource in included if resource["type"] == "comments"])
+ expected_comment_count = single_entry.comment_set.count()
- assert comment_count == single_entry.comment_set.count(), 'Detail comment count is incorrect'
+ assert comment_count == expected_comment_count, 'Detail comment count is incorrect'
|
Fix for test included_data_on_list included types check
|
## Code Before:
import pytest
from django.core.urlresolvers import reverse
from example.tests.utils import load_json
pytestmark = pytest.mark.django_db
def test_included_data_on_list(multiple_entries, client):
response = client.get(reverse("entry-list") + '?include=comments&page_size=5')
included = load_json(response.content).get('included')
assert len(load_json(response.content)['data']) == len(multiple_entries), 'Incorrect entry count'
assert [x.get('type') for x in included] == ['comments'], 'List included types are incorrect'
comment_count = len([resource for resource in included if resource["type"] == "comments"])
expected_comment_count = sum([entry.comment_set.count() for entry in multiple_entries])
assert comment_count == expected_comment_count, 'List comment count is incorrect'
def test_included_data_on_detail(single_entry, client):
response = client.get(reverse("entry-detail", kwargs={'pk': single_entry.pk}) + '?include=comments')
included = load_json(response.content).get('included')
assert [x.get('type') for x in included] == ['comments'], 'Detail included types are incorrect'
comment_count = len([resource for resource in included if resource["type"] == "comments"])
assert comment_count == single_entry.comment_set.count(), 'Detail comment count is incorrect'
## Instruction:
Fix for test included_data_on_list included types check
## Code After:
import pytest
from django.core.urlresolvers import reverse
from example.tests.utils import load_json
pytestmark = pytest.mark.django_db
def test_included_data_on_list(multiple_entries, client):
response = client.get(reverse("entry-list") + '?include=comments&page_size=5')
included = load_json(response.content).get('included')
assert len(load_json(response.content)['data']) == len(multiple_entries), 'Incorrect entry count'
assert [x.get('type') for x in included] == ['comments', 'comments'], 'List included types are incorrect'
comment_count = len([resource for resource in included if resource["type"] == "comments"])
expected_comment_count = sum([entry.comment_set.count() for entry in multiple_entries])
assert comment_count == expected_comment_count, 'List comment count is incorrect'
def test_included_data_on_detail(single_entry, client):
response = client.get(reverse("entry-detail", kwargs={'pk': single_entry.pk}) + '?include=comments')
included = load_json(response.content).get('included')
assert [x.get('type') for x in included] == ['comments'], 'Detail included types are incorrect'
comment_count = len([resource for resource in included if resource["type"] == "comments"])
expected_comment_count = single_entry.comment_set.count()
assert comment_count == expected_comment_count, 'Detail comment count is incorrect'
|
import pytest
from django.core.urlresolvers import reverse
from example.tests.utils import load_json
pytestmark = pytest.mark.django_db
def test_included_data_on_list(multiple_entries, client):
response = client.get(reverse("entry-list") + '?include=comments&page_size=5')
included = load_json(response.content).get('included')
assert len(load_json(response.content)['data']) == len(multiple_entries), 'Incorrect entry count'
- assert [x.get('type') for x in included] == ['comments'], 'List included types are incorrect'
+ assert [x.get('type') for x in included] == ['comments', 'comments'], 'List included types are incorrect'
? ++++++++++++
comment_count = len([resource for resource in included if resource["type"] == "comments"])
expected_comment_count = sum([entry.comment_set.count() for entry in multiple_entries])
assert comment_count == expected_comment_count, 'List comment count is incorrect'
def test_included_data_on_detail(single_entry, client):
response = client.get(reverse("entry-detail", kwargs={'pk': single_entry.pk}) + '?include=comments')
included = load_json(response.content).get('included')
assert [x.get('type') for x in included] == ['comments'], 'Detail included types are incorrect'
+
comment_count = len([resource for resource in included if resource["type"] == "comments"])
+ expected_comment_count = single_entry.comment_set.count()
- assert comment_count == single_entry.comment_set.count(), 'Detail comment count is incorrect'
? ----- ------ ---- --
+ assert comment_count == expected_comment_count, 'Detail comment count is incorrect'
? +++++++
|
4a62214f0c9e8789b8453a48c0a880c4ac6236cb
|
saleor/product/migrations/0123_auto_20200904_1251.py
|
saleor/product/migrations/0123_auto_20200904_1251.py
|
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
|
from django.db import migrations
from django.db.models import Count
def remove_variant_image_duplicates(apps, schema_editor):
ProductImage = apps.get_model("product", "ProductImage")
VariantImage = apps.get_model("product", "VariantImage")
duplicated_images = (
ProductImage.objects.values("pk", "variant_images__variant")
.annotate(variant_count=Count("variant_images__variant"))
.filter(variant_count__gte=2)
)
variant_image_ids_to_remove = []
for image_data in duplicated_images:
ids = VariantImage.objects.filter(
variant=image_data["variant_images__variant"], image__pk=image_data["pk"],
)[1:].values_list("pk", flat=True)
variant_image_ids_to_remove += ids
VariantImage.objects.filter(pk__in=variant_image_ids_to_remove).delete()
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
migrations.RunPython(
remove_variant_image_duplicates, migrations.RunPython.noop
),
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
|
Drop duplicated VariantImages before migration to unique together
|
Drop duplicated VariantImages before migration to unique together
|
Python
|
bsd-3-clause
|
mociepka/saleor,mociepka/saleor,mociepka/saleor
|
from django.db import migrations
+ from django.db.models import Count
+
+
+ def remove_variant_image_duplicates(apps, schema_editor):
+ ProductImage = apps.get_model("product", "ProductImage")
+ VariantImage = apps.get_model("product", "VariantImage")
+
+ duplicated_images = (
+ ProductImage.objects.values("pk", "variant_images__variant")
+ .annotate(variant_count=Count("variant_images__variant"))
+ .filter(variant_count__gte=2)
+ )
+
+ variant_image_ids_to_remove = []
+ for image_data in duplicated_images:
+ ids = VariantImage.objects.filter(
+ variant=image_data["variant_images__variant"], image__pk=image_data["pk"],
+ )[1:].values_list("pk", flat=True)
+ variant_image_ids_to_remove += ids
+
+ VariantImage.objects.filter(pk__in=variant_image_ids_to_remove).delete()
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
+ migrations.RunPython(
+ remove_variant_image_duplicates, migrations.RunPython.noop
+ ),
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
|
Drop duplicated VariantImages before migration to unique together
|
## Code Before:
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
## Instruction:
Drop duplicated VariantImages before migration to unique together
## Code After:
from django.db import migrations
from django.db.models import Count
def remove_variant_image_duplicates(apps, schema_editor):
ProductImage = apps.get_model("product", "ProductImage")
VariantImage = apps.get_model("product", "VariantImage")
duplicated_images = (
ProductImage.objects.values("pk", "variant_images__variant")
.annotate(variant_count=Count("variant_images__variant"))
.filter(variant_count__gte=2)
)
variant_image_ids_to_remove = []
for image_data in duplicated_images:
ids = VariantImage.objects.filter(
variant=image_data["variant_images__variant"], image__pk=image_data["pk"],
)[1:].values_list("pk", flat=True)
variant_image_ids_to_remove += ids
VariantImage.objects.filter(pk__in=variant_image_ids_to_remove).delete()
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
migrations.RunPython(
remove_variant_image_duplicates, migrations.RunPython.noop
),
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
|
from django.db import migrations
+ from django.db.models import Count
+
+
+ def remove_variant_image_duplicates(apps, schema_editor):
+ ProductImage = apps.get_model("product", "ProductImage")
+ VariantImage = apps.get_model("product", "VariantImage")
+
+ duplicated_images = (
+ ProductImage.objects.values("pk", "variant_images__variant")
+ .annotate(variant_count=Count("variant_images__variant"))
+ .filter(variant_count__gte=2)
+ )
+
+ variant_image_ids_to_remove = []
+ for image_data in duplicated_images:
+ ids = VariantImage.objects.filter(
+ variant=image_data["variant_images__variant"], image__pk=image_data["pk"],
+ )[1:].values_list("pk", flat=True)
+ variant_image_ids_to_remove += ids
+
+ VariantImage.objects.filter(pk__in=variant_image_ids_to_remove).delete()
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
+ migrations.RunPython(
+ remove_variant_image_duplicates, migrations.RunPython.noop
+ ),
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
|
7821db4fb30bc013f8ae71c779faae5f6864da1d
|
falafel/__init__.py
|
falafel/__init__.py
|
import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.9.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
|
import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.9.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
RULES_STATUS = {}
"""
Mapping of dictionaries containing nvr and commitid for each rule repo included
in this instance
{"rule_repo_1": {"version": nvr(), "commit": sha1}}
"""
def add_status(name, nvr, commit):
"""
Rule repositories should call this method in their package __init__ to
register their version information.
"""
RULES_STATUS[name] = {"version": nvr, "commit": commit}
|
Allow rule repos to provide version information
|
Allow rule repos to provide version information
Added a new method to the root package `add_status`.
Rule repos should use it during initialization:
import falafel
falafel.add_status(name="my_rule_repo", nvr="my-rules-1.0.0-1", commit="abcdef")
|
Python
|
apache-2.0
|
RedHatInsights/insights-core,RedHatInsights/insights-core
|
import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.9.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
+ RULES_STATUS = {}
+ """
+ Mapping of dictionaries containing nvr and commitid for each rule repo included
+ in this instance
+
+ {"rule_repo_1": {"version": nvr(), "commit": sha1}}
+ """
+
+
+ def add_status(name, nvr, commit):
+ """
+ Rule repositories should call this method in their package __init__ to
+ register their version information.
+ """
+ RULES_STATUS[name] = {"version": nvr, "commit": commit}
+
|
Allow rule repos to provide version information
|
## Code Before:
import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.9.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
## Instruction:
Allow rule repos to provide version information
## Code After:
import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.9.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
RULES_STATUS = {}
"""
Mapping of dictionaries containing nvr and commitid for each rule repo included
in this instance
{"rule_repo_1": {"version": nvr(), "commit": sha1}}
"""
def add_status(name, nvr, commit):
"""
Rule repositories should call this method in their package __init__ to
register their version information.
"""
RULES_STATUS[name] = {"version": nvr, "commit": commit}
|
import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.9.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
+
+ RULES_STATUS = {}
+ """
+ Mapping of dictionaries containing nvr and commitid for each rule repo included
+ in this instance
+
+ {"rule_repo_1": {"version": nvr(), "commit": sha1}}
+ """
+
+
+ def add_status(name, nvr, commit):
+ """
+ Rule repositories should call this method in their package __init__ to
+ register their version information.
+ """
+ RULES_STATUS[name] = {"version": nvr, "commit": commit}
|
a2142fb8a592a9ad9b4870d4685ec02cfa621a77
|
tests/settings.py
|
tests/settings.py
|
import os
import urllib
TRUSTED_ROOT_FILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "AppleIncRootCertificate.cer"
)
SECRET_KEY = "notsecr3t"
IAP_SETTINGS = {
"TRUSTED_ROOT_FILE": TRUSTED_ROOT_FILE,
"PRODUCTION_BUNDLE_ID": "com.educreations.ios.Educreations",
}
if not os.path.isfile(TRUSTED_ROOT_FILE):
trusted_root_data = urllib.urlretrieve(
"https://www.apple.com/appleca/AppleIncRootCertificate.cer", TRUSTED_ROOT_FILE
)
|
import os
import urllib
TRUSTED_ROOT_FILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "AppleIncRootCertificate.cer"
)
SECRET_KEY = "notsecr3t"
IAP_SETTINGS = {
"TRUSTED_ROOT_FILE": TRUSTED_ROOT_FILE,
"PRODUCTION_BUNDLE_ID": "com.educreations.ios.Educreations",
}
if not os.path.isfile(TRUSTED_ROOT_FILE):
try:
trusted_root_data = urllib.urlretrieve(
"https://www.apple.com/appleca/AppleIncRootCertificate.cer",
TRUSTED_ROOT_FILE,
)
except AttributeError:
# Python 3
trusted_root_data = urllib.request.urlretrieve(
"https://www.apple.com/appleca/AppleIncRootCertificate.cer",
TRUSTED_ROOT_FILE,
)
|
Fix cert retreival on python 3
|
Fix cert retreival on python 3
|
Python
|
mit
|
educreations/python-iap
|
import os
import urllib
TRUSTED_ROOT_FILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "AppleIncRootCertificate.cer"
)
SECRET_KEY = "notsecr3t"
IAP_SETTINGS = {
"TRUSTED_ROOT_FILE": TRUSTED_ROOT_FILE,
"PRODUCTION_BUNDLE_ID": "com.educreations.ios.Educreations",
}
if not os.path.isfile(TRUSTED_ROOT_FILE):
+ try:
- trusted_root_data = urllib.urlretrieve(
+ trusted_root_data = urllib.urlretrieve(
- "https://www.apple.com/appleca/AppleIncRootCertificate.cer", TRUSTED_ROOT_FILE
+ "https://www.apple.com/appleca/AppleIncRootCertificate.cer",
- )
+ TRUSTED_ROOT_FILE,
+ )
+ except AttributeError:
+ # Python 3
+ trusted_root_data = urllib.request.urlretrieve(
+ "https://www.apple.com/appleca/AppleIncRootCertificate.cer",
+ TRUSTED_ROOT_FILE,
+ )
|
Fix cert retreival on python 3
|
## Code Before:
import os
import urllib
TRUSTED_ROOT_FILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "AppleIncRootCertificate.cer"
)
SECRET_KEY = "notsecr3t"
IAP_SETTINGS = {
"TRUSTED_ROOT_FILE": TRUSTED_ROOT_FILE,
"PRODUCTION_BUNDLE_ID": "com.educreations.ios.Educreations",
}
if not os.path.isfile(TRUSTED_ROOT_FILE):
trusted_root_data = urllib.urlretrieve(
"https://www.apple.com/appleca/AppleIncRootCertificate.cer", TRUSTED_ROOT_FILE
)
## Instruction:
Fix cert retreival on python 3
## Code After:
import os
import urllib
TRUSTED_ROOT_FILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "AppleIncRootCertificate.cer"
)
SECRET_KEY = "notsecr3t"
IAP_SETTINGS = {
"TRUSTED_ROOT_FILE": TRUSTED_ROOT_FILE,
"PRODUCTION_BUNDLE_ID": "com.educreations.ios.Educreations",
}
if not os.path.isfile(TRUSTED_ROOT_FILE):
try:
trusted_root_data = urllib.urlretrieve(
"https://www.apple.com/appleca/AppleIncRootCertificate.cer",
TRUSTED_ROOT_FILE,
)
except AttributeError:
# Python 3
trusted_root_data = urllib.request.urlretrieve(
"https://www.apple.com/appleca/AppleIncRootCertificate.cer",
TRUSTED_ROOT_FILE,
)
|
import os
import urllib
TRUSTED_ROOT_FILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "AppleIncRootCertificate.cer"
)
SECRET_KEY = "notsecr3t"
IAP_SETTINGS = {
"TRUSTED_ROOT_FILE": TRUSTED_ROOT_FILE,
"PRODUCTION_BUNDLE_ID": "com.educreations.ios.Educreations",
}
if not os.path.isfile(TRUSTED_ROOT_FILE):
+ try:
- trusted_root_data = urllib.urlretrieve(
+ trusted_root_data = urllib.urlretrieve(
? ++++
- "https://www.apple.com/appleca/AppleIncRootCertificate.cer", TRUSTED_ROOT_FILE
? ------------------
+ "https://www.apple.com/appleca/AppleIncRootCertificate.cer",
? ++++
- )
+ TRUSTED_ROOT_FILE,
+ )
+ except AttributeError:
+ # Python 3
+ trusted_root_data = urllib.request.urlretrieve(
+ "https://www.apple.com/appleca/AppleIncRootCertificate.cer",
+ TRUSTED_ROOT_FILE,
+ )
|
97bcf652a18808d89c8de2235e2b32ae933036b6
|
tests/options_tests.py
|
tests/options_tests.py
|
from nose.tools import istest, assert_equal
from mammoth import style_reader
from mammoth.options import read_options, _default_style_map
@istest
def default_style_map_is_used_if_style_map_is_not_set():
assert_equal(_default_style_map, read_options({})["style_map"])
@istest
def custom_style_mappings_are_prepended_to_default_style_mappings():
style_map = read_options({
"style_map": "p.SectionTitle => h2"
})["style_map"]
assert_equal(style_reader.read_style("p.SectionTitle => h2"), style_map[0])
assert_equal(_default_style_map, style_map[1:])
@istest
def default_style_mappings_are_ignored_if_include_default_style_map_is_false():
style_map = read_options({
"style_map": "p.SectionTitle => h2",
"include_default_style_map": False
})["style_map"]
assert_equal([style_reader.read_style("p.SectionTitle => h2")], style_map)
|
from nose.tools import istest, assert_equal
from mammoth import style_reader
from mammoth.options import read_options, _default_style_map
@istest
def default_style_map_is_used_if_style_map_is_not_set():
assert_equal(_default_style_map, read_options({})["style_map"])
@istest
def custom_style_mappings_are_prepended_to_default_style_mappings():
style_map = read_options({
"style_map": "p.SectionTitle => h2"
})["style_map"]
assert_equal(style_reader.read_style("p.SectionTitle => h2"), style_map[0])
assert_equal(_default_style_map, style_map[1:])
@istest
def default_style_mappings_are_ignored_if_include_default_style_map_is_false():
style_map = read_options({
"style_map": "p.SectionTitle => h2",
"include_default_style_map": False
})["style_map"]
assert_equal([style_reader.read_style("p.SectionTitle => h2")], style_map)
@istest
def lines_starting_with_hash_in_custom_style_map_are_ignored():
style_map = read_options({
"style_map": "#p.SectionTitle => h3\np.SectionTitle => h2",
"include_default_style_map": False
})["style_map"]
assert_equal([style_reader.read_style("p.SectionTitle => h2")], style_map)
|
Add test to ensure that style map lines beginning with hash are ignored
|
Add test to ensure that style map lines beginning with hash are ignored
|
Python
|
bsd-2-clause
|
mwilliamson/python-mammoth
|
from nose.tools import istest, assert_equal
from mammoth import style_reader
from mammoth.options import read_options, _default_style_map
@istest
def default_style_map_is_used_if_style_map_is_not_set():
assert_equal(_default_style_map, read_options({})["style_map"])
@istest
def custom_style_mappings_are_prepended_to_default_style_mappings():
style_map = read_options({
"style_map": "p.SectionTitle => h2"
})["style_map"]
assert_equal(style_reader.read_style("p.SectionTitle => h2"), style_map[0])
assert_equal(_default_style_map, style_map[1:])
@istest
def default_style_mappings_are_ignored_if_include_default_style_map_is_false():
style_map = read_options({
"style_map": "p.SectionTitle => h2",
"include_default_style_map": False
})["style_map"]
assert_equal([style_reader.read_style("p.SectionTitle => h2")], style_map)
+
+ @istest
+ def lines_starting_with_hash_in_custom_style_map_are_ignored():
+ style_map = read_options({
+ "style_map": "#p.SectionTitle => h3\np.SectionTitle => h2",
+ "include_default_style_map": False
+ })["style_map"]
+ assert_equal([style_reader.read_style("p.SectionTitle => h2")], style_map)
+
|
Add test to ensure that style map lines beginning with hash are ignored
|
## Code Before:
from nose.tools import istest, assert_equal
from mammoth import style_reader
from mammoth.options import read_options, _default_style_map
@istest
def default_style_map_is_used_if_style_map_is_not_set():
assert_equal(_default_style_map, read_options({})["style_map"])
@istest
def custom_style_mappings_are_prepended_to_default_style_mappings():
style_map = read_options({
"style_map": "p.SectionTitle => h2"
})["style_map"]
assert_equal(style_reader.read_style("p.SectionTitle => h2"), style_map[0])
assert_equal(_default_style_map, style_map[1:])
@istest
def default_style_mappings_are_ignored_if_include_default_style_map_is_false():
style_map = read_options({
"style_map": "p.SectionTitle => h2",
"include_default_style_map": False
})["style_map"]
assert_equal([style_reader.read_style("p.SectionTitle => h2")], style_map)
## Instruction:
Add test to ensure that style map lines beginning with hash are ignored
## Code After:
from nose.tools import istest, assert_equal
from mammoth import style_reader
from mammoth.options import read_options, _default_style_map
@istest
def default_style_map_is_used_if_style_map_is_not_set():
assert_equal(_default_style_map, read_options({})["style_map"])
@istest
def custom_style_mappings_are_prepended_to_default_style_mappings():
style_map = read_options({
"style_map": "p.SectionTitle => h2"
})["style_map"]
assert_equal(style_reader.read_style("p.SectionTitle => h2"), style_map[0])
assert_equal(_default_style_map, style_map[1:])
@istest
def default_style_mappings_are_ignored_if_include_default_style_map_is_false():
style_map = read_options({
"style_map": "p.SectionTitle => h2",
"include_default_style_map": False
})["style_map"]
assert_equal([style_reader.read_style("p.SectionTitle => h2")], style_map)
@istest
def lines_starting_with_hash_in_custom_style_map_are_ignored():
style_map = read_options({
"style_map": "#p.SectionTitle => h3\np.SectionTitle => h2",
"include_default_style_map": False
})["style_map"]
assert_equal([style_reader.read_style("p.SectionTitle => h2")], style_map)
|
from nose.tools import istest, assert_equal
from mammoth import style_reader
from mammoth.options import read_options, _default_style_map
@istest
def default_style_map_is_used_if_style_map_is_not_set():
assert_equal(_default_style_map, read_options({})["style_map"])
@istest
def custom_style_mappings_are_prepended_to_default_style_mappings():
style_map = read_options({
"style_map": "p.SectionTitle => h2"
})["style_map"]
assert_equal(style_reader.read_style("p.SectionTitle => h2"), style_map[0])
assert_equal(_default_style_map, style_map[1:])
@istest
def default_style_mappings_are_ignored_if_include_default_style_map_is_false():
style_map = read_options({
"style_map": "p.SectionTitle => h2",
"include_default_style_map": False
})["style_map"]
assert_equal([style_reader.read_style("p.SectionTitle => h2")], style_map)
+
+
+ @istest
+ def lines_starting_with_hash_in_custom_style_map_are_ignored():
+ style_map = read_options({
+ "style_map": "#p.SectionTitle => h3\np.SectionTitle => h2",
+ "include_default_style_map": False
+ })["style_map"]
+ assert_equal([style_reader.read_style("p.SectionTitle => h2")], style_map)
|
7ddb1b3d0139ef8b6a7badcb2c6bef6a0e35e88a
|
hooks/post_gen_project.py
|
hooks/post_gen_project.py
|
import os
package_dir = '{{cookiecutter.repo_name}}'
old_kv_file = os.path.join(package_dir, '{{cookiecutter.app_class_name}}.kv')
lower_app_class_name = '{{cookiecutter.app_class_name}}'.lower()
if (lower_app_class_name.endswith('app')):
lower_app_class_name = lower_app_class_name[:-3]
new_kv_file = os.path.join(package_dir, '{}.kv'.format(lower_app_class_name))
os.rename(old_kv_file, new_kv_file)
|
def rename_kv_file():
"""Rename the generated kv file to be compatible with the original kivy kv
file detection of `App.load_kv`.
"""
import os
package_dir = '{{cookiecutter.repo_name}}'
old_kv_file = os.path.join(
package_dir, '{{cookiecutter.app_class_name}}.kv'
)
lower_app_class_name = '{{cookiecutter.app_class_name}}'.lower()
if (lower_app_class_name.endswith('app')):
lower_app_class_name = lower_app_class_name[:-3]
new_kv_file = os.path.join(
package_dir, '{}.kv'.format(lower_app_class_name)
)
os.rename(old_kv_file, new_kv_file)
rename_kv_file()
|
Use a function to rename the kv file in hooks
|
Use a function to rename the kv file in hooks
|
Python
|
mit
|
hackebrot/cookiedozer,hackebrot/cookiedozer
|
-
- import os
+ def rename_kv_file():
+ """Rename the generated kv file to be compatible with the original kivy kv
+ file detection of `App.load_kv`.
+ """
+ import os
- package_dir = '{{cookiecutter.repo_name}}'
+ package_dir = '{{cookiecutter.repo_name}}'
- old_kv_file = os.path.join(package_dir, '{{cookiecutter.app_class_name}}.kv')
+ old_kv_file = os.path.join(
+ package_dir, '{{cookiecutter.app_class_name}}.kv'
+ )
- lower_app_class_name = '{{cookiecutter.app_class_name}}'.lower()
+ lower_app_class_name = '{{cookiecutter.app_class_name}}'.lower()
- if (lower_app_class_name.endswith('app')):
+ if (lower_app_class_name.endswith('app')):
- lower_app_class_name = lower_app_class_name[:-3]
+ lower_app_class_name = lower_app_class_name[:-3]
- new_kv_file = os.path.join(package_dir, '{}.kv'.format(lower_app_class_name))
+ new_kv_file = os.path.join(
+ package_dir, '{}.kv'.format(lower_app_class_name)
+ )
- os.rename(old_kv_file, new_kv_file)
+ os.rename(old_kv_file, new_kv_file)
+
+ rename_kv_file()
+
|
Use a function to rename the kv file in hooks
|
## Code Before:
import os
package_dir = '{{cookiecutter.repo_name}}'
old_kv_file = os.path.join(package_dir, '{{cookiecutter.app_class_name}}.kv')
lower_app_class_name = '{{cookiecutter.app_class_name}}'.lower()
if (lower_app_class_name.endswith('app')):
lower_app_class_name = lower_app_class_name[:-3]
new_kv_file = os.path.join(package_dir, '{}.kv'.format(lower_app_class_name))
os.rename(old_kv_file, new_kv_file)
## Instruction:
Use a function to rename the kv file in hooks
## Code After:
def rename_kv_file():
"""Rename the generated kv file to be compatible with the original kivy kv
file detection of `App.load_kv`.
"""
import os
package_dir = '{{cookiecutter.repo_name}}'
old_kv_file = os.path.join(
package_dir, '{{cookiecutter.app_class_name}}.kv'
)
lower_app_class_name = '{{cookiecutter.app_class_name}}'.lower()
if (lower_app_class_name.endswith('app')):
lower_app_class_name = lower_app_class_name[:-3]
new_kv_file = os.path.join(
package_dir, '{}.kv'.format(lower_app_class_name)
)
os.rename(old_kv_file, new_kv_file)
rename_kv_file()
|
-
- import os
+ def rename_kv_file():
+ """Rename the generated kv file to be compatible with the original kivy kv
+ file detection of `App.load_kv`.
+ """
+ import os
- package_dir = '{{cookiecutter.repo_name}}'
+ package_dir = '{{cookiecutter.repo_name}}'
? ++++
+ old_kv_file = os.path.join(
- old_kv_file = os.path.join(package_dir, '{{cookiecutter.app_class_name}}.kv')
? ----------- - ^^^^^^^^^^^^^ -
+ package_dir, '{{cookiecutter.app_class_name}}.kv'
? ^^^^^^
+ )
- lower_app_class_name = '{{cookiecutter.app_class_name}}'.lower()
+ lower_app_class_name = '{{cookiecutter.app_class_name}}'.lower()
? ++++
- if (lower_app_class_name.endswith('app')):
+ if (lower_app_class_name.endswith('app')):
? ++++
- lower_app_class_name = lower_app_class_name[:-3]
+ lower_app_class_name = lower_app_class_name[:-3]
? ++++
+ new_kv_file = os.path.join(
- new_kv_file = os.path.join(package_dir, '{}.kv'.format(lower_app_class_name))
? ----------- - ^^^^^^^^^^^^^ -
+ package_dir, '{}.kv'.format(lower_app_class_name)
? ^^^^^^
+ )
- os.rename(old_kv_file, new_kv_file)
+ os.rename(old_kv_file, new_kv_file)
? ++++
+
+
+ rename_kv_file()
|
7c9ed9fbdc1b16ae1d59c1099e7190e6297bf584
|
util/chplenv/chpl_tasks.py
|
util/chplenv/chpl_tasks.py
|
import sys, os
import chpl_arch, chpl_platform, chpl_compiler
from utils import memoize
import utils
@memoize
def get():
tasks_val = os.environ.get('CHPL_TASKS')
if not tasks_val:
arch_val = chpl_arch.get('target', get_lcd=True)
platform_val = chpl_platform.get()
compiler_val = chpl_compiler.get('target')
# use muxed on cray-x* machines using the module and supported compiler
if (platform_val.startswith('cray-x') and
utils.using_chapel_module() and
compiler_val in ('cray-prgenv-gnu', 'cray-prgenv-intel') and
arch_val != 'knc'):
tasks_val = 'muxed'
elif (arch_val == 'knc' or
platform_val.startswith('cygwin') or
platform_val.startswith('netbsd') or
compiler_val == 'cray-prgenv-cray'):
tasks_val = 'fifo'
else:
tasks_val = 'qthreads'
return tasks_val
def _main():
tasks_val = get()
sys.stdout.write("{0}\n".format(tasks_val))
if __name__ == '__main__':
_main()
|
import sys, os
import chpl_arch, chpl_platform, chpl_compiler, chpl_comm
from utils import memoize
import utils
@memoize
def get():
tasks_val = os.environ.get('CHPL_TASKS')
if not tasks_val:
arch_val = chpl_arch.get('target', get_lcd=True)
platform_val = chpl_platform.get()
compiler_val = chpl_compiler.get('target')
comm_val = chpl_comm.get()
# use muxed on cray-x* machines using the module and supported compiler
if (comm_val == 'ugni' and
platform_val.startswith('cray-x') and
utils.using_chapel_module() and
compiler_val in ('cray-prgenv-gnu', 'cray-prgenv-intel') and
arch_val != 'knc'):
tasks_val = 'muxed'
elif (arch_val == 'knc' or
platform_val.startswith('cygwin') or
platform_val.startswith('netbsd') or
compiler_val == 'cray-prgenv-cray'):
tasks_val = 'fifo'
else:
tasks_val = 'qthreads'
return tasks_val
def _main():
tasks_val = get()
sys.stdout.write("{0}\n".format(tasks_val))
if __name__ == '__main__':
_main()
|
Update chpl_task to only default to muxed when ugni comm is used.
|
Update chpl_task to only default to muxed when ugni comm is used.
This expands upon (and fixes) #1640 and #1635.
* [ ] Run printchplenv on mac and confirm it still works.
* [ ] Emulate cray-x* with module and confirm comm, tasks are ugni, muxed.
```bash
(
export CHPL_MODULE_HOME=$CHPL_HOME
export CHPL_HOST_PLATFORM=cray-xc
export CHPL_TARGET_COMPILER=cray-prgenv-gnu
printchplenv
)
```
* [ ] Emulate cray-x* with module but not supported compiler and confirm comm, tasks are gasnet, default tasks.
```bash
(
export CHPL_MODULE_HOME=$CHPL_HOME
export CHPL_HOST_PLATFORM=cray-xc
export CHPL_TARGET_COMPILER=cray-prgenv-cray
printchplenv
)
```
* [ ] Emulate cray-x* without module and confirm comm, tasks settings are gasnet, default tasks.
```bash
(
export CHPL_HOST_PLATFORM=cray-xc
export CHPL_TARGET_COMPILER=cray-prgenv-gnu
printchplenv
)
```
* [ ] Emulate cray-x* with module and intel compiler, but knc target arch and confirm comm, tasks are gasnet, default tasks.
```bash
(
export CHPL_MODULE_HOME=$CHPL_HOME
export CHPL_HOST_PLATFORM=cray-xc
export CHPL_TARGET_COMPILER=cray-prgenv-intel
export CRAY_CPU_TARGET=knc
printchplenv
)
```
* [ ] Emulate cray-x* with module, supported compiler, but none ugni comm and confirm tasks are default tasks.
```bash
(
export CHPL_MODULE_HOME=$CHPL_HOME
export CHPL_HOST_PLATFORM=cray-xc
export CHPL_TARGET_COMPILER=cray-prgenv-gnu
export CHPL_COMM=none
printchplenv
)
```
|
Python
|
apache-2.0
|
CoryMcCartan/chapel,chizarlicious/chapel,chizarlicious/chapel,hildeth/chapel,CoryMcCartan/chapel,chizarlicious/chapel,CoryMcCartan/chapel,chizarlicious/chapel,chizarlicious/chapel,CoryMcCartan/chapel,hildeth/chapel,CoryMcCartan/chapel,chizarlicious/chapel,hildeth/chapel,hildeth/chapel,hildeth/chapel,hildeth/chapel,hildeth/chapel,chizarlicious/chapel,CoryMcCartan/chapel,CoryMcCartan/chapel
|
import sys, os
- import chpl_arch, chpl_platform, chpl_compiler
+ import chpl_arch, chpl_platform, chpl_compiler, chpl_comm
from utils import memoize
import utils
@memoize
def get():
tasks_val = os.environ.get('CHPL_TASKS')
if not tasks_val:
arch_val = chpl_arch.get('target', get_lcd=True)
platform_val = chpl_platform.get()
compiler_val = chpl_compiler.get('target')
+ comm_val = chpl_comm.get()
# use muxed on cray-x* machines using the module and supported compiler
+ if (comm_val == 'ugni' and
- if (platform_val.startswith('cray-x') and
+ platform_val.startswith('cray-x') and
utils.using_chapel_module() and
compiler_val in ('cray-prgenv-gnu', 'cray-prgenv-intel') and
arch_val != 'knc'):
tasks_val = 'muxed'
elif (arch_val == 'knc' or
platform_val.startswith('cygwin') or
platform_val.startswith('netbsd') or
compiler_val == 'cray-prgenv-cray'):
tasks_val = 'fifo'
else:
tasks_val = 'qthreads'
return tasks_val
def _main():
tasks_val = get()
sys.stdout.write("{0}\n".format(tasks_val))
if __name__ == '__main__':
_main()
|
Update chpl_task to only default to muxed when ugni comm is used.
|
## Code Before:
import sys, os
import chpl_arch, chpl_platform, chpl_compiler
from utils import memoize
import utils
@memoize
def get():
tasks_val = os.environ.get('CHPL_TASKS')
if not tasks_val:
arch_val = chpl_arch.get('target', get_lcd=True)
platform_val = chpl_platform.get()
compiler_val = chpl_compiler.get('target')
# use muxed on cray-x* machines using the module and supported compiler
if (platform_val.startswith('cray-x') and
utils.using_chapel_module() and
compiler_val in ('cray-prgenv-gnu', 'cray-prgenv-intel') and
arch_val != 'knc'):
tasks_val = 'muxed'
elif (arch_val == 'knc' or
platform_val.startswith('cygwin') or
platform_val.startswith('netbsd') or
compiler_val == 'cray-prgenv-cray'):
tasks_val = 'fifo'
else:
tasks_val = 'qthreads'
return tasks_val
def _main():
tasks_val = get()
sys.stdout.write("{0}\n".format(tasks_val))
if __name__ == '__main__':
_main()
## Instruction:
Update chpl_task to only default to muxed when ugni comm is used.
## Code After:
import sys, os
import chpl_arch, chpl_platform, chpl_compiler, chpl_comm
from utils import memoize
import utils
@memoize
def get():
tasks_val = os.environ.get('CHPL_TASKS')
if not tasks_val:
arch_val = chpl_arch.get('target', get_lcd=True)
platform_val = chpl_platform.get()
compiler_val = chpl_compiler.get('target')
comm_val = chpl_comm.get()
# use muxed on cray-x* machines using the module and supported compiler
if (comm_val == 'ugni' and
platform_val.startswith('cray-x') and
utils.using_chapel_module() and
compiler_val in ('cray-prgenv-gnu', 'cray-prgenv-intel') and
arch_val != 'knc'):
tasks_val = 'muxed'
elif (arch_val == 'knc' or
platform_val.startswith('cygwin') or
platform_val.startswith('netbsd') or
compiler_val == 'cray-prgenv-cray'):
tasks_val = 'fifo'
else:
tasks_val = 'qthreads'
return tasks_val
def _main():
tasks_val = get()
sys.stdout.write("{0}\n".format(tasks_val))
if __name__ == '__main__':
_main()
|
import sys, os
- import chpl_arch, chpl_platform, chpl_compiler
+ import chpl_arch, chpl_platform, chpl_compiler, chpl_comm
? +++++++++++
from utils import memoize
import utils
@memoize
def get():
tasks_val = os.environ.get('CHPL_TASKS')
if not tasks_val:
arch_val = chpl_arch.get('target', get_lcd=True)
platform_val = chpl_platform.get()
compiler_val = chpl_compiler.get('target')
+ comm_val = chpl_comm.get()
# use muxed on cray-x* machines using the module and supported compiler
+ if (comm_val == 'ugni' and
- if (platform_val.startswith('cray-x') and
? -- ^
+ platform_val.startswith('cray-x') and
? ^^^^^^^
utils.using_chapel_module() and
compiler_val in ('cray-prgenv-gnu', 'cray-prgenv-intel') and
arch_val != 'knc'):
tasks_val = 'muxed'
elif (arch_val == 'knc' or
platform_val.startswith('cygwin') or
platform_val.startswith('netbsd') or
compiler_val == 'cray-prgenv-cray'):
tasks_val = 'fifo'
else:
tasks_val = 'qthreads'
return tasks_val
def _main():
tasks_val = get()
sys.stdout.write("{0}\n".format(tasks_val))
if __name__ == '__main__':
_main()
|
2d0b44d65a8167a105cbc63e704735b1c360e0c4
|
api/core/urls.py
|
api/core/urls.py
|
from django.urls import path, re_path
from django.conf.urls.static import static
from django.conf import settings
from . import views
urlpatterns = static('/compiled/', document_root=settings.BUILD_ROOT) + [
path('go/<path:path>', views.redirector, name='redirector'),
re_path('^', views.index, name='index'),
]
|
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.auth.views import logout
from django.urls import path, re_path
from . import views
urlpatterns = static('/compiled/', document_root=settings.BUILD_ROOT) + [
path('go/<path:path>', views.redirector, name='redirector'),
path('logout', logout, {'next_page': '/'}),
re_path('^', views.index, name='index'),
]
|
Handle logout on the backend
|
Handle logout on the backend
|
Python
|
mit
|
citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement
|
+ from django.conf import settings
+ from django.conf.urls.static import static
+ from django.contrib.auth.views import logout
from django.urls import path, re_path
- from django.conf.urls.static import static
- from django.conf import settings
from . import views
urlpatterns = static('/compiled/', document_root=settings.BUILD_ROOT) + [
path('go/<path:path>', views.redirector, name='redirector'),
+ path('logout', logout, {'next_page': '/'}),
re_path('^', views.index, name='index'),
]
|
Handle logout on the backend
|
## Code Before:
from django.urls import path, re_path
from django.conf.urls.static import static
from django.conf import settings
from . import views
urlpatterns = static('/compiled/', document_root=settings.BUILD_ROOT) + [
path('go/<path:path>', views.redirector, name='redirector'),
re_path('^', views.index, name='index'),
]
## Instruction:
Handle logout on the backend
## Code After:
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.auth.views import logout
from django.urls import path, re_path
from . import views
urlpatterns = static('/compiled/', document_root=settings.BUILD_ROOT) + [
path('go/<path:path>', views.redirector, name='redirector'),
path('logout', logout, {'next_page': '/'}),
re_path('^', views.index, name='index'),
]
|
+ from django.conf import settings
+ from django.conf.urls.static import static
+ from django.contrib.auth.views import logout
from django.urls import path, re_path
- from django.conf.urls.static import static
- from django.conf import settings
from . import views
urlpatterns = static('/compiled/', document_root=settings.BUILD_ROOT) + [
path('go/<path:path>', views.redirector, name='redirector'),
+ path('logout', logout, {'next_page': '/'}),
re_path('^', views.index, name='index'),
]
|
8b4b5705907e1ec5f9dd3148560dc1bf4cd5b9b7
|
bin/detail/get_nmake_environment.py
|
bin/detail/get_nmake_environment.py
|
import detail.util
import os
import sys
def get(arch, vs_version):
vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version)
vs_path = os.getenv(vs_path_env)
if not vs_path:
sys.exit(
'Environment variable {} is empty, '
'looks like Visual Studio {} is not installed'.format(
vs_path_env, vs_version
)
)
vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')
if not os.path.isdir(vcvarsall_dir):
sys.exit(
'Directory `{}` not exists '
'({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat')
if not os.path.isfile(vcvarsall_path):
sys.exit(
'File vcvarsall.bat not found in directory '
'`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
|
import detail.util
import os
import sys
def get(arch, vs_version):
vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version)
vs_path = os.getenv(vs_path_env)
if not vs_path:
sys.exit(
'Environment variable {} is empty, '
'looks like Visual Studio {} is not installed'.format(
vs_path_env, vs_version
)
)
if vs_version == '15':
vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC', 'Auxiliary', 'Build')
else:
vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')
if not os.path.isdir(vcvarsall_dir):
sys.exit(
'Directory `{}` not exists '
'({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat')
if not os.path.isfile(vcvarsall_path):
sys.exit(
'File vcvarsall.bat not found in directory '
'`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
|
Fix vcvarsall_dir for Visual Studio 2017
|
polly.py: Fix vcvarsall_dir for Visual Studio 2017
[skip ci]
|
Python
|
bsd-2-clause
|
idscan/polly,idscan/polly,ruslo/polly,ruslo/polly
|
import detail.util
import os
import sys
def get(arch, vs_version):
vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version)
vs_path = os.getenv(vs_path_env)
if not vs_path:
sys.exit(
'Environment variable {} is empty, '
'looks like Visual Studio {} is not installed'.format(
vs_path_env, vs_version
)
)
+
+ if vs_version == '15':
+ vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC', 'Auxiliary', 'Build')
+ else:
- vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')
+ vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')
+
if not os.path.isdir(vcvarsall_dir):
sys.exit(
'Directory `{}` not exists '
'({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat')
if not os.path.isfile(vcvarsall_path):
sys.exit(
'File vcvarsall.bat not found in directory '
'`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
|
Fix vcvarsall_dir for Visual Studio 2017
|
## Code Before:
import detail.util
import os
import sys
def get(arch, vs_version):
vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version)
vs_path = os.getenv(vs_path_env)
if not vs_path:
sys.exit(
'Environment variable {} is empty, '
'looks like Visual Studio {} is not installed'.format(
vs_path_env, vs_version
)
)
vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')
if not os.path.isdir(vcvarsall_dir):
sys.exit(
'Directory `{}` not exists '
'({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat')
if not os.path.isfile(vcvarsall_path):
sys.exit(
'File vcvarsall.bat not found in directory '
'`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
## Instruction:
Fix vcvarsall_dir for Visual Studio 2017
## Code After:
import detail.util
import os
import sys
def get(arch, vs_version):
vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version)
vs_path = os.getenv(vs_path_env)
if not vs_path:
sys.exit(
'Environment variable {} is empty, '
'looks like Visual Studio {} is not installed'.format(
vs_path_env, vs_version
)
)
if vs_version == '15':
vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC', 'Auxiliary', 'Build')
else:
vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')
if not os.path.isdir(vcvarsall_dir):
sys.exit(
'Directory `{}` not exists '
'({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat')
if not os.path.isfile(vcvarsall_path):
sys.exit(
'File vcvarsall.bat not found in directory '
'`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
|
import detail.util
import os
import sys
def get(arch, vs_version):
vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version)
vs_path = os.getenv(vs_path_env)
if not vs_path:
sys.exit(
'Environment variable {} is empty, '
'looks like Visual Studio {} is not installed'.format(
vs_path_env, vs_version
)
)
+
+ if vs_version == '15':
+ vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC', 'Auxiliary', 'Build')
+ else:
- vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')
+ vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')
? ++
+
if not os.path.isdir(vcvarsall_dir):
sys.exit(
'Directory `{}` not exists '
'({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat')
if not os.path.isfile(vcvarsall_path):
sys.exit(
'File vcvarsall.bat not found in directory '
'`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
|
62b177e0a0fd7adbabe72d04befff566f05e9a74
|
scudcloud/notifier.py
|
scudcloud/notifier.py
|
from dbus.exceptions import DBusException
try:
import gi
gi.require_version('Notify', '0.7')
from gi.repository import Notify
except (ImportError, AttributeError):
from scudcloud import notify2
Notify = None
class Notifier(object):
def __init__(self, app_name, icon):
self.icon = icon
try:
if Notify is not None:
Notify.init(app_name)
self.notifier = Notify
else:
notify2.init(app_name)
self.notifier = notify2
self.enabled = True
except DBusException:
print("WARNING: No notification daemon found! "
"Notifications will be ignored.")
self.enabled = False
def notify(self, title, message, icon=None):
if not self.enabled:
return
if icon is None:
icon = self.icon
if Notify is not None:
notice = self.notifier.Notification.new(title, message, icon)
else:
notice = notify2.Notification(title, message, icon)
notice.set_hint_string('x-canonical-append', '')
try:
notice.show()
except:
pass
|
from dbus.exceptions import DBusException
try:
import gi
gi.require_version('Notify', '0.7')
from gi.repository import Notify
except (ImportError, AttributeError, ValueError):
from scudcloud import notify2
Notify = None
class Notifier(object):
def __init__(self, app_name, icon):
self.icon = icon
try:
if Notify is not None:
Notify.init(app_name)
self.notifier = Notify
else:
notify2.init(app_name)
self.notifier = notify2
self.enabled = True
except DBusException:
print("WARNING: No notification daemon found! "
"Notifications will be ignored.")
self.enabled = False
def notify(self, title, message, icon=None):
if not self.enabled:
return
if icon is None:
icon = self.icon
if Notify is not None:
notice = self.notifier.Notification.new(title, message, icon)
else:
notice = notify2.Notification(title, message, icon)
notice.set_hint_string('x-canonical-append', '')
try:
notice.show()
except:
pass
|
Allow ValueError as a notify exception
|
Allow ValueError as a notify exception
|
Python
|
mit
|
raelgc/scudcloud,raelgc/scudcloud,raelgc/scudcloud
|
from dbus.exceptions import DBusException
try:
import gi
gi.require_version('Notify', '0.7')
from gi.repository import Notify
- except (ImportError, AttributeError):
+ except (ImportError, AttributeError, ValueError):
from scudcloud import notify2
Notify = None
class Notifier(object):
def __init__(self, app_name, icon):
self.icon = icon
try:
if Notify is not None:
Notify.init(app_name)
self.notifier = Notify
else:
notify2.init(app_name)
self.notifier = notify2
self.enabled = True
except DBusException:
print("WARNING: No notification daemon found! "
"Notifications will be ignored.")
self.enabled = False
def notify(self, title, message, icon=None):
if not self.enabled:
return
if icon is None:
icon = self.icon
if Notify is not None:
notice = self.notifier.Notification.new(title, message, icon)
else:
notice = notify2.Notification(title, message, icon)
notice.set_hint_string('x-canonical-append', '')
try:
notice.show()
except:
pass
|
Allow ValueError as a notify exception
|
## Code Before:
from dbus.exceptions import DBusException
try:
import gi
gi.require_version('Notify', '0.7')
from gi.repository import Notify
except (ImportError, AttributeError):
from scudcloud import notify2
Notify = None
class Notifier(object):
def __init__(self, app_name, icon):
self.icon = icon
try:
if Notify is not None:
Notify.init(app_name)
self.notifier = Notify
else:
notify2.init(app_name)
self.notifier = notify2
self.enabled = True
except DBusException:
print("WARNING: No notification daemon found! "
"Notifications will be ignored.")
self.enabled = False
def notify(self, title, message, icon=None):
if not self.enabled:
return
if icon is None:
icon = self.icon
if Notify is not None:
notice = self.notifier.Notification.new(title, message, icon)
else:
notice = notify2.Notification(title, message, icon)
notice.set_hint_string('x-canonical-append', '')
try:
notice.show()
except:
pass
## Instruction:
Allow ValueError as a notify exception
## Code After:
from dbus.exceptions import DBusException
try:
import gi
gi.require_version('Notify', '0.7')
from gi.repository import Notify
except (ImportError, AttributeError, ValueError):
from scudcloud import notify2
Notify = None
class Notifier(object):
def __init__(self, app_name, icon):
self.icon = icon
try:
if Notify is not None:
Notify.init(app_name)
self.notifier = Notify
else:
notify2.init(app_name)
self.notifier = notify2
self.enabled = True
except DBusException:
print("WARNING: No notification daemon found! "
"Notifications will be ignored.")
self.enabled = False
def notify(self, title, message, icon=None):
if not self.enabled:
return
if icon is None:
icon = self.icon
if Notify is not None:
notice = self.notifier.Notification.new(title, message, icon)
else:
notice = notify2.Notification(title, message, icon)
notice.set_hint_string('x-canonical-append', '')
try:
notice.show()
except:
pass
|
from dbus.exceptions import DBusException
try:
import gi
gi.require_version('Notify', '0.7')
from gi.repository import Notify
- except (ImportError, AttributeError):
+ except (ImportError, AttributeError, ValueError):
? ++++++++++++
from scudcloud import notify2
Notify = None
class Notifier(object):
def __init__(self, app_name, icon):
self.icon = icon
try:
if Notify is not None:
Notify.init(app_name)
self.notifier = Notify
else:
notify2.init(app_name)
self.notifier = notify2
self.enabled = True
except DBusException:
print("WARNING: No notification daemon found! "
"Notifications will be ignored.")
self.enabled = False
def notify(self, title, message, icon=None):
if not self.enabled:
return
if icon is None:
icon = self.icon
if Notify is not None:
notice = self.notifier.Notification.new(title, message, icon)
else:
notice = notify2.Notification(title, message, icon)
notice.set_hint_string('x-canonical-append', '')
try:
notice.show()
except:
pass
|
cc8f0760aa5497d2285dc85c6f3c17c6ce327c35
|
core/__init__.py
|
core/__init__.py
|
import logging
try:
from google.appengine.datastore import datastore_rpc
from google.appengine.datastore import datastore_query
logging.info('Imported official google datastore_{rpc,query}')
except ImportError:
logging.warning('Importing local datastore_{rpc,query}')
from . import datastore_rpc
from . import datastore_query
from . import monkey
|
import logging
import sys
try:
from google.appengine.datastore import datastore_rpc
from google.appengine.datastore import datastore_query
sys.modules['core.datastore_rpc'] = datastore_rpc
sys.modules['core.datastore_query'] = datastore_query
logging.info('Imported official google datastore_{rpc,query}')
except ImportError:
logging.warning('Importing local datastore_{rpc,query}')
from . import datastore_rpc
from . import datastore_query
from . import monkey
|
Make official google imports actually work.
|
Make official google imports actually work.
|
Python
|
apache-2.0
|
GoogleCloudPlatform/datastore-ndb-python,GoogleCloudPlatform/datastore-ndb-python
|
import logging
+ import sys
try:
from google.appengine.datastore import datastore_rpc
from google.appengine.datastore import datastore_query
+ sys.modules['core.datastore_rpc'] = datastore_rpc
+ sys.modules['core.datastore_query'] = datastore_query
logging.info('Imported official google datastore_{rpc,query}')
except ImportError:
logging.warning('Importing local datastore_{rpc,query}')
from . import datastore_rpc
from . import datastore_query
from . import monkey
|
Make official google imports actually work.
|
## Code Before:
import logging
try:
from google.appengine.datastore import datastore_rpc
from google.appengine.datastore import datastore_query
logging.info('Imported official google datastore_{rpc,query}')
except ImportError:
logging.warning('Importing local datastore_{rpc,query}')
from . import datastore_rpc
from . import datastore_query
from . import monkey
## Instruction:
Make official google imports actually work.
## Code After:
import logging
import sys
try:
from google.appengine.datastore import datastore_rpc
from google.appengine.datastore import datastore_query
sys.modules['core.datastore_rpc'] = datastore_rpc
sys.modules['core.datastore_query'] = datastore_query
logging.info('Imported official google datastore_{rpc,query}')
except ImportError:
logging.warning('Importing local datastore_{rpc,query}')
from . import datastore_rpc
from . import datastore_query
from . import monkey
|
import logging
+ import sys
try:
from google.appengine.datastore import datastore_rpc
from google.appengine.datastore import datastore_query
+ sys.modules['core.datastore_rpc'] = datastore_rpc
+ sys.modules['core.datastore_query'] = datastore_query
logging.info('Imported official google datastore_{rpc,query}')
except ImportError:
logging.warning('Importing local datastore_{rpc,query}')
from . import datastore_rpc
from . import datastore_query
from . import monkey
|
ef89d3608b9ab54aef105528f2c15fa9cc437bcd
|
runtests.py
|
runtests.py
|
import sys
from os.path import abspath, dirname
from django.conf import settings
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
ROOT_URLCONF='email_log.tests.urls',
)
def runtests():
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(failfast=False).run_tests(['tests'])
sys.exit(failures)
if __name__ == "__main__":
runtests()
|
import sys
from os.path import abspath, dirname
from django.conf import settings
import django
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
ROOT_URLCONF='email_log.tests.urls',
)
def runtests():
if hasattr(django, 'setup'):
django.setup()
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
test_args = ['email_log.tests']
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
test_args = ['tests']
failures = runner_class(failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == "__main__":
runtests()
|
Fix tests for Django 1.7
|
Fix tests for Django 1.7
|
Python
|
mit
|
treyhunner/django-email-log,treyhunner/django-email-log
|
import sys
from os.path import abspath, dirname
from django.conf import settings
+ import django
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
ROOT_URLCONF='email_log.tests.urls',
)
def runtests():
+ if hasattr(django, 'setup'):
+ django.setup()
+ try:
+ from django.test.runner import DiscoverRunner
+ runner_class = DiscoverRunner
+ test_args = ['email_log.tests']
+ except ImportError:
- from django.test.simple import DjangoTestSuiteRunner
+ from django.test.simple import DjangoTestSuiteRunner
+ runner_class = DjangoTestSuiteRunner
+ test_args = ['tests']
+
- failures = DjangoTestSuiteRunner(failfast=False).run_tests(['tests'])
+ failures = runner_class(failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == "__main__":
runtests()
|
Fix tests for Django 1.7
|
## Code Before:
import sys
from os.path import abspath, dirname
from django.conf import settings
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
ROOT_URLCONF='email_log.tests.urls',
)
def runtests():
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(failfast=False).run_tests(['tests'])
sys.exit(failures)
if __name__ == "__main__":
runtests()
## Instruction:
Fix tests for Django 1.7
## Code After:
import sys
from os.path import abspath, dirname
from django.conf import settings
import django
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
ROOT_URLCONF='email_log.tests.urls',
)
def runtests():
if hasattr(django, 'setup'):
django.setup()
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
test_args = ['email_log.tests']
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
test_args = ['tests']
failures = runner_class(failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == "__main__":
runtests()
|
import sys
from os.path import abspath, dirname
from django.conf import settings
+ import django
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
ROOT_URLCONF='email_log.tests.urls',
)
def runtests():
+ if hasattr(django, 'setup'):
+ django.setup()
+ try:
+ from django.test.runner import DiscoverRunner
+ runner_class = DiscoverRunner
+ test_args = ['email_log.tests']
+ except ImportError:
- from django.test.simple import DjangoTestSuiteRunner
+ from django.test.simple import DjangoTestSuiteRunner
? ++++
+ runner_class = DjangoTestSuiteRunner
+ test_args = ['tests']
+
- failures = DjangoTestSuiteRunner(failfast=False).run_tests(['tests'])
? ^^^^^^^^^^^^^^^^ -- --
+ failures = runner_class(failfast=False).run_tests(test_args)
? ^ ++++++ ++++
sys.exit(failures)
if __name__ == "__main__":
runtests()
|
3ac86b4c058f920c9ec774c192d84050d61c8cc3
|
tests/__init__.py
|
tests/__init__.py
|
import os
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
os.remove(os.path.join("tests/resources", path))
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
|
import os
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
path = os.path.join("tests/resources", path)
if os.path.isdir(path):
os.rmdir(path)
else:
os.remove(path)
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
|
Fix bug; os.remove cannot remove directories
|
Fix bug; os.remove cannot remove directories
|
Python
|
mit
|
koji-kojiro/hylang-hycc
|
import os
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
- os.remove(os.path.join("tests/resources", path))
+ path = os.path.join("tests/resources", path)
+ if os.path.isdir(path):
+ os.rmdir(path)
+ else:
+ os.remove(path)
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
|
Fix bug; os.remove cannot remove directories
|
## Code Before:
import os
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
os.remove(os.path.join("tests/resources", path))
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
## Instruction:
Fix bug; os.remove cannot remove directories
## Code After:
import os
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
path = os.path.join("tests/resources", path)
if os.path.isdir(path):
os.rmdir(path)
else:
os.remove(path)
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
|
import os
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
- os.remove(os.path.join("tests/resources", path))
? ^^^^^^^^^^ -
+ path = os.path.join("tests/resources", path)
? ^^^^^^^
+ if os.path.isdir(path):
+ os.rmdir(path)
+ else:
+ os.remove(path)
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
|
097cccec41d4455c73d586ef4506075f8c7c1004
|
amon/apps/notifications/opsgenie/sender.py
|
amon/apps/notifications/opsgenie/sender.py
|
import requests
import json
from amon.apps.notifications.models import notifications_model
def send_opsgenie_notification(message=None, auth=None):
sent = False
url = "https://api.opsgenie.com/v1/json/alert"
# Message is limited to 130 chars
data = {
'apiKey': auth.get('api_key'),
'message': message,
}
data = json.dumps(data)
error = None
try:
r = requests.post(url, data=data, timeout=5)
except Exception as e:
error = e
return error
|
import requests
import json
from amon.apps.notifications.models import notifications_model
def send_opsgenie_notification(message=None, auth=None):
sent = False
url = "https://api.opsgenie.com/v2/alerts"
headers = {
'Authorization': 'GenieKey '+ auth.get('api_key'),
'Content-Type': 'application/json'
}
# Message is limited to 130 chars
data = {
'message': message,
}
data = json.dumps(data)
error = None
try:
r = requests.post(url, data=data, timeout=5, headers=headers)
except Exception as e:
error = e
return error
|
Switch to OpsGenie API V2
|
Switch to OpsGenie API V2
|
Python
|
agpl-3.0
|
amonapp/amon,amonapp/amon,martinrusev/amonone,martinrusev/amonone,amonapp/amon,amonapp/amon,martinrusev/amonone,amonapp/amon,martinrusev/amonone
|
import requests
import json
from amon.apps.notifications.models import notifications_model
-
+
def send_opsgenie_notification(message=None, auth=None):
sent = False
- url = "https://api.opsgenie.com/v1/json/alert"
+ url = "https://api.opsgenie.com/v2/alerts"
-
+ headers = {
+ 'Authorization': 'GenieKey '+ auth.get('api_key'),
+ 'Content-Type': 'application/json'
+ }
# Message is limited to 130 chars
data = {
- 'apiKey': auth.get('api_key'),
'message': message,
}
data = json.dumps(data)
error = None
-
+
try:
- r = requests.post(url, data=data, timeout=5)
+ r = requests.post(url, data=data, timeout=5, headers=headers)
except Exception as e:
error = e
return error
|
Switch to OpsGenie API V2
|
## Code Before:
import requests
import json
from amon.apps.notifications.models import notifications_model
def send_opsgenie_notification(message=None, auth=None):
sent = False
url = "https://api.opsgenie.com/v1/json/alert"
# Message is limited to 130 chars
data = {
'apiKey': auth.get('api_key'),
'message': message,
}
data = json.dumps(data)
error = None
try:
r = requests.post(url, data=data, timeout=5)
except Exception as e:
error = e
return error
## Instruction:
Switch to OpsGenie API V2
## Code After:
import requests
import json
from amon.apps.notifications.models import notifications_model
def send_opsgenie_notification(message=None, auth=None):
sent = False
url = "https://api.opsgenie.com/v2/alerts"
headers = {
'Authorization': 'GenieKey '+ auth.get('api_key'),
'Content-Type': 'application/json'
}
# Message is limited to 130 chars
data = {
'message': message,
}
data = json.dumps(data)
error = None
try:
r = requests.post(url, data=data, timeout=5, headers=headers)
except Exception as e:
error = e
return error
|
import requests
import json
from amon.apps.notifications.models import notifications_model
-
+
def send_opsgenie_notification(message=None, auth=None):
sent = False
- url = "https://api.opsgenie.com/v1/json/alert"
? ^^^^^^
+ url = "https://api.opsgenie.com/v2/alerts"
? ^ +
-
+ headers = {
+ 'Authorization': 'GenieKey '+ auth.get('api_key'),
+ 'Content-Type': 'application/json'
+ }
# Message is limited to 130 chars
data = {
- 'apiKey': auth.get('api_key'),
'message': message,
}
data = json.dumps(data)
error = None
-
+
try:
- r = requests.post(url, data=data, timeout=5)
+ r = requests.post(url, data=data, timeout=5, headers=headers)
? +++++++++++++++++
except Exception as e:
error = e
return error
|
168937c586b228c05ada2da79a55c9416c3180d3
|
antifuzz.py
|
antifuzz.py
|
'''
File: antifuzz.py
Authors: Kaitlin Keenan and Ryan Frank
'''
import sys
from shutil import copy2
import subprocess
import ssdeep #http://python-ssdeep.readthedocs.io/en/latest/installation.html
def main():
# Take in file
ogFile = sys.argv[1]
# Make copy of file
newFile = sys.argv[2]
# Mess with the given file
cmd(['lame','--quiet', '--scale', '1', ogFile])
print cmd(['mv', ogFile + ".mp3", newFile])
# Hash files
ogHash = ssdeep.hash_from_file(ogFile)
newHash = ssdeep.hash_from_file(newFile)
# Compare the hashes
#print ogHash
print ssdeep.compare(ogHash, newHash)
def cmd(command):
#if (arg2 && arg1):
p = subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
out, err = p.communicate()
return out
if __name__ == "__main__":
main()
|
'''
File: antifuzz.py
Authors: Kaitlin Keenan and Ryan Frank
'''
import sys
from shutil import copy2
import subprocess
import ssdeep #http://python-ssdeep.readthedocs.io/en/latest/installation.html
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument("originalFile", help="File to antifuzz")
parser.add_argument("newFile", help="Name of the antifuzzed file")
args = parser.parse_args()
# Take in file
ogFile = args.originalFile
# Make copy of file
nFile = args.newFile
# Mess with the given file
mp3(ogFile, nFile)
# Hash files
ogHash = ssdeep.hash_from_file(ogFile)
newHash = ssdeep.hash_from_file(nFile)
# Compare the hashes
#print ogHash
diff=str(ssdeep.compare(ogHash, newHash))
print("The files are " + diff + "% different")
def mp3(ogFile, newFile):
cmd(['lame','--quiet', '--scale', '1', ogFile])
cmd(['mv', ogFile + ".mp3", newFile])
def cmd(command):
#if (arg2 && arg1):
p = subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
out, err = p.communicate()
return out
if __name__ == "__main__":
main()
|
Add help, make output more user friendly
|
Add help, make output more user friendly
|
Python
|
mit
|
ForensicTools/antifuzzyhashing-475-2161_Keenan_Frank
|
'''
File: antifuzz.py
Authors: Kaitlin Keenan and Ryan Frank
'''
import sys
from shutil import copy2
import subprocess
import ssdeep #http://python-ssdeep.readthedocs.io/en/latest/installation.html
+ import argparse
def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("originalFile", help="File to antifuzz")
+ parser.add_argument("newFile", help="Name of the antifuzzed file")
+ args = parser.parse_args()
+
# Take in file
- ogFile = sys.argv[1]
+ ogFile = args.originalFile
# Make copy of file
- newFile = sys.argv[2]
+ nFile = args.newFile
# Mess with the given file
+ mp3(ogFile, nFile)
- cmd(['lame','--quiet', '--scale', '1', ogFile])
- print cmd(['mv', ogFile + ".mp3", newFile])
# Hash files
ogHash = ssdeep.hash_from_file(ogFile)
- newHash = ssdeep.hash_from_file(newFile)
+ newHash = ssdeep.hash_from_file(nFile)
# Compare the hashes
#print ogHash
- print ssdeep.compare(ogHash, newHash)
+ diff=str(ssdeep.compare(ogHash, newHash))
+ print("The files are " + diff + "% different")
+
+ def mp3(ogFile, newFile):
+ cmd(['lame','--quiet', '--scale', '1', ogFile])
+ cmd(['mv', ogFile + ".mp3", newFile])
def cmd(command):
#if (arg2 && arg1):
p = subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
out, err = p.communicate()
return out
if __name__ == "__main__":
main()
|
Add help, make output more user friendly
|
## Code Before:
'''
File: antifuzz.py
Authors: Kaitlin Keenan and Ryan Frank
'''
import sys
from shutil import copy2
import subprocess
import ssdeep #http://python-ssdeep.readthedocs.io/en/latest/installation.html
def main():
# Take in file
ogFile = sys.argv[1]
# Make copy of file
newFile = sys.argv[2]
# Mess with the given file
cmd(['lame','--quiet', '--scale', '1', ogFile])
print cmd(['mv', ogFile + ".mp3", newFile])
# Hash files
ogHash = ssdeep.hash_from_file(ogFile)
newHash = ssdeep.hash_from_file(newFile)
# Compare the hashes
#print ogHash
print ssdeep.compare(ogHash, newHash)
def cmd(command):
#if (arg2 && arg1):
p = subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
out, err = p.communicate()
return out
if __name__ == "__main__":
main()
## Instruction:
Add help, make output more user friendly
## Code After:
'''
File: antifuzz.py
Authors: Kaitlin Keenan and Ryan Frank
'''
import sys
from shutil import copy2
import subprocess
import ssdeep #http://python-ssdeep.readthedocs.io/en/latest/installation.html
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument("originalFile", help="File to antifuzz")
parser.add_argument("newFile", help="Name of the antifuzzed file")
args = parser.parse_args()
# Take in file
ogFile = args.originalFile
# Make copy of file
nFile = args.newFile
# Mess with the given file
mp3(ogFile, nFile)
# Hash files
ogHash = ssdeep.hash_from_file(ogFile)
newHash = ssdeep.hash_from_file(nFile)
# Compare the hashes
#print ogHash
diff=str(ssdeep.compare(ogHash, newHash))
print("The files are " + diff + "% different")
def mp3(ogFile, newFile):
cmd(['lame','--quiet', '--scale', '1', ogFile])
cmd(['mv', ogFile + ".mp3", newFile])
def cmd(command):
#if (arg2 && arg1):
p = subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
out, err = p.communicate()
return out
if __name__ == "__main__":
main()
|
'''
File: antifuzz.py
Authors: Kaitlin Keenan and Ryan Frank
'''
import sys
from shutil import copy2
import subprocess
import ssdeep #http://python-ssdeep.readthedocs.io/en/latest/installation.html
+ import argparse
def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("originalFile", help="File to antifuzz")
+ parser.add_argument("newFile", help="Name of the antifuzzed file")
+ args = parser.parse_args()
+
# Take in file
- ogFile = sys.argv[1]
+ ogFile = args.originalFile
# Make copy of file
- newFile = sys.argv[2]
+ nFile = args.newFile
# Mess with the given file
+ mp3(ogFile, nFile)
- cmd(['lame','--quiet', '--scale', '1', ogFile])
- print cmd(['mv', ogFile + ".mp3", newFile])
# Hash files
ogHash = ssdeep.hash_from_file(ogFile)
- newHash = ssdeep.hash_from_file(newFile)
? --
+ newHash = ssdeep.hash_from_file(nFile)
# Compare the hashes
#print ogHash
- print ssdeep.compare(ogHash, newHash)
? ^ ^^^^
+ diff=str(ssdeep.compare(ogHash, newHash))
? ^^^^^^^ ^ +
+ print("The files are " + diff + "% different")
+
+ def mp3(ogFile, newFile):
+ cmd(['lame','--quiet', '--scale', '1', ogFile])
+ cmd(['mv', ogFile + ".mp3", newFile])
def cmd(command):
#if (arg2 && arg1):
p = subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
out, err = p.communicate()
return out
if __name__ == "__main__":
main()
|
c136ee96237a05cb717c777dd33b9a3dff9b0015
|
test/test_py3.py
|
test/test_py3.py
|
import pytest
from in_place import InPlace
from test_in_place_util import UNICODE, pylistdir
def test_py3_textstr(tmpdir):
""" Assert that `InPlace` works with text strings in Python 3 """
assert pylistdir(tmpdir) == []
p = tmpdir.join("file.txt")
p.write_text(UNICODE, 'utf-8')
with InPlace(str(p)) as fp:
txt = fp.read()
assert isinstance(txt, str)
assert txt == UNICODE
print(UNICODE, file=fp)
assert pylistdir(tmpdir) == ['file.txt']
assert p.read_text('utf-8') == UNICODE + '\n'
def test_py3_not_bytestr(tmpdir):
""" Assert that `InPlace` does not work with byte strings in Python 3 """
assert pylistdir(tmpdir) == []
p = tmpdir.join("file.txt")
p.write_text(UNICODE, 'utf-8')
with InPlace(str(p)) as fp:
txt = fp.read()
assert isinstance(txt, str)
assert txt == UNICODE
txt = txt.encode('utf-8')
with pytest.raises(TypeError):
# `print()` would stringify `txt` to `b'...'`, which is not what we
# want.
fp.write(txt)
|
import locale
import pytest
from in_place import InPlace
from test_in_place_util import UNICODE, pylistdir
def test_py3_textstr(tmpdir):
""" Assert that `InPlace` works with text strings in Python 3 """
assert pylistdir(tmpdir) == []
p = tmpdir.join("file.txt")
p.write_text(UNICODE, locale.getpreferredencoding())
with InPlace(str(p)) as fp:
txt = fp.read()
assert isinstance(txt, str)
assert txt == UNICODE
print(UNICODE, file=fp)
assert pylistdir(tmpdir) == ['file.txt']
assert p.read_text(locale.getpreferredencoding()) == UNICODE + '\n'
def test_py3_not_bytestr(tmpdir):
""" Assert that `InPlace` does not work with byte strings in Python 3 """
assert pylistdir(tmpdir) == []
p = tmpdir.join("file.txt")
p.write_text(UNICODE, locale.getpreferredencoding())
with InPlace(str(p)) as fp:
txt = fp.read()
assert isinstance(txt, str)
assert txt == UNICODE
txt = txt.encode('utf-8')
with pytest.raises(TypeError):
# `print()` would stringify `txt` to `b'...'`, which is not what we
# want.
fp.write(txt)
|
Handle different default encoding on Windows in tests
|
Handle different default encoding on Windows in tests
|
Python
|
mit
|
jwodder/inplace
|
+ import locale
import pytest
from in_place import InPlace
from test_in_place_util import UNICODE, pylistdir
def test_py3_textstr(tmpdir):
""" Assert that `InPlace` works with text strings in Python 3 """
assert pylistdir(tmpdir) == []
p = tmpdir.join("file.txt")
- p.write_text(UNICODE, 'utf-8')
+ p.write_text(UNICODE, locale.getpreferredencoding())
with InPlace(str(p)) as fp:
txt = fp.read()
assert isinstance(txt, str)
assert txt == UNICODE
print(UNICODE, file=fp)
assert pylistdir(tmpdir) == ['file.txt']
- assert p.read_text('utf-8') == UNICODE + '\n'
+ assert p.read_text(locale.getpreferredencoding()) == UNICODE + '\n'
def test_py3_not_bytestr(tmpdir):
""" Assert that `InPlace` does not work with byte strings in Python 3 """
assert pylistdir(tmpdir) == []
p = tmpdir.join("file.txt")
- p.write_text(UNICODE, 'utf-8')
+ p.write_text(UNICODE, locale.getpreferredencoding())
with InPlace(str(p)) as fp:
txt = fp.read()
assert isinstance(txt, str)
assert txt == UNICODE
txt = txt.encode('utf-8')
with pytest.raises(TypeError):
# `print()` would stringify `txt` to `b'...'`, which is not what we
# want.
fp.write(txt)
|
Handle different default encoding on Windows in tests
|
## Code Before:
import pytest
from in_place import InPlace
from test_in_place_util import UNICODE, pylistdir
def test_py3_textstr(tmpdir):
""" Assert that `InPlace` works with text strings in Python 3 """
assert pylistdir(tmpdir) == []
p = tmpdir.join("file.txt")
p.write_text(UNICODE, 'utf-8')
with InPlace(str(p)) as fp:
txt = fp.read()
assert isinstance(txt, str)
assert txt == UNICODE
print(UNICODE, file=fp)
assert pylistdir(tmpdir) == ['file.txt']
assert p.read_text('utf-8') == UNICODE + '\n'
def test_py3_not_bytestr(tmpdir):
""" Assert that `InPlace` does not work with byte strings in Python 3 """
assert pylistdir(tmpdir) == []
p = tmpdir.join("file.txt")
p.write_text(UNICODE, 'utf-8')
with InPlace(str(p)) as fp:
txt = fp.read()
assert isinstance(txt, str)
assert txt == UNICODE
txt = txt.encode('utf-8')
with pytest.raises(TypeError):
# `print()` would stringify `txt` to `b'...'`, which is not what we
# want.
fp.write(txt)
## Instruction:
Handle different default encoding on Windows in tests
## Code After:
import locale
import pytest
from in_place import InPlace
from test_in_place_util import UNICODE, pylistdir
def test_py3_textstr(tmpdir):
""" Assert that `InPlace` works with text strings in Python 3 """
assert pylistdir(tmpdir) == []
p = tmpdir.join("file.txt")
p.write_text(UNICODE, locale.getpreferredencoding())
with InPlace(str(p)) as fp:
txt = fp.read()
assert isinstance(txt, str)
assert txt == UNICODE
print(UNICODE, file=fp)
assert pylistdir(tmpdir) == ['file.txt']
assert p.read_text(locale.getpreferredencoding()) == UNICODE + '\n'
def test_py3_not_bytestr(tmpdir):
""" Assert that `InPlace` does not work with byte strings in Python 3 """
assert pylistdir(tmpdir) == []
p = tmpdir.join("file.txt")
p.write_text(UNICODE, locale.getpreferredencoding())
with InPlace(str(p)) as fp:
txt = fp.read()
assert isinstance(txt, str)
assert txt == UNICODE
txt = txt.encode('utf-8')
with pytest.raises(TypeError):
# `print()` would stringify `txt` to `b'...'`, which is not what we
# want.
fp.write(txt)
|
+ import locale
import pytest
from in_place import InPlace
from test_in_place_util import UNICODE, pylistdir
def test_py3_textstr(tmpdir):
""" Assert that `InPlace` works with text strings in Python 3 """
assert pylistdir(tmpdir) == []
p = tmpdir.join("file.txt")
- p.write_text(UNICODE, 'utf-8')
+ p.write_text(UNICODE, locale.getpreferredencoding())
with InPlace(str(p)) as fp:
txt = fp.read()
assert isinstance(txt, str)
assert txt == UNICODE
print(UNICODE, file=fp)
assert pylistdir(tmpdir) == ['file.txt']
- assert p.read_text('utf-8') == UNICODE + '\n'
+ assert p.read_text(locale.getpreferredencoding()) == UNICODE + '\n'
def test_py3_not_bytestr(tmpdir):
""" Assert that `InPlace` does not work with byte strings in Python 3 """
assert pylistdir(tmpdir) == []
p = tmpdir.join("file.txt")
- p.write_text(UNICODE, 'utf-8')
+ p.write_text(UNICODE, locale.getpreferredencoding())
with InPlace(str(p)) as fp:
txt = fp.read()
assert isinstance(txt, str)
assert txt == UNICODE
txt = txt.encode('utf-8')
with pytest.raises(TypeError):
# `print()` would stringify `txt` to `b'...'`, which is not what we
# want.
fp.write(txt)
|
fa75cdb0114d86b626a77ea19897abd532fd4aeb
|
src/hack4lt/forms.py
|
src/hack4lt/forms.py
|
from django import forms
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _
from hack4lt.models import Hacker
class RegistrationForm(forms.ModelForm):
class Meta:
model = Hacker
fields = ('username', 'first_name', 'last_name', 'email', 'repository',
'website', 'stackoverflow_user', 'description')
class LoginForm(forms.Form):
username = forms.CharField(label=_('Username'), max_length=100)
password = forms.CharField(label=_('Password'), max_length=128,
widget=forms.PasswordInput(render_value=False))
def clean(self):
cleaned_data = super(LoginForm, self).clean()
if self.errors:
return cleaned_data
user = authenticate(**cleaned_data)
if not user:
raise forms.ValidationError(_('Username or password is incorrect'))
cleaned_data['user'] = user
return cleaned_data
|
from django import forms
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _
from django.forms.util import ErrorList
from hack4lt.models import Hacker
class RegistrationForm(forms.ModelForm):
password = forms.CharField(label=_('Password'), max_length=128, min_length=6,
widget=forms.PasswordInput(render_value=False))
password_repeat = forms.CharField(label=_('Repeat Password'), min_length=6,
max_length=128, widget=forms.PasswordInput(render_value=False))
class Meta:
model = Hacker
fields = ('username', 'password', 'password_repeat', 'first_name',
'last_name', 'email', 'repository', 'website',
'stackoverflow_user', 'description')
def is_valid(self):
valid = super(RegistrationForm, self).is_valid()
if not valid:
return valid
first_password = self.cleaned_data.get('password')
repeat_password = self.cleaned_data.get('password_repeat')
if first_password == repeat_password:
return True
errors = self._errors.setdefault('password', ErrorList())
errors.append(u'Passwords do not match')
return False
class LoginForm(forms.Form):
username = forms.CharField(label=_('Username'), max_length=100)
password = forms.CharField(label=_('Password'), max_length=128,
widget=forms.PasswordInput(render_value=False))
def clean(self):
cleaned_data = super(LoginForm, self).clean()
if self.errors:
return cleaned_data
user = authenticate(**cleaned_data)
if not user:
raise forms.ValidationError(_('Username or password is incorrect'))
cleaned_data['user'] = user
return cleaned_data
|
Add password and password_repeat fields to registration form.
|
Add password and password_repeat fields to registration form.
|
Python
|
bsd-3-clause
|
niekas/Hack4LT
|
from django import forms
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _
+ from django.forms.util import ErrorList
from hack4lt.models import Hacker
+ class RegistrationForm(forms.ModelForm):
+ password = forms.CharField(label=_('Password'), max_length=128, min_length=6,
+ widget=forms.PasswordInput(render_value=False))
+ password_repeat = forms.CharField(label=_('Repeat Password'), min_length=6,
+ max_length=128, widget=forms.PasswordInput(render_value=False))
- class RegistrationForm(forms.ModelForm):
class Meta:
model = Hacker
- fields = ('username', 'first_name', 'last_name', 'email', 'repository',
+ fields = ('username', 'password', 'password_repeat', 'first_name',
+ 'last_name', 'email', 'repository', 'website',
- 'website', 'stackoverflow_user', 'description')
+ 'stackoverflow_user', 'description')
+
+ def is_valid(self):
+ valid = super(RegistrationForm, self).is_valid()
+ if not valid:
+ return valid
+
+ first_password = self.cleaned_data.get('password')
+ repeat_password = self.cleaned_data.get('password_repeat')
+
+ if first_password == repeat_password:
+ return True
+ errors = self._errors.setdefault('password', ErrorList())
+ errors.append(u'Passwords do not match')
+ return False
class LoginForm(forms.Form):
username = forms.CharField(label=_('Username'), max_length=100)
password = forms.CharField(label=_('Password'), max_length=128,
widget=forms.PasswordInput(render_value=False))
def clean(self):
cleaned_data = super(LoginForm, self).clean()
if self.errors:
return cleaned_data
user = authenticate(**cleaned_data)
if not user:
raise forms.ValidationError(_('Username or password is incorrect'))
cleaned_data['user'] = user
return cleaned_data
|
Add password and password_repeat fields to registration form.
|
## Code Before:
from django import forms
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _
from hack4lt.models import Hacker
class RegistrationForm(forms.ModelForm):
class Meta:
model = Hacker
fields = ('username', 'first_name', 'last_name', 'email', 'repository',
'website', 'stackoverflow_user', 'description')
class LoginForm(forms.Form):
username = forms.CharField(label=_('Username'), max_length=100)
password = forms.CharField(label=_('Password'), max_length=128,
widget=forms.PasswordInput(render_value=False))
def clean(self):
cleaned_data = super(LoginForm, self).clean()
if self.errors:
return cleaned_data
user = authenticate(**cleaned_data)
if not user:
raise forms.ValidationError(_('Username or password is incorrect'))
cleaned_data['user'] = user
return cleaned_data
## Instruction:
Add password and password_repeat fields to registration form.
## Code After:
from django import forms
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _
from django.forms.util import ErrorList
from hack4lt.models import Hacker
class RegistrationForm(forms.ModelForm):
password = forms.CharField(label=_('Password'), max_length=128, min_length=6,
widget=forms.PasswordInput(render_value=False))
password_repeat = forms.CharField(label=_('Repeat Password'), min_length=6,
max_length=128, widget=forms.PasswordInput(render_value=False))
class Meta:
model = Hacker
fields = ('username', 'password', 'password_repeat', 'first_name',
'last_name', 'email', 'repository', 'website',
'stackoverflow_user', 'description')
def is_valid(self):
valid = super(RegistrationForm, self).is_valid()
if not valid:
return valid
first_password = self.cleaned_data.get('password')
repeat_password = self.cleaned_data.get('password_repeat')
if first_password == repeat_password:
return True
errors = self._errors.setdefault('password', ErrorList())
errors.append(u'Passwords do not match')
return False
class LoginForm(forms.Form):
username = forms.CharField(label=_('Username'), max_length=100)
password = forms.CharField(label=_('Password'), max_length=128,
widget=forms.PasswordInput(render_value=False))
def clean(self):
cleaned_data = super(LoginForm, self).clean()
if self.errors:
return cleaned_data
user = authenticate(**cleaned_data)
if not user:
raise forms.ValidationError(_('Username or password is incorrect'))
cleaned_data['user'] = user
return cleaned_data
|
from django import forms
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _
+ from django.forms.util import ErrorList
from hack4lt.models import Hacker
+ class RegistrationForm(forms.ModelForm):
+ password = forms.CharField(label=_('Password'), max_length=128, min_length=6,
+ widget=forms.PasswordInput(render_value=False))
+ password_repeat = forms.CharField(label=_('Repeat Password'), min_length=6,
+ max_length=128, widget=forms.PasswordInput(render_value=False))
- class RegistrationForm(forms.ModelForm):
class Meta:
model = Hacker
- fields = ('username', 'first_name', 'last_name', 'email', 'repository',
+ fields = ('username', 'password', 'password_repeat', 'first_name',
+ 'last_name', 'email', 'repository', 'website',
- 'website', 'stackoverflow_user', 'description')
? ^^^^^^^^^^
+ 'stackoverflow_user', 'description')
? ^
+
+ def is_valid(self):
+ valid = super(RegistrationForm, self).is_valid()
+ if not valid:
+ return valid
+
+ first_password = self.cleaned_data.get('password')
+ repeat_password = self.cleaned_data.get('password_repeat')
+
+ if first_password == repeat_password:
+ return True
+ errors = self._errors.setdefault('password', ErrorList())
+ errors.append(u'Passwords do not match')
+ return False
class LoginForm(forms.Form):
username = forms.CharField(label=_('Username'), max_length=100)
password = forms.CharField(label=_('Password'), max_length=128,
widget=forms.PasswordInput(render_value=False))
def clean(self):
cleaned_data = super(LoginForm, self).clean()
if self.errors:
return cleaned_data
user = authenticate(**cleaned_data)
if not user:
raise forms.ValidationError(_('Username or password is incorrect'))
cleaned_data['user'] = user
return cleaned_data
|
fea4f04abc18b8dcf4970a1f338a8d610f04260d
|
src/pytz/tests/test_docs.py
|
src/pytz/tests/test_docs.py
|
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
if __name__ == '__main__':
unittest.main(defaultTest='README')
|
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
def test_suite():
return README
if __name__ == '__main__':
unittest.main(defaultTest='README')
|
Add a test_suite method as used by the Zope3 test runner
|
Add a test_suite method as used by the Zope3 test runner
|
Python
|
mit
|
stub42/pytz,stub42/pytz,stub42/pytz,stub42/pytz
|
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
+ def test_suite():
+ return README
+
if __name__ == '__main__':
unittest.main(defaultTest='README')
|
Add a test_suite method as used by the Zope3 test runner
|
## Code Before:
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
if __name__ == '__main__':
unittest.main(defaultTest='README')
## Instruction:
Add a test_suite method as used by the Zope3 test runner
## Code After:
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
def test_suite():
return README
if __name__ == '__main__':
unittest.main(defaultTest='README')
|
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
+ def test_suite():
+ return README
+
if __name__ == '__main__':
unittest.main(defaultTest='README')
|
617ac4a745afb07299c73977477f52911f3e6e4c
|
flask_skeleton_api/app.py
|
flask_skeleton_api/app.py
|
from flask import Flask, g, request
import uuid
import requests
app = Flask(__name__)
app.config.from_pyfile("config.py")
@app.before_request
def before_request():
# Sets the transaction trace id into the global object if it has been provided in the HTTP header from the caller.
# Generate a new one if it has not. We will use this in log messages.
trace_id = request.headers.get('X-Trace-ID', None)
if trace_id is None:
trace_id = uuid.uuid4().hex
g.trace_id = trace_id
# We also create a session-level requests object for the app to use with the header pre-set, so other APIs will receive it.
# These lines can be removed if the app will not make requests to other LR APIs!
g.requests = requests.Session()
g.requests.headers.update({'X-Trace-ID': trace_id})
|
from flask import Flask, g, request
import uuid
import requests
app = Flask(__name__)
app.config.from_pyfile("config.py")
@app.before_request
def before_request():
# Sets the transaction trace id into the global object if it has been provided in the HTTP header from the caller.
# Generate a new one if it has not. We will use this in log messages.
trace_id = request.headers.get('X-Trace-ID', None)
if trace_id is None:
trace_id = uuid.uuid4().hex
g.trace_id = trace_id
# We also create a session-level requests object for the app to use with the header pre-set, so other APIs will receive it.
# These lines can be removed if the app will not make requests to other LR APIs!
g.requests = requests.Session()
g.requests.headers.update({'X-Trace-ID': trace_id})
@app.after_request
def after_request(response):
# Add the API version (as in the interface spec, not the app) to the header. Semantic versioning applies - see the
# API manual. A major version update will need to go in the URL. All changes should be documented though, for
# reusing teams to take advantage of.
response.headers["X-API-Version"] = "1.0.0"
return response
|
Add API version into response header
|
Add API version into response header
|
Python
|
mit
|
matthew-shaw/thing-api
|
from flask import Flask, g, request
import uuid
import requests
app = Flask(__name__)
app.config.from_pyfile("config.py")
@app.before_request
def before_request():
# Sets the transaction trace id into the global object if it has been provided in the HTTP header from the caller.
# Generate a new one if it has not. We will use this in log messages.
trace_id = request.headers.get('X-Trace-ID', None)
if trace_id is None:
trace_id = uuid.uuid4().hex
g.trace_id = trace_id
# We also create a session-level requests object for the app to use with the header pre-set, so other APIs will receive it.
# These lines can be removed if the app will not make requests to other LR APIs!
g.requests = requests.Session()
g.requests.headers.update({'X-Trace-ID': trace_id})
+
+ @app.after_request
+ def after_request(response):
+ # Add the API version (as in the interface spec, not the app) to the header. Semantic versioning applies - see the
+ # API manual. A major version update will need to go in the URL. All changes should be documented though, for
+ # reusing teams to take advantage of.
+ response.headers["X-API-Version"] = "1.0.0"
+ return response
+
|
Add API version into response header
|
## Code Before:
from flask import Flask, g, request
import uuid
import requests
app = Flask(__name__)
app.config.from_pyfile("config.py")
@app.before_request
def before_request():
# Sets the transaction trace id into the global object if it has been provided in the HTTP header from the caller.
# Generate a new one if it has not. We will use this in log messages.
trace_id = request.headers.get('X-Trace-ID', None)
if trace_id is None:
trace_id = uuid.uuid4().hex
g.trace_id = trace_id
# We also create a session-level requests object for the app to use with the header pre-set, so other APIs will receive it.
# These lines can be removed if the app will not make requests to other LR APIs!
g.requests = requests.Session()
g.requests.headers.update({'X-Trace-ID': trace_id})
## Instruction:
Add API version into response header
## Code After:
from flask import Flask, g, request
import uuid
import requests
app = Flask(__name__)
app.config.from_pyfile("config.py")
@app.before_request
def before_request():
# Sets the transaction trace id into the global object if it has been provided in the HTTP header from the caller.
# Generate a new one if it has not. We will use this in log messages.
trace_id = request.headers.get('X-Trace-ID', None)
if trace_id is None:
trace_id = uuid.uuid4().hex
g.trace_id = trace_id
# We also create a session-level requests object for the app to use with the header pre-set, so other APIs will receive it.
# These lines can be removed if the app will not make requests to other LR APIs!
g.requests = requests.Session()
g.requests.headers.update({'X-Trace-ID': trace_id})
@app.after_request
def after_request(response):
# Add the API version (as in the interface spec, not the app) to the header. Semantic versioning applies - see the
# API manual. A major version update will need to go in the URL. All changes should be documented though, for
# reusing teams to take advantage of.
response.headers["X-API-Version"] = "1.0.0"
return response
|
from flask import Flask, g, request
import uuid
import requests
app = Flask(__name__)
app.config.from_pyfile("config.py")
@app.before_request
def before_request():
# Sets the transaction trace id into the global object if it has been provided in the HTTP header from the caller.
# Generate a new one if it has not. We will use this in log messages.
trace_id = request.headers.get('X-Trace-ID', None)
if trace_id is None:
trace_id = uuid.uuid4().hex
g.trace_id = trace_id
# We also create a session-level requests object for the app to use with the header pre-set, so other APIs will receive it.
# These lines can be removed if the app will not make requests to other LR APIs!
g.requests = requests.Session()
g.requests.headers.update({'X-Trace-ID': trace_id})
+
+
+ @app.after_request
+ def after_request(response):
+ # Add the API version (as in the interface spec, not the app) to the header. Semantic versioning applies - see the
+ # API manual. A major version update will need to go in the URL. All changes should be documented though, for
+ # reusing teams to take advantage of.
+ response.headers["X-API-Version"] = "1.0.0"
+ return response
|
11f43c583fb3b7e8ed2aa74f0f58445a6c2fbecf
|
bot/api/api.py
|
bot/api/api.py
|
from bot.api.domain import Message
from bot.api.telegram import TelegramBotApi
from bot.storage import State
class Api:
def __init__(self, telegram_api: TelegramBotApi, state: State):
self.telegram_api = telegram_api
self.state = state
def send_message(self, message: Message, **params):
message_params = message.data.copy()
message_params.update(params)
return self.telegram_api.sendMessage(**message_params)
def get_pending_updates(self):
return self.get_updates(timeout=0)
def get_updates(self, timeout=45):
updates = self.telegram_api.getUpdates(offset=self.__get_updates_offset(), timeout=timeout)
for update in updates:
self.__set_updates_offset(update.update_id)
yield update
def __get_updates_offset(self):
return self.state.next_update_id
def __set_updates_offset(self, last_update_id):
self.state.next_update_id = str(last_update_id + 1)
def __getattr__(self, item):
return self.telegram_api.__getattr__(item)
|
from bot.api.domain import Message
from bot.api.telegram import TelegramBotApi
from bot.storage import State
class Api:
def __init__(self, telegram_api: TelegramBotApi, state: State):
self.telegram_api = telegram_api
self.state = state
def send_message(self, message: Message, **params):
message_params = message.data.copy()
message_params.update(params)
return self.telegram_api.sendMessage(**message_params)
def get_pending_updates(self):
there_are_pending_updates = True
while there_are_pending_updates:
there_are_pending_updates = False
for update in self.get_updates(timeout=0):
there_are_pending_updates = True
yield update
def get_updates(self, timeout=45):
updates = self.telegram_api.getUpdates(offset=self.__get_updates_offset(), timeout=timeout)
for update in updates:
self.__set_updates_offset(update.update_id)
yield update
def __get_updates_offset(self):
return self.state.next_update_id
def __set_updates_offset(self, last_update_id):
self.state.next_update_id = str(last_update_id + 1)
def __getattr__(self, item):
return self.telegram_api.__getattr__(item)
|
Fix get_pending_updates not correctly returning all pending updates
|
Fix get_pending_updates not correctly returning all pending updates
It was only returning the first 100 ones returned in the first telegram API call.
|
Python
|
agpl-3.0
|
alvarogzp/telegram-bot,alvarogzp/telegram-bot
|
from bot.api.domain import Message
from bot.api.telegram import TelegramBotApi
from bot.storage import State
class Api:
def __init__(self, telegram_api: TelegramBotApi, state: State):
self.telegram_api = telegram_api
self.state = state
def send_message(self, message: Message, **params):
message_params = message.data.copy()
message_params.update(params)
return self.telegram_api.sendMessage(**message_params)
def get_pending_updates(self):
+ there_are_pending_updates = True
+ while there_are_pending_updates:
+ there_are_pending_updates = False
- return self.get_updates(timeout=0)
+ for update in self.get_updates(timeout=0):
+ there_are_pending_updates = True
+ yield update
def get_updates(self, timeout=45):
updates = self.telegram_api.getUpdates(offset=self.__get_updates_offset(), timeout=timeout)
for update in updates:
self.__set_updates_offset(update.update_id)
yield update
def __get_updates_offset(self):
return self.state.next_update_id
def __set_updates_offset(self, last_update_id):
self.state.next_update_id = str(last_update_id + 1)
def __getattr__(self, item):
return self.telegram_api.__getattr__(item)
|
Fix get_pending_updates not correctly returning all pending updates
|
## Code Before:
from bot.api.domain import Message
from bot.api.telegram import TelegramBotApi
from bot.storage import State
class Api:
def __init__(self, telegram_api: TelegramBotApi, state: State):
self.telegram_api = telegram_api
self.state = state
def send_message(self, message: Message, **params):
message_params = message.data.copy()
message_params.update(params)
return self.telegram_api.sendMessage(**message_params)
def get_pending_updates(self):
return self.get_updates(timeout=0)
def get_updates(self, timeout=45):
updates = self.telegram_api.getUpdates(offset=self.__get_updates_offset(), timeout=timeout)
for update in updates:
self.__set_updates_offset(update.update_id)
yield update
def __get_updates_offset(self):
return self.state.next_update_id
def __set_updates_offset(self, last_update_id):
self.state.next_update_id = str(last_update_id + 1)
def __getattr__(self, item):
return self.telegram_api.__getattr__(item)
## Instruction:
Fix get_pending_updates not correctly returning all pending updates
## Code After:
from bot.api.domain import Message
from bot.api.telegram import TelegramBotApi
from bot.storage import State
class Api:
def __init__(self, telegram_api: TelegramBotApi, state: State):
self.telegram_api = telegram_api
self.state = state
def send_message(self, message: Message, **params):
message_params = message.data.copy()
message_params.update(params)
return self.telegram_api.sendMessage(**message_params)
def get_pending_updates(self):
there_are_pending_updates = True
while there_are_pending_updates:
there_are_pending_updates = False
for update in self.get_updates(timeout=0):
there_are_pending_updates = True
yield update
def get_updates(self, timeout=45):
updates = self.telegram_api.getUpdates(offset=self.__get_updates_offset(), timeout=timeout)
for update in updates:
self.__set_updates_offset(update.update_id)
yield update
def __get_updates_offset(self):
return self.state.next_update_id
def __set_updates_offset(self, last_update_id):
self.state.next_update_id = str(last_update_id + 1)
def __getattr__(self, item):
return self.telegram_api.__getattr__(item)
|
from bot.api.domain import Message
from bot.api.telegram import TelegramBotApi
from bot.storage import State
class Api:
def __init__(self, telegram_api: TelegramBotApi, state: State):
self.telegram_api = telegram_api
self.state = state
def send_message(self, message: Message, **params):
message_params = message.data.copy()
message_params.update(params)
return self.telegram_api.sendMessage(**message_params)
def get_pending_updates(self):
+ there_are_pending_updates = True
+ while there_are_pending_updates:
+ there_are_pending_updates = False
- return self.get_updates(timeout=0)
? ^^^
+ for update in self.get_updates(timeout=0):
? ++++++ ++++++ ^^ +
+ there_are_pending_updates = True
+ yield update
def get_updates(self, timeout=45):
updates = self.telegram_api.getUpdates(offset=self.__get_updates_offset(), timeout=timeout)
for update in updates:
self.__set_updates_offset(update.update_id)
yield update
def __get_updates_offset(self):
return self.state.next_update_id
def __set_updates_offset(self, last_update_id):
self.state.next_update_id = str(last_update_id + 1)
def __getattr__(self, item):
return self.telegram_api.__getattr__(item)
|
8995cbf71454e3424e15913661ee659c48f7b8fa
|
volunteer_planner/settings/local_mysql.py
|
volunteer_planner/settings/local_mysql.py
|
from volunteer_planner.settings.local import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'volunteer_planner',
'PASSWORD': os.environ.get('DATABASE_PW', 'volunteer_planner'),
'USER': os.environ.get('DB_USER', 'vp')
}
}
|
from volunteer_planner.settings.local import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': os.environ.get('DATABASE_NAME', 'volunteer_planner'),
'PASSWORD': os.environ.get('DATABASE_PW', 'volunteer_planner'),
'USER': os.environ.get('DB_USER', 'vp')
}
}
|
Make local mysql db name overridable with DATABASE_NAME environment variable
|
Make local mysql db name overridable with DATABASE_NAME environment variable
|
Python
|
agpl-3.0
|
christophmeissner/volunteer_planner,christophmeissner/volunteer_planner,coders4help/volunteer_planner,klinger/volunteer_planner,klinger/volunteer_planner,pitpalme/volunteer_planner,pitpalme/volunteer_planner,pitpalme/volunteer_planner,coders4help/volunteer_planner,pitpalme/volunteer_planner,coders4help/volunteer_planner,christophmeissner/volunteer_planner,alper/volunteer_planner,klinger/volunteer_planner,coders4help/volunteer_planner,alper/volunteer_planner,volunteer-planner/volunteer_planner,christophmeissner/volunteer_planner,volunteer-planner/volunteer_planner,klinger/volunteer_planner,volunteer-planner/volunteer_planner,volunteer-planner/volunteer_planner,alper/volunteer_planner
|
from volunteer_planner.settings.local import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
- 'NAME': 'volunteer_planner',
+ 'NAME': os.environ.get('DATABASE_NAME', 'volunteer_planner'),
'PASSWORD': os.environ.get('DATABASE_PW', 'volunteer_planner'),
'USER': os.environ.get('DB_USER', 'vp')
}
}
|
Make local mysql db name overridable with DATABASE_NAME environment variable
|
## Code Before:
from volunteer_planner.settings.local import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'volunteer_planner',
'PASSWORD': os.environ.get('DATABASE_PW', 'volunteer_planner'),
'USER': os.environ.get('DB_USER', 'vp')
}
}
## Instruction:
Make local mysql db name overridable with DATABASE_NAME environment variable
## Code After:
from volunteer_planner.settings.local import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': os.environ.get('DATABASE_NAME', 'volunteer_planner'),
'PASSWORD': os.environ.get('DATABASE_PW', 'volunteer_planner'),
'USER': os.environ.get('DB_USER', 'vp')
}
}
|
from volunteer_planner.settings.local import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
- 'NAME': 'volunteer_planner',
+ 'NAME': os.environ.get('DATABASE_NAME', 'volunteer_planner'),
'PASSWORD': os.environ.get('DATABASE_PW', 'volunteer_planner'),
'USER': os.environ.get('DB_USER', 'vp')
}
}
|
cc0c43c3131161902de3a8a68688766cacd637b9
|
lowercasing_test/src/tests/lowercasing/fetchletters.py
|
lowercasing_test/src/tests/lowercasing/fetchletters.py
|
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
characters = filter(lambda x: x[2] == 'Lu' or x[2] == 'Ll', raw)
image = [unichr(int(c[0], 16)) for c in characters]
output = u"\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
|
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
characters = [x for x in raw if x[2] == 'Lu' or x[2] == 'Ll']
image = [chr(int(c[0], 16)) for c in characters]
output = "\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
|
Migrate script ot Python 3
|
Migrate script ot Python 3
|
Python
|
apache-2.0
|
vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa
|
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
- characters = filter(lambda x: x[2] == 'Lu' or x[2] == 'Ll', raw)
+ characters = [x for x in raw if x[2] == 'Lu' or x[2] == 'Ll']
- image = [unichr(int(c[0], 16)) for c in characters]
+ image = [chr(int(c[0], 16)) for c in characters]
- output = u"\n".join(image)
+ output = "\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
|
Migrate script ot Python 3
|
## Code Before:
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
characters = filter(lambda x: x[2] == 'Lu' or x[2] == 'Ll', raw)
image = [unichr(int(c[0], 16)) for c in characters]
output = u"\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
## Instruction:
Migrate script ot Python 3
## Code After:
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
characters = [x for x in raw if x[2] == 'Lu' or x[2] == 'Ll']
image = [chr(int(c[0], 16)) for c in characters]
output = "\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
|
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
- characters = filter(lambda x: x[2] == 'Lu' or x[2] == 'Ll', raw)
? ^^^^ ------- ^ ^^^^^^
+ characters = [x for x in raw if x[2] == 'Lu' or x[2] == 'Ll']
? +++ ^ ^^^^^^^^^^ ^
- image = [unichr(int(c[0], 16)) for c in characters]
? ---
+ image = [chr(int(c[0], 16)) for c in characters]
- output = u"\n".join(image)
? -
+ output = "\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
|
048f2d9469b3f9eb266a343602ddf608e3bd6d86
|
highton/models/email_address.py
|
highton/models/email_address.py
|
from highton.models import HightonModel
from highton.highton_constants import HightonConstants
from highton import fields
class EmailAddress(
HightonModel,
):
"""
:ivar id: fields.IntegerField(name=HightonConstants.ID)
:ivar location: fields.StringField(name=HightonConstants.LOCATION)
:ivar address: fields.StringField(name=HightonConstants.ADDRESS)
"""
TAG_NAME = HightonConstants.EMAIL_ADDRESS
def __init__(self, **kwargs):
self.location = fields.StringField(name=HightonConstants.LOCATION)
self.address = fields.StringField(name=HightonConstants.ADDRESS)
super().__init__(**kwargs)
|
from highton.models import HightonModel
from highton.highton_constants import HightonConstants
from highton import fields
class EmailAddress(
HightonModel,
):
"""
:ivar id: fields.IntegerField(name=HightonConstants.ID)
:ivar location: fields.StringField(name=HightonConstants.LOCATION)
:ivar address: fields.StringField(name=HightonConstants.ADDRESS)
"""
TAG_NAME = HightonConstants.EMAIL_ADDRESS
def __init__(self, **kwargs):
self.location = fields.StringField(name=HightonConstants.LOCATION, required=True)
self.address = fields.StringField(name=HightonConstants.ADDRESS, required=True)
super().__init__(**kwargs)
|
Set EmailAddress Things to required
|
Set EmailAddress Things to required
|
Python
|
apache-2.0
|
seibert-media/Highton,seibert-media/Highton
|
from highton.models import HightonModel
from highton.highton_constants import HightonConstants
from highton import fields
class EmailAddress(
HightonModel,
):
"""
:ivar id: fields.IntegerField(name=HightonConstants.ID)
:ivar location: fields.StringField(name=HightonConstants.LOCATION)
:ivar address: fields.StringField(name=HightonConstants.ADDRESS)
"""
TAG_NAME = HightonConstants.EMAIL_ADDRESS
def __init__(self, **kwargs):
- self.location = fields.StringField(name=HightonConstants.LOCATION)
+ self.location = fields.StringField(name=HightonConstants.LOCATION, required=True)
- self.address = fields.StringField(name=HightonConstants.ADDRESS)
+ self.address = fields.StringField(name=HightonConstants.ADDRESS, required=True)
super().__init__(**kwargs)
|
Set EmailAddress Things to required
|
## Code Before:
from highton.models import HightonModel
from highton.highton_constants import HightonConstants
from highton import fields
class EmailAddress(
HightonModel,
):
"""
:ivar id: fields.IntegerField(name=HightonConstants.ID)
:ivar location: fields.StringField(name=HightonConstants.LOCATION)
:ivar address: fields.StringField(name=HightonConstants.ADDRESS)
"""
TAG_NAME = HightonConstants.EMAIL_ADDRESS
def __init__(self, **kwargs):
self.location = fields.StringField(name=HightonConstants.LOCATION)
self.address = fields.StringField(name=HightonConstants.ADDRESS)
super().__init__(**kwargs)
## Instruction:
Set EmailAddress Things to required
## Code After:
from highton.models import HightonModel
from highton.highton_constants import HightonConstants
from highton import fields
class EmailAddress(
HightonModel,
):
"""
:ivar id: fields.IntegerField(name=HightonConstants.ID)
:ivar location: fields.StringField(name=HightonConstants.LOCATION)
:ivar address: fields.StringField(name=HightonConstants.ADDRESS)
"""
TAG_NAME = HightonConstants.EMAIL_ADDRESS
def __init__(self, **kwargs):
self.location = fields.StringField(name=HightonConstants.LOCATION, required=True)
self.address = fields.StringField(name=HightonConstants.ADDRESS, required=True)
super().__init__(**kwargs)
|
from highton.models import HightonModel
from highton.highton_constants import HightonConstants
from highton import fields
class EmailAddress(
HightonModel,
):
"""
:ivar id: fields.IntegerField(name=HightonConstants.ID)
:ivar location: fields.StringField(name=HightonConstants.LOCATION)
:ivar address: fields.StringField(name=HightonConstants.ADDRESS)
"""
TAG_NAME = HightonConstants.EMAIL_ADDRESS
def __init__(self, **kwargs):
- self.location = fields.StringField(name=HightonConstants.LOCATION)
+ self.location = fields.StringField(name=HightonConstants.LOCATION, required=True)
? +++++++++++++++
- self.address = fields.StringField(name=HightonConstants.ADDRESS)
+ self.address = fields.StringField(name=HightonConstants.ADDRESS, required=True)
? +++++++++++++++
super().__init__(**kwargs)
|
232c0a600946e2a679947fe638938e56d2fa7709
|
vint/ast/parsing.py
|
vint/ast/parsing.py
|
import extlib.vimlparser
class Parser(object):
def __init__(self, plugins=None):
""" Initialize Parser with the specified plugins.
The plugins can add attributes to the AST.
"""
self.plugins = plugins or []
def parse(self, string):
""" Parse vim script string and return the AST. """
lines = string.split('\n')
reader = extlib.vimlparser.StringReader(lines)
parser = extlib.vimlparser.VimLParser()
ast = parser.parse(reader)
for plugin in self.plugins:
plugin.process(ast)
return ast
def parse_file(self, file_path):
""" Parse vim script file and return the AST. """
with file_path.open() as f:
return self.parse(f.read())
|
import extlib.vimlparser
class Parser(object):
def __init__(self, plugins=None):
""" Initialize Parser with the specified plugins.
The plugins can add attributes to the AST.
"""
self.plugins = plugins or []
def parse(self, string):
""" Parse vim script string and return the AST. """
lines = string.split('\n')
reader = extlib.vimlparser.StringReader(lines)
parser = extlib.vimlparser.VimLParser()
ast = parser.parse(reader)
# TOPLEVEL does not have a pos, but we need pos for all nodes
ast['pos'] = {'col': 1, 'i': 0, 'lnum': 1}
for plugin in self.plugins:
plugin.process(ast)
return ast
def parse_file(self, file_path):
""" Parse vim script file and return the AST. """
with file_path.open() as f:
return self.parse(f.read())
|
Add TOPLEVEL pos to unify node pos interface
|
Add TOPLEVEL pos to unify node pos interface
|
Python
|
mit
|
RianFuro/vint,Kuniwak/vint,RianFuro/vint,Kuniwak/vint
|
import extlib.vimlparser
class Parser(object):
def __init__(self, plugins=None):
""" Initialize Parser with the specified plugins.
The plugins can add attributes to the AST.
"""
self.plugins = plugins or []
def parse(self, string):
""" Parse vim script string and return the AST. """
lines = string.split('\n')
reader = extlib.vimlparser.StringReader(lines)
parser = extlib.vimlparser.VimLParser()
ast = parser.parse(reader)
+ # TOPLEVEL does not have a pos, but we need pos for all nodes
+ ast['pos'] = {'col': 1, 'i': 0, 'lnum': 1}
+
for plugin in self.plugins:
plugin.process(ast)
return ast
def parse_file(self, file_path):
""" Parse vim script file and return the AST. """
with file_path.open() as f:
return self.parse(f.read())
|
Add TOPLEVEL pos to unify node pos interface
|
## Code Before:
import extlib.vimlparser
class Parser(object):
def __init__(self, plugins=None):
""" Initialize Parser with the specified plugins.
The plugins can add attributes to the AST.
"""
self.plugins = plugins or []
def parse(self, string):
""" Parse vim script string and return the AST. """
lines = string.split('\n')
reader = extlib.vimlparser.StringReader(lines)
parser = extlib.vimlparser.VimLParser()
ast = parser.parse(reader)
for plugin in self.plugins:
plugin.process(ast)
return ast
def parse_file(self, file_path):
""" Parse vim script file and return the AST. """
with file_path.open() as f:
return self.parse(f.read())
## Instruction:
Add TOPLEVEL pos to unify node pos interface
## Code After:
import extlib.vimlparser
class Parser(object):
def __init__(self, plugins=None):
""" Initialize Parser with the specified plugins.
The plugins can add attributes to the AST.
"""
self.plugins = plugins or []
def parse(self, string):
""" Parse vim script string and return the AST. """
lines = string.split('\n')
reader = extlib.vimlparser.StringReader(lines)
parser = extlib.vimlparser.VimLParser()
ast = parser.parse(reader)
# TOPLEVEL does not have a pos, but we need pos for all nodes
ast['pos'] = {'col': 1, 'i': 0, 'lnum': 1}
for plugin in self.plugins:
plugin.process(ast)
return ast
def parse_file(self, file_path):
""" Parse vim script file and return the AST. """
with file_path.open() as f:
return self.parse(f.read())
|
import extlib.vimlparser
class Parser(object):
def __init__(self, plugins=None):
""" Initialize Parser with the specified plugins.
The plugins can add attributes to the AST.
"""
self.plugins = plugins or []
def parse(self, string):
""" Parse vim script string and return the AST. """
lines = string.split('\n')
reader = extlib.vimlparser.StringReader(lines)
parser = extlib.vimlparser.VimLParser()
ast = parser.parse(reader)
+ # TOPLEVEL does not have a pos, but we need pos for all nodes
+ ast['pos'] = {'col': 1, 'i': 0, 'lnum': 1}
+
for plugin in self.plugins:
plugin.process(ast)
return ast
def parse_file(self, file_path):
""" Parse vim script file and return the AST. """
with file_path.open() as f:
return self.parse(f.read())
|
b45c0cc0e9f2964ad442115f7a83292fb83611ec
|
test/vim_autopep8.py
|
test/vim_autopep8.py
|
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
if vim.eval('&syntax') == 'python':
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
|
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
def main():
if vim.eval('&syntax') != 'python':
return
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
if __name__ == '__main__':
main()
|
Put code in main function
|
Put code in main function
|
Python
|
mit
|
SG345/autopep8,MeteorAdminz/autopep8,Vauxoo/autopep8,hhatto/autopep8,Vauxoo/autopep8,MeteorAdminz/autopep8,SG345/autopep8,vauxoo-dev/autopep8,vauxoo-dev/autopep8,hhatto/autopep8
|
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
+ def main():
- if vim.eval('&syntax') == 'python':
+ if vim.eval('&syntax') != 'python':
+ return
+
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
+
+ if __name__ == '__main__':
+ main()
+
|
Put code in main function
|
## Code Before:
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
if vim.eval('&syntax') == 'python':
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
## Instruction:
Put code in main function
## Code After:
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
def main():
if vim.eval('&syntax') != 'python':
return
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
if __name__ == '__main__':
main()
|
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
+ def main():
- if vim.eval('&syntax') == 'python':
? ^
+ if vim.eval('&syntax') != 'python':
? ++++ ^
+ return
+
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
+
+
+ if __name__ == '__main__':
+ main()
|
a4f09620d8939aa8141b39972fb49d82f5380875
|
src/build/console.py
|
src/build/console.py
|
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
current_time = int(round(time.time()*1000))
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
ms = d.microseconds//10000
timestamp = "{:02}:{:02}.{:02}".format(m, s, ms)
if operation:
print("{} {:^15s} {}".format(timestamp, operation, message))
else:
print("{} {}".format(timestamp, message))
|
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
current_time = int(round(time.time() * 1000))
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
ms = d.microseconds // 10000
timestamp = "{:2}:{:02}.{:02}".format(m, s, ms)
if operation:
print("\033[34m{}\033[0m {:^15s} {}".format(timestamp, operation, message))
else:
print("\033[34m{}\033[0m {}".format(timestamp, message))
|
Add colored time in output
|
Add colored time in output
|
Python
|
mpl-2.0
|
seleznev/firefox-complete-theme-build-system
|
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
- current_time = int(round(time.time()*1000))
+ current_time = int(round(time.time() * 1000))
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
- ms = d.microseconds//10000
+ ms = d.microseconds // 10000
- timestamp = "{:02}:{:02}.{:02}".format(m, s, ms)
+ timestamp = "{:2}:{:02}.{:02}".format(m, s, ms)
if operation:
- print("{} {:^15s} {}".format(timestamp, operation, message))
+ print("\033[34m{}\033[0m {:^15s} {}".format(timestamp, operation, message))
else:
- print("{} {}".format(timestamp, message))
+ print("\033[34m{}\033[0m {}".format(timestamp, message))
|
Add colored time in output
|
## Code Before:
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
current_time = int(round(time.time()*1000))
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
ms = d.microseconds//10000
timestamp = "{:02}:{:02}.{:02}".format(m, s, ms)
if operation:
print("{} {:^15s} {}".format(timestamp, operation, message))
else:
print("{} {}".format(timestamp, message))
## Instruction:
Add colored time in output
## Code After:
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
current_time = int(round(time.time() * 1000))
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
ms = d.microseconds // 10000
timestamp = "{:2}:{:02}.{:02}".format(m, s, ms)
if operation:
print("\033[34m{}\033[0m {:^15s} {}".format(timestamp, operation, message))
else:
print("\033[34m{}\033[0m {}".format(timestamp, message))
|
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
- current_time = int(round(time.time()*1000))
+ current_time = int(round(time.time() * 1000))
? + +
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
- ms = d.microseconds//10000
+ ms = d.microseconds // 10000
? + +
- timestamp = "{:02}:{:02}.{:02}".format(m, s, ms)
? -
+ timestamp = "{:2}:{:02}.{:02}".format(m, s, ms)
if operation:
- print("{} {:^15s} {}".format(timestamp, operation, message))
? ^
+ print("\033[34m{}\033[0m {:^15s} {}".format(timestamp, operation, message))
? ++++++++ ^^^^^^^
else:
- print("{} {}".format(timestamp, message))
+ print("\033[34m{}\033[0m {}".format(timestamp, message))
? ++++++++ +++++++
|
bf6d4c4622b9a0161fad3b03422747fb16faf5de
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='BitstampClient',
version='0.1',
packages=['bitstamp'],
url='',
license='MIT',
author='Kamil Madac',
author_email='[email protected]',
description='Bitstamp API python implementation',
requires=['requests']
)
|
from distutils.core import setup
setup(
name='bitstamp-python-client',
version='0.1',
packages=['bitstamp'],
url='',
license='MIT',
author='Kamil Madac',
author_email='[email protected]',
description='Bitstamp API python implementation',
requires=['requests']
)
|
Rename because of clash with original package.
|
Rename because of clash with original package.
|
Python
|
mit
|
nederhoed/bitstamp-python-client
|
from distutils.core import setup
setup(
- name='BitstampClient',
+ name='bitstamp-python-client',
version='0.1',
packages=['bitstamp'],
url='',
license='MIT',
author='Kamil Madac',
author_email='[email protected]',
description='Bitstamp API python implementation',
requires=['requests']
)
|
Rename because of clash with original package.
|
## Code Before:
from distutils.core import setup
setup(
name='BitstampClient',
version='0.1',
packages=['bitstamp'],
url='',
license='MIT',
author='Kamil Madac',
author_email='[email protected]',
description='Bitstamp API python implementation',
requires=['requests']
)
## Instruction:
Rename because of clash with original package.
## Code After:
from distutils.core import setup
setup(
name='bitstamp-python-client',
version='0.1',
packages=['bitstamp'],
url='',
license='MIT',
author='Kamil Madac',
author_email='[email protected]',
description='Bitstamp API python implementation',
requires=['requests']
)
|
from distutils.core import setup
setup(
- name='BitstampClient',
? ^ ^
+ name='bitstamp-python-client',
? ^ ^^^^^^^^^
version='0.1',
packages=['bitstamp'],
url='',
license='MIT',
author='Kamil Madac',
author_email='[email protected]',
description='Bitstamp API python implementation',
requires=['requests']
)
|
c04d8dfaf3b4fcbddedb0973a501609ffb9472f6
|
simpleflow/settings/__init__.py
|
simpleflow/settings/__init__.py
|
import sys
from future.utils import iteritems
from . import base
def put_setting(key, value):
setattr(sys.modules[__name__], key, value)
_keys.add(key)
def configure(dct):
for k, v in iteritems(dct):
put_setting(k, v)
# initialize a list of settings names
_keys = set()
# look for settings and initialize them
configure(base.load())
|
from pprint import pformat
import sys
from future.utils import iteritems
from . import base
def put_setting(key, value):
setattr(sys.modules[__name__], key, value)
_keys.add(key)
def configure(dct):
for k, v in iteritems(dct):
put_setting(k, v)
def print_settings():
for key in sorted(_keys):
value = getattr(sys.modules[__name__], key)
print("{}={}".format(key, pformat(value)))
# initialize a list of settings names
_keys = set()
# look for settings and initialize them
configure(base.load())
|
Add utility method to print all settings
|
Add utility method to print all settings
|
Python
|
mit
|
botify-labs/simpleflow,botify-labs/simpleflow
|
+ from pprint import pformat
import sys
from future.utils import iteritems
from . import base
def put_setting(key, value):
setattr(sys.modules[__name__], key, value)
_keys.add(key)
def configure(dct):
for k, v in iteritems(dct):
put_setting(k, v)
+ def print_settings():
+ for key in sorted(_keys):
+ value = getattr(sys.modules[__name__], key)
+ print("{}={}".format(key, pformat(value)))
+
+
# initialize a list of settings names
_keys = set()
# look for settings and initialize them
configure(base.load())
|
Add utility method to print all settings
|
## Code Before:
import sys
from future.utils import iteritems
from . import base
def put_setting(key, value):
setattr(sys.modules[__name__], key, value)
_keys.add(key)
def configure(dct):
for k, v in iteritems(dct):
put_setting(k, v)
# initialize a list of settings names
_keys = set()
# look for settings and initialize them
configure(base.load())
## Instruction:
Add utility method to print all settings
## Code After:
from pprint import pformat
import sys
from future.utils import iteritems
from . import base
def put_setting(key, value):
setattr(sys.modules[__name__], key, value)
_keys.add(key)
def configure(dct):
for k, v in iteritems(dct):
put_setting(k, v)
def print_settings():
for key in sorted(_keys):
value = getattr(sys.modules[__name__], key)
print("{}={}".format(key, pformat(value)))
# initialize a list of settings names
_keys = set()
# look for settings and initialize them
configure(base.load())
|
+ from pprint import pformat
import sys
from future.utils import iteritems
from . import base
def put_setting(key, value):
setattr(sys.modules[__name__], key, value)
_keys.add(key)
def configure(dct):
for k, v in iteritems(dct):
put_setting(k, v)
+ def print_settings():
+ for key in sorted(_keys):
+ value = getattr(sys.modules[__name__], key)
+ print("{}={}".format(key, pformat(value)))
+
+
# initialize a list of settings names
_keys = set()
# look for settings and initialize them
configure(base.load())
|
1d5442aa70d2ed2569cc062d476129840d08a610
|
oscar/apps/shipping/repository.py
|
oscar/apps/shipping/repository.py
|
from django.core.exceptions import ImproperlyConfigured
from oscar.apps.shipping.methods import Free, NoShippingRequired
class Repository(object):
"""
Repository class responsible for returning ShippingMethod
objects for a given user, basket etc
"""
def get_shipping_methods(self, user, basket, shipping_addr=None, **kwargs):
"""
Return a list of all applicable shipping method objects
for a given basket.
We default to returning the Method models that have been defined but
this behaviour can easily be overridden by subclassing this class
and overriding this method.
"""
methods = [Free()]
return self.add_basket_to_methods(basket, methods)
def get_default_shipping_method(self, user, basket, shipping_addr=None, **kwargs):
methods = self.get_shipping_methods(user, basket, shipping_addr, **kwargs)
if len(methods) == 0:
raise ImproperlyConfigured("You need to define some shipping methods")
return methods[0]
def add_basket_to_methods(self, basket, methods):
for method in methods:
method.set_basket(basket)
return methods
def find_by_code(self, code):
"""
Return the appropriate Method object for the given code
"""
known_methods = [Free, NoShippingRequired]
for klass in known_methods:
if code == getattr(klass, 'code'):
return klass()
return None
|
from django.core.exceptions import ImproperlyConfigured
from oscar.apps.shipping.methods import Free, NoShippingRequired
class Repository(object):
"""
Repository class responsible for returning ShippingMethod
objects for a given user, basket etc
"""
def get_shipping_methods(self, user, basket, shipping_addr=None, **kwargs):
"""
Return a list of all applicable shipping method objects
for a given basket.
We default to returning the Method models that have been defined but
this behaviour can easily be overridden by subclassing this class
and overriding this method.
"""
methods = [Free()]
return self.add_basket_to_methods(basket, methods)
def get_default_shipping_method(self, user, basket, shipping_addr=None, **kwargs):
methods = self.get_shipping_methods(user, basket, shipping_addr, **kwargs)
if len(methods) == 0:
raise ImproperlyConfigured("You need to define some shipping methods")
return min(methods, key=lambda method: method.basket_charge_incl_tax())
def add_basket_to_methods(self, basket, methods):
for method in methods:
method.set_basket(basket)
return methods
def find_by_code(self, code):
"""
Return the appropriate Method object for the given code
"""
known_methods = [Free, NoShippingRequired]
for klass in known_methods:
if code == getattr(klass, 'code'):
return klass()
return None
|
Make the cheapest shipping method the default one
|
Make the cheapest shipping method the default one
|
Python
|
bsd-3-clause
|
mexeniz/django-oscar,kapari/django-oscar,makielab/django-oscar,eddiep1101/django-oscar,Jannes123/django-oscar,thechampanurag/django-oscar,rocopartners/django-oscar,john-parton/django-oscar,elliotthill/django-oscar,itbabu/django-oscar,mexeniz/django-oscar,jinnykoo/wuyisj.com,marcoantoniooliveira/labweb,sonofatailor/django-oscar,vovanbo/django-oscar,ka7eh/django-oscar,jlmadurga/django-oscar,saadatqadri/django-oscar,amirrpp/django-oscar,makielab/django-oscar,bnprk/django-oscar,okfish/django-oscar,ahmetdaglarbas/e-commerce,manevant/django-oscar,ka7eh/django-oscar,kapari/django-oscar,MatthewWilkes/django-oscar,lijoantony/django-oscar,jmt4/django-oscar,elliotthill/django-oscar,anentropic/django-oscar,django-oscar/django-oscar,pdonadeo/django-oscar,jlmadurga/django-oscar,pdonadeo/django-oscar,django-oscar/django-oscar,john-parton/django-oscar,pasqualguerrero/django-oscar,okfish/django-oscar,josesanch/django-oscar,nfletton/django-oscar,WadeYuChen/django-oscar,binarydud/django-oscar,faratro/django-oscar,Jannes123/django-oscar,Idematica/django-oscar,dongguangming/django-oscar,WillisXChen/django-oscar,WillisXChen/django-oscar,lijoantony/django-oscar,rocopartners/django-oscar,nickpack/django-oscar,jlmadurga/django-oscar,sasha0/django-oscar,nfletton/django-oscar,WillisXChen/django-oscar,solarissmoke/django-oscar,taedori81/django-oscar,manevant/django-oscar,dongguangming/django-oscar,adamend/django-oscar,kapt/django-oscar,lijoantony/django-oscar,okfish/django-oscar,nfletton/django-oscar,Idematica/django-oscar,adamend/django-oscar,marcoantoniooliveira/labweb,faratro/django-oscar,solarissmoke/django-oscar,nickpack/django-oscar,nickpack/django-oscar,django-oscar/django-oscar,spartonia/django-oscar,spartonia/django-oscar,okfish/django-oscar,sonofatailor/django-oscar,WadeYuChen/django-oscar,pasqualguerrero/django-oscar,DrOctogon/unwash_ecom,kapari/django-oscar,MatthewWilkes/django-oscar,jinnykoo/christmas,makielab/django-oscar,QLGu/django-oscar,adamend/django-oscar,sasha0/django-oscar
,michaelkuty/django-oscar,ademuk/django-oscar,john-parton/django-oscar,Bogh/django-oscar,bschuon/django-oscar,jmt4/django-oscar,pasqualguerrero/django-oscar,WillisXChen/django-oscar,makielab/django-oscar,django-oscar/django-oscar,vovanbo/django-oscar,saadatqadri/django-oscar,DrOctogon/unwash_ecom,dongguangming/django-oscar,thechampanurag/django-oscar,QLGu/django-oscar,jlmadurga/django-oscar,thechampanurag/django-oscar,monikasulik/django-oscar,binarydud/django-oscar,anentropic/django-oscar,monikasulik/django-oscar,jinnykoo/wuyisj.com,manevant/django-oscar,jinnykoo/wuyisj,rocopartners/django-oscar,MatthewWilkes/django-oscar,Jannes123/django-oscar,lijoantony/django-oscar,john-parton/django-oscar,kapari/django-oscar,anentropic/django-oscar,WadeYuChen/django-oscar,ahmetdaglarbas/e-commerce,monikasulik/django-oscar,marcoantoniooliveira/labweb,bschuon/django-oscar,monikasulik/django-oscar,michaelkuty/django-oscar,michaelkuty/django-oscar,Idematica/django-oscar,adamend/django-oscar,bschuon/django-oscar,taedori81/django-oscar,WillisXChen/django-oscar,pdonadeo/django-oscar,nfletton/django-oscar,thechampanurag/django-oscar,sasha0/django-oscar,ahmetdaglarbas/e-commerce,spartonia/django-oscar,eddiep1101/django-oscar,jinnykoo/wuyisj.com,taedori81/django-oscar,manevant/django-oscar,pdonadeo/django-oscar,kapt/django-oscar,MatthewWilkes/django-oscar,josesanch/django-oscar,jmt4/django-oscar,WadeYuChen/django-oscar,mexeniz/django-oscar,mexeniz/django-oscar,sonofatailor/django-oscar,binarydud/django-oscar,eddiep1101/django-oscar,vovanbo/django-oscar,ahmetdaglarbas/e-commerce,machtfit/django-oscar,itbabu/django-oscar,sonofatailor/django-oscar,Jannes123/django-oscar,faratro/django-oscar,saadatqadri/django-oscar,bnprk/django-oscar,elliotthill/django-oscar,taedori81/django-oscar,rocopartners/django-oscar,vovanbo/django-oscar,bnprk/django-oscar,michaelkuty/django-oscar,anentropic/django-oscar,machtfit/django-oscar,ka7eh/django-oscar,binarydud/django-oscar,itbabu/django-oscar,marcoantoniooli
veira/labweb,eddiep1101/django-oscar,dongguangming/django-oscar,itbabu/django-oscar,jinnykoo/christmas,Bogh/django-oscar,jmt4/django-oscar,ademuk/django-oscar,amirrpp/django-oscar,sasha0/django-oscar,amirrpp/django-oscar,ademuk/django-oscar,pasqualguerrero/django-oscar,faratro/django-oscar,machtfit/django-oscar,jinnykoo/wuyisj,solarissmoke/django-oscar,solarissmoke/django-oscar,amirrpp/django-oscar,QLGu/django-oscar,josesanch/django-oscar,ka7eh/django-oscar,nickpack/django-oscar,jinnykoo/wuyisj,jinnykoo/wuyisj,jinnykoo/wuyisj.com,bschuon/django-oscar,kapt/django-oscar,WillisXChen/django-oscar,Bogh/django-oscar,saadatqadri/django-oscar,ademuk/django-oscar,bnprk/django-oscar,spartonia/django-oscar,QLGu/django-oscar,DrOctogon/unwash_ecom,jinnykoo/christmas,Bogh/django-oscar
|
from django.core.exceptions import ImproperlyConfigured
from oscar.apps.shipping.methods import Free, NoShippingRequired
class Repository(object):
"""
Repository class responsible for returning ShippingMethod
objects for a given user, basket etc
"""
-
+
def get_shipping_methods(self, user, basket, shipping_addr=None, **kwargs):
"""
Return a list of all applicable shipping method objects
for a given basket.
-
+
We default to returning the Method models that have been defined but
this behaviour can easily be overridden by subclassing this class
and overriding this method.
- """
+ """
methods = [Free()]
return self.add_basket_to_methods(basket, methods)
def get_default_shipping_method(self, user, basket, shipping_addr=None, **kwargs):
methods = self.get_shipping_methods(user, basket, shipping_addr, **kwargs)
if len(methods) == 0:
raise ImproperlyConfigured("You need to define some shipping methods")
- return methods[0]
+ return min(methods, key=lambda method: method.basket_charge_incl_tax())
def add_basket_to_methods(self, basket, methods):
for method in methods:
method.set_basket(basket)
return methods
def find_by_code(self, code):
"""
Return the appropriate Method object for the given code
"""
known_methods = [Free, NoShippingRequired]
for klass in known_methods:
if code == getattr(klass, 'code'):
return klass()
return None
|
Make the cheapest shipping method the default one
|
## Code Before:
from django.core.exceptions import ImproperlyConfigured
from oscar.apps.shipping.methods import Free, NoShippingRequired
class Repository(object):
"""
Repository class responsible for returning ShippingMethod
objects for a given user, basket etc
"""
def get_shipping_methods(self, user, basket, shipping_addr=None, **kwargs):
"""
Return a list of all applicable shipping method objects
for a given basket.
We default to returning the Method models that have been defined but
this behaviour can easily be overridden by subclassing this class
and overriding this method.
"""
methods = [Free()]
return self.add_basket_to_methods(basket, methods)
def get_default_shipping_method(self, user, basket, shipping_addr=None, **kwargs):
methods = self.get_shipping_methods(user, basket, shipping_addr, **kwargs)
if len(methods) == 0:
raise ImproperlyConfigured("You need to define some shipping methods")
return methods[0]
def add_basket_to_methods(self, basket, methods):
for method in methods:
method.set_basket(basket)
return methods
def find_by_code(self, code):
"""
Return the appropriate Method object for the given code
"""
known_methods = [Free, NoShippingRequired]
for klass in known_methods:
if code == getattr(klass, 'code'):
return klass()
return None
## Instruction:
Make the cheapest shipping method the default one
## Code After:
from django.core.exceptions import ImproperlyConfigured
from oscar.apps.shipping.methods import Free, NoShippingRequired
class Repository(object):
"""
Repository class responsible for returning ShippingMethod
objects for a given user, basket etc
"""
def get_shipping_methods(self, user, basket, shipping_addr=None, **kwargs):
"""
Return a list of all applicable shipping method objects
for a given basket.
We default to returning the Method models that have been defined but
this behaviour can easily be overridden by subclassing this class
and overriding this method.
"""
methods = [Free()]
return self.add_basket_to_methods(basket, methods)
def get_default_shipping_method(self, user, basket, shipping_addr=None, **kwargs):
methods = self.get_shipping_methods(user, basket, shipping_addr, **kwargs)
if len(methods) == 0:
raise ImproperlyConfigured("You need to define some shipping methods")
return min(methods, key=lambda method: method.basket_charge_incl_tax())
def add_basket_to_methods(self, basket, methods):
for method in methods:
method.set_basket(basket)
return methods
def find_by_code(self, code):
"""
Return the appropriate Method object for the given code
"""
known_methods = [Free, NoShippingRequired]
for klass in known_methods:
if code == getattr(klass, 'code'):
return klass()
return None
|
from django.core.exceptions import ImproperlyConfigured
from oscar.apps.shipping.methods import Free, NoShippingRequired
class Repository(object):
"""
Repository class responsible for returning ShippingMethod
objects for a given user, basket etc
"""
-
+
def get_shipping_methods(self, user, basket, shipping_addr=None, **kwargs):
"""
Return a list of all applicable shipping method objects
for a given basket.
-
+
We default to returning the Method models that have been defined but
this behaviour can easily be overridden by subclassing this class
and overriding this method.
- """
? -
+ """
methods = [Free()]
return self.add_basket_to_methods(basket, methods)
def get_default_shipping_method(self, user, basket, shipping_addr=None, **kwargs):
methods = self.get_shipping_methods(user, basket, shipping_addr, **kwargs)
if len(methods) == 0:
raise ImproperlyConfigured("You need to define some shipping methods")
- return methods[0]
+ return min(methods, key=lambda method: method.basket_charge_incl_tax())
def add_basket_to_methods(self, basket, methods):
for method in methods:
method.set_basket(basket)
return methods
def find_by_code(self, code):
"""
Return the appropriate Method object for the given code
"""
known_methods = [Free, NoShippingRequired]
for klass in known_methods:
if code == getattr(klass, 'code'):
return klass()
return None
|
fc123442727ae25f03c5aa8d2fa7bc6fae388ae2
|
thecure/levels/level1.py
|
thecure/levels/level1.py
|
from thecure.levels.base import Level
from thecure.sprites import Direction, InfectedHuman
class Level1(Level):
name = 'level1'
start_pos = (900, 6200)
def setup(self):
boy = InfectedHuman('boy1')
self.main_layer.add(boy)
boy.move_to(300, 160)
boy.set_direction(Direction.DOWN)
girl = InfectedHuman('girl1')
self.main_layer.add(girl)
girl.move_to(470, 200)
girl.set_direction(Direction.LEFT)
def draw_bg(self, surface):
surface.fill((237, 243, 255))
|
from thecure.levels.base import Level
from thecure.sprites import Direction, InfectedHuman
class Level1(Level):
name = 'level1'
start_pos = (900, 6200)
def setup(self):
boy = InfectedHuman('boy1')
self.main_layer.add(boy)
boy.move_to(1536, 5696)
boy.set_direction(Direction.DOWN)
girl = InfectedHuman('girl1')
self.main_layer.add(girl)
girl.move_to(1536, 5824)
girl.set_direction(Direction.UP)
def draw_bg(self, surface):
surface.fill((237, 243, 255))
|
Move the kids onto the field.
|
Move the kids onto the field.
|
Python
|
mit
|
chipx86/the-cure
|
from thecure.levels.base import Level
from thecure.sprites import Direction, InfectedHuman
class Level1(Level):
name = 'level1'
start_pos = (900, 6200)
def setup(self):
boy = InfectedHuman('boy1')
self.main_layer.add(boy)
- boy.move_to(300, 160)
+ boy.move_to(1536, 5696)
boy.set_direction(Direction.DOWN)
girl = InfectedHuman('girl1')
self.main_layer.add(girl)
- girl.move_to(470, 200)
+ girl.move_to(1536, 5824)
- girl.set_direction(Direction.LEFT)
+ girl.set_direction(Direction.UP)
def draw_bg(self, surface):
surface.fill((237, 243, 255))
|
Move the kids onto the field.
|
## Code Before:
from thecure.levels.base import Level
from thecure.sprites import Direction, InfectedHuman
class Level1(Level):
name = 'level1'
start_pos = (900, 6200)
def setup(self):
boy = InfectedHuman('boy1')
self.main_layer.add(boy)
boy.move_to(300, 160)
boy.set_direction(Direction.DOWN)
girl = InfectedHuman('girl1')
self.main_layer.add(girl)
girl.move_to(470, 200)
girl.set_direction(Direction.LEFT)
def draw_bg(self, surface):
surface.fill((237, 243, 255))
## Instruction:
Move the kids onto the field.
## Code After:
from thecure.levels.base import Level
from thecure.sprites import Direction, InfectedHuman
class Level1(Level):
name = 'level1'
start_pos = (900, 6200)
def setup(self):
boy = InfectedHuman('boy1')
self.main_layer.add(boy)
boy.move_to(1536, 5696)
boy.set_direction(Direction.DOWN)
girl = InfectedHuman('girl1')
self.main_layer.add(girl)
girl.move_to(1536, 5824)
girl.set_direction(Direction.UP)
def draw_bg(self, surface):
surface.fill((237, 243, 255))
|
from thecure.levels.base import Level
from thecure.sprites import Direction, InfectedHuman
class Level1(Level):
name = 'level1'
start_pos = (900, 6200)
def setup(self):
boy = InfectedHuman('boy1')
self.main_layer.add(boy)
- boy.move_to(300, 160)
? ^^ ^ ^
+ boy.move_to(1536, 5696)
? ++ ^ ^ ^^
boy.set_direction(Direction.DOWN)
girl = InfectedHuman('girl1')
self.main_layer.add(girl)
- girl.move_to(470, 200)
? ^^^ ^^
+ girl.move_to(1536, 5824)
? ^^^^ ++ ^
- girl.set_direction(Direction.LEFT)
? ^^^^
+ girl.set_direction(Direction.UP)
? ^^
def draw_bg(self, surface):
surface.fill((237, 243, 255))
|
72fb6ca12b685809bd5de0c5df9f051eef1163c4
|
test/TestBaseUtils.py
|
test/TestBaseUtils.py
|
import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
def test_word_segmenter(self):
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_word_segmenter_ignores_whitespace(self):
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_word_segmenter_ignores_special_chars(self):
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
|
''' Tests for BaseUtils
'''
import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
''' Main test class for the BaseUtils '''
def test_word_segmenter_with_empty(self):
''' For an empty string, the segmenter returns
just an empty list '''
segments = BaseUtils.get_words('')
self.assertEqual(segments, [])
def test_word_segmenter(self):
''' The word segmenter returns the expected
array of strings '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_ignoring_whitespace(self):
''' Whitespace in the input string is ignored
in the input string '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_ignoring_special_chars(self):
''' If there are special characters in the input,
they are ignored as well '''
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
|
Add test for empty string; cleanup
|
Add test for empty string; cleanup
|
Python
|
bsd-2-clause
|
ambidextrousTx/RNLTK
|
+ ''' Tests for BaseUtils
+ '''
import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
+ ''' Main test class for the BaseUtils '''
+ def test_word_segmenter_with_empty(self):
+ ''' For an empty string, the segmenter returns
+ just an empty list '''
+ segments = BaseUtils.get_words('')
+ self.assertEqual(segments, [])
+
def test_word_segmenter(self):
+ ''' The word segmenter returns the expected
+ array of strings '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
- def test_word_segmenter_ignores_whitespace(self):
+ def test_ignoring_whitespace(self):
+ ''' Whitespace in the input string is ignored
+ in the input string '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
- def test_word_segmenter_ignores_special_chars(self):
+ def test_ignoring_special_chars(self):
+ ''' If there are special characters in the input,
+ they are ignored as well '''
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
|
Add test for empty string; cleanup
|
## Code Before:
import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
def test_word_segmenter(self):
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_word_segmenter_ignores_whitespace(self):
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_word_segmenter_ignores_special_chars(self):
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
## Instruction:
Add test for empty string; cleanup
## Code After:
''' Tests for BaseUtils
'''
import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
''' Main test class for the BaseUtils '''
def test_word_segmenter_with_empty(self):
''' For an empty string, the segmenter returns
just an empty list '''
segments = BaseUtils.get_words('')
self.assertEqual(segments, [])
def test_word_segmenter(self):
''' The word segmenter returns the expected
array of strings '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_ignoring_whitespace(self):
''' Whitespace in the input string is ignored
in the input string '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_ignoring_special_chars(self):
''' If there are special characters in the input,
they are ignored as well '''
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
|
+ ''' Tests for BaseUtils
+ '''
import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
+ ''' Main test class for the BaseUtils '''
+ def test_word_segmenter_with_empty(self):
+ ''' For an empty string, the segmenter returns
+ just an empty list '''
+ segments = BaseUtils.get_words('')
+ self.assertEqual(segments, [])
+
def test_word_segmenter(self):
+ ''' The word segmenter returns the expected
+ array of strings '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
- def test_word_segmenter_ignores_whitespace(self):
? --------------- ^^
+ def test_ignoring_whitespace(self):
? ^^^
+ ''' Whitespace in the input string is ignored
+ in the input string '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
- def test_word_segmenter_ignores_special_chars(self):
? --------------- ^^
+ def test_ignoring_special_chars(self):
? ^^^
+ ''' If there are special characters in the input,
+ they are ignored as well '''
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
|
9909fe549753d13355552c7462f16c42908d4b21
|
ligand/urls.py
|
ligand/urls.py
|
from django.conf.urls import url
from django.views.decorators.cache import cache_page
from django.views.generic import TemplateView
from ligand.views import *
urlpatterns = [
url(r'^$', cache_page(3600*24*7)(LigandBrowser.as_view()), name='ligand_browser'),
url(r'^target/all/(?P<slug>[-\w]+)/$',TargetDetails, name='ligand_target_detail'),
url(r'^target/compact/(?P<slug>[-\w]+)/$',TargetDetailsCompact, name='ligand_target_detail_compact'),
url(r'^targets$',TargetDetails, name='ligand_target_detail'),
url(r'^targets_compact',TargetDetailsCompact, name='ligand_target_detail_compact'),
url(r'^targets_purchasable',TargetPurchasabilityDetails, name='ligand_target_detail_purchasable'),
url(r'^(?P<ligand_id>[-\w]+)/$',LigandDetails, name='ligand_detail'),
url(r'^statistics', LigandStatistics.as_view(), name='ligand_statistics')
]
|
from django.conf.urls import url
from django.views.decorators.cache import cache_page
from django.views.generic import TemplateView
from ligand.views import *
urlpatterns = [
url(r'^$', cache_page(3600*24*7)(LigandBrowser.as_view()), name='ligand_browser'),
url(r'^target/all/(?P<slug>[-\w]+)/$',TargetDetails, name='ligand_target_detail'),
url(r'^target/compact/(?P<slug>[-\w]+)/$',TargetDetailsCompact, name='ligand_target_detail_compact'),
url(r'^targets$',TargetDetails, name='ligand_target_detail'),
url(r'^targets_compact',TargetDetailsCompact, name='ligand_target_detail_compact'),
url(r'^targets_purchasable',TargetPurchasabilityDetails, name='ligand_target_detail_purchasable'),
url(r'^(?P<ligand_id>[-\w]+)/$',LigandDetails, name='ligand_detail'),
url(r'^statistics', cache_page(3600*24*7)(LigandStatistics.as_view()), name='ligand_statistics')
]
|
Add caching to ligand statistics
|
Add caching to ligand statistics
|
Python
|
apache-2.0
|
cmunk/protwis,protwis/protwis,cmunk/protwis,cmunk/protwis,protwis/protwis,cmunk/protwis,protwis/protwis
|
from django.conf.urls import url
from django.views.decorators.cache import cache_page
from django.views.generic import TemplateView
from ligand.views import *
urlpatterns = [
url(r'^$', cache_page(3600*24*7)(LigandBrowser.as_view()), name='ligand_browser'),
url(r'^target/all/(?P<slug>[-\w]+)/$',TargetDetails, name='ligand_target_detail'),
url(r'^target/compact/(?P<slug>[-\w]+)/$',TargetDetailsCompact, name='ligand_target_detail_compact'),
url(r'^targets$',TargetDetails, name='ligand_target_detail'),
url(r'^targets_compact',TargetDetailsCompact, name='ligand_target_detail_compact'),
url(r'^targets_purchasable',TargetPurchasabilityDetails, name='ligand_target_detail_purchasable'),
url(r'^(?P<ligand_id>[-\w]+)/$',LigandDetails, name='ligand_detail'),
- url(r'^statistics', LigandStatistics.as_view(), name='ligand_statistics')
+ url(r'^statistics', cache_page(3600*24*7)(LigandStatistics.as_view()), name='ligand_statistics')
]
|
Add caching to ligand statistics
|
## Code Before:
from django.conf.urls import url
from django.views.decorators.cache import cache_page
from django.views.generic import TemplateView
from ligand.views import *
urlpatterns = [
url(r'^$', cache_page(3600*24*7)(LigandBrowser.as_view()), name='ligand_browser'),
url(r'^target/all/(?P<slug>[-\w]+)/$',TargetDetails, name='ligand_target_detail'),
url(r'^target/compact/(?P<slug>[-\w]+)/$',TargetDetailsCompact, name='ligand_target_detail_compact'),
url(r'^targets$',TargetDetails, name='ligand_target_detail'),
url(r'^targets_compact',TargetDetailsCompact, name='ligand_target_detail_compact'),
url(r'^targets_purchasable',TargetPurchasabilityDetails, name='ligand_target_detail_purchasable'),
url(r'^(?P<ligand_id>[-\w]+)/$',LigandDetails, name='ligand_detail'),
url(r'^statistics', LigandStatistics.as_view(), name='ligand_statistics')
]
## Instruction:
Add caching to ligand statistics
## Code After:
from django.conf.urls import url
from django.views.decorators.cache import cache_page
from django.views.generic import TemplateView
from ligand.views import *
urlpatterns = [
url(r'^$', cache_page(3600*24*7)(LigandBrowser.as_view()), name='ligand_browser'),
url(r'^target/all/(?P<slug>[-\w]+)/$',TargetDetails, name='ligand_target_detail'),
url(r'^target/compact/(?P<slug>[-\w]+)/$',TargetDetailsCompact, name='ligand_target_detail_compact'),
url(r'^targets$',TargetDetails, name='ligand_target_detail'),
url(r'^targets_compact',TargetDetailsCompact, name='ligand_target_detail_compact'),
url(r'^targets_purchasable',TargetPurchasabilityDetails, name='ligand_target_detail_purchasable'),
url(r'^(?P<ligand_id>[-\w]+)/$',LigandDetails, name='ligand_detail'),
url(r'^statistics', cache_page(3600*24*7)(LigandStatistics.as_view()), name='ligand_statistics')
]
|
from django.conf.urls import url
from django.views.decorators.cache import cache_page
from django.views.generic import TemplateView
from ligand.views import *
urlpatterns = [
url(r'^$', cache_page(3600*24*7)(LigandBrowser.as_view()), name='ligand_browser'),
url(r'^target/all/(?P<slug>[-\w]+)/$',TargetDetails, name='ligand_target_detail'),
url(r'^target/compact/(?P<slug>[-\w]+)/$',TargetDetailsCompact, name='ligand_target_detail_compact'),
url(r'^targets$',TargetDetails, name='ligand_target_detail'),
url(r'^targets_compact',TargetDetailsCompact, name='ligand_target_detail_compact'),
url(r'^targets_purchasable',TargetPurchasabilityDetails, name='ligand_target_detail_purchasable'),
url(r'^(?P<ligand_id>[-\w]+)/$',LigandDetails, name='ligand_detail'),
- url(r'^statistics', LigandStatistics.as_view(), name='ligand_statistics')
+ url(r'^statistics', cache_page(3600*24*7)(LigandStatistics.as_view()), name='ligand_statistics')
? ++++++++++++++++++++++ +
]
|
20a801255ab505641e1ec0d449a4b36411c673bc
|
indra/tests/test_tas.py
|
indra/tests/test_tas.py
|
from nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 1601159, num_stmts
assert all(len(s.evidence) == 1 for s in tp.statements), \
"Some statements lack evidence, or have extra evidence."
|
from nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 1175682, num_stmts
assert all(len(s.evidence) >= 1 for s in tp.statements), \
'Some statements lack any evidence'
|
Update test for current evidence aggregation
|
Update test for current evidence aggregation
|
Python
|
bsd-2-clause
|
sorgerlab/indra,sorgerlab/indra,sorgerlab/indra,sorgerlab/belpy,sorgerlab/belpy,bgyori/indra,bgyori/indra,johnbachman/belpy,johnbachman/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/belpy,johnbachman/indra,johnbachman/indra,bgyori/indra
|
from nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
- assert num_stmts == 1601159, num_stmts
+ assert num_stmts == 1175682, num_stmts
- assert all(len(s.evidence) == 1 for s in tp.statements), \
+ assert all(len(s.evidence) >= 1 for s in tp.statements), \
- "Some statements lack evidence, or have extra evidence."
+ 'Some statements lack any evidence'
|
Update test for current evidence aggregation
|
## Code Before:
from nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 1601159, num_stmts
assert all(len(s.evidence) == 1 for s in tp.statements), \
"Some statements lack evidence, or have extra evidence."
## Instruction:
Update test for current evidence aggregation
## Code After:
from nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 1175682, num_stmts
assert all(len(s.evidence) >= 1 for s in tp.statements), \
'Some statements lack any evidence'
|
from nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
- assert num_stmts == 1601159, num_stmts
? ^^^^^
+ assert num_stmts == 1175682, num_stmts
? +++ ^^
- assert all(len(s.evidence) == 1 for s in tp.statements), \
? ^
+ assert all(len(s.evidence) >= 1 for s in tp.statements), \
? ^
- "Some statements lack evidence, or have extra evidence."
+ 'Some statements lack any evidence'
|
2daee974533d1510a17280cddb5a4dfc147338fa
|
tests/level/test_map.py
|
tests/level/test_map.py
|
import unittest
from hunting.level.map import LevelTile, LevelMap
class TestPathfinding(unittest.TestCase):
def test_basic_diagonal(self):
level_map = LevelMap()
level_map.set_map([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)])
self.assertEqual([(1, 1), (2, 2), (3, 3), (4, 4)], level_map.a_star_path(0, 0, 4, 4))
def test_paths_around_wall(self):
level_map = LevelMap()
level_map.set_map([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)])
for x in range(1, 5):
level_map[x][1].blocks = True
self.assertEqual([(3, 0), (2, 0), (1, 0), (0, 1), (1, 2), (2, 2), (3, 2), (4, 2)],
level_map.a_star_path(4, 0, 4, 2))
|
import unittest
from hunting.level.map import LevelTile, LevelMap
class TestPathfinding(unittest.TestCase):
def test_basic_diagonal(self):
level_map = LevelMap([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)])
self.assertEqual([(1, 1), (2, 2), (3, 3), (4, 4)], level_map.a_star_path(0, 0, 4, 4))
def test_paths_around_wall(self):
level_map = LevelMap([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)])
for x in range(1, 5):
level_map[x][1].blocks = True
self.assertEqual([(3, 0), (2, 0), (1, 0), (0, 1), (1, 2), (2, 2), (3, 2), (4, 2)],
level_map.a_star_path(4, 0, 4, 2))
def tests_force_pathable_endpoint_parameter(self):
level_map = LevelMap([[LevelTile(False, False)], [LevelTile(True, True)]])
self.assertEqual([(1, 0)], level_map.a_star_path(0, 0, 1, 0, True))
self.assertEqual([], level_map.a_star_path(0, 0, 1, 0, False))
|
Add test for force_pathable_endpoint pathfind param
|
Add test for force_pathable_endpoint pathfind param
This parameter is intended to allow pathing to adjacent squares
of an unpassable square. This is necessary because if you want to
pathfind to a monster which blocks a square, you don't want to
actually go *onto* the square, you just want to go next to it,
presumably so you can hit it.
|
Python
|
mit
|
MoyTW/RL_Arena_Experiment
|
import unittest
from hunting.level.map import LevelTile, LevelMap
class TestPathfinding(unittest.TestCase):
def test_basic_diagonal(self):
- level_map = LevelMap()
- level_map.set_map([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)])
+ level_map = LevelMap([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)])
self.assertEqual([(1, 1), (2, 2), (3, 3), (4, 4)], level_map.a_star_path(0, 0, 4, 4))
def test_paths_around_wall(self):
- level_map = LevelMap()
- level_map.set_map([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)])
+ level_map = LevelMap([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)])
for x in range(1, 5):
level_map[x][1].blocks = True
self.assertEqual([(3, 0), (2, 0), (1, 0), (0, 1), (1, 2), (2, 2), (3, 2), (4, 2)],
level_map.a_star_path(4, 0, 4, 2))
+ def tests_force_pathable_endpoint_parameter(self):
+ level_map = LevelMap([[LevelTile(False, False)], [LevelTile(True, True)]])
+
+ self.assertEqual([(1, 0)], level_map.a_star_path(0, 0, 1, 0, True))
+ self.assertEqual([], level_map.a_star_path(0, 0, 1, 0, False))
+
|
Add test for force_pathable_endpoint pathfind param
|
## Code Before:
import unittest
from hunting.level.map import LevelTile, LevelMap
class TestPathfinding(unittest.TestCase):
def test_basic_diagonal(self):
level_map = LevelMap()
level_map.set_map([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)])
self.assertEqual([(1, 1), (2, 2), (3, 3), (4, 4)], level_map.a_star_path(0, 0, 4, 4))
def test_paths_around_wall(self):
level_map = LevelMap()
level_map.set_map([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)])
for x in range(1, 5):
level_map[x][1].blocks = True
self.assertEqual([(3, 0), (2, 0), (1, 0), (0, 1), (1, 2), (2, 2), (3, 2), (4, 2)],
level_map.a_star_path(4, 0, 4, 2))
## Instruction:
Add test for force_pathable_endpoint pathfind param
## Code After:
import unittest
from hunting.level.map import LevelTile, LevelMap
class TestPathfinding(unittest.TestCase):
def test_basic_diagonal(self):
level_map = LevelMap([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)])
self.assertEqual([(1, 1), (2, 2), (3, 3), (4, 4)], level_map.a_star_path(0, 0, 4, 4))
def test_paths_around_wall(self):
level_map = LevelMap([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)])
for x in range(1, 5):
level_map[x][1].blocks = True
self.assertEqual([(3, 0), (2, 0), (1, 0), (0, 1), (1, 2), (2, 2), (3, 2), (4, 2)],
level_map.a_star_path(4, 0, 4, 2))
def tests_force_pathable_endpoint_parameter(self):
level_map = LevelMap([[LevelTile(False, False)], [LevelTile(True, True)]])
self.assertEqual([(1, 0)], level_map.a_star_path(0, 0, 1, 0, True))
self.assertEqual([], level_map.a_star_path(0, 0, 1, 0, False))
|
import unittest
from hunting.level.map import LevelTile, LevelMap
class TestPathfinding(unittest.TestCase):
def test_basic_diagonal(self):
- level_map = LevelMap()
- level_map.set_map([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)])
? ^^ ^^^
+ level_map = LevelMap([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)])
? ^^^^ ^^^^
self.assertEqual([(1, 1), (2, 2), (3, 3), (4, 4)], level_map.a_star_path(0, 0, 4, 4))
def test_paths_around_wall(self):
- level_map = LevelMap()
- level_map.set_map([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)])
? ^^ ^^^
+ level_map = LevelMap([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)])
? ^^^^ ^^^^
for x in range(1, 5):
level_map[x][1].blocks = True
self.assertEqual([(3, 0), (2, 0), (1, 0), (0, 1), (1, 2), (2, 2), (3, 2), (4, 2)],
level_map.a_star_path(4, 0, 4, 2))
+
+ def tests_force_pathable_endpoint_parameter(self):
+ level_map = LevelMap([[LevelTile(False, False)], [LevelTile(True, True)]])
+
+ self.assertEqual([(1, 0)], level_map.a_star_path(0, 0, 1, 0, True))
+ self.assertEqual([], level_map.a_star_path(0, 0, 1, 0, False))
|
8eb47d151868c8e5906af054749993cd46a73b2d
|
capstone/player/kerasplayer.py
|
capstone/player/kerasplayer.py
|
from keras.models import load_model
from . import Player
from ..utils import normalize_board, utility
class KerasPlayer(Player):
'''
Takes moves based on a Keras neural network model.
'''
name = 'Keras'
def __init__(self, filepath):
self.model = load_model(filepath)
def __str__(self):
return self.name
def __repr__(self):
return self.name
##########
# Player #
##########
def choose_move(self, state):
assert state.cur_player() == 0
best_action = None
best_value = -1000000
for action in state.legal_moves():
s = state.copy()
s = s.make_move(action)
value = self.model.predict(normalize_board(s.board), batch_size=1)
assert value >= -1.0 and value <= 1.0
if value > best_value:
best_action = action
best_value = value
return best_action
|
from keras.models import load_model
from . import Player
from ..utils import normalize_board, utility
class KerasPlayer(Player):
'''
Takes moves based on a Keras neural network model.
'''
name = 'Keras'
def __init__(self, filepath):
self.model = load_model(filepath)
def __str__(self):
return self.name
def __repr__(self):
return self.name
##########
# Player #
##########
def choose_move(self, game):
assert game.cur_player() == 0
best_move = None
best_value = -1000000
for move in game.legal_moves():
next_game = game.copy().make_move(move)
value = self.model.predict(normalize_board(next_game.board), batch_size=1)
assert value >= -1.0 and value <= 1.0
if value > best_value:
best_move = move
best_value = value
return best_move
|
Rename state to game in KerasPlayer
|
Rename state to game in KerasPlayer
|
Python
|
mit
|
davidrobles/mlnd-capstone-code
|
from keras.models import load_model
from . import Player
from ..utils import normalize_board, utility
class KerasPlayer(Player):
'''
Takes moves based on a Keras neural network model.
'''
name = 'Keras'
def __init__(self, filepath):
self.model = load_model(filepath)
def __str__(self):
return self.name
def __repr__(self):
return self.name
##########
# Player #
##########
- def choose_move(self, state):
+ def choose_move(self, game):
- assert state.cur_player() == 0
+ assert game.cur_player() == 0
- best_action = None
+ best_move = None
best_value = -1000000
- for action in state.legal_moves():
+ for move in game.legal_moves():
+ next_game = game.copy().make_move(move)
- s = state.copy()
- s = s.make_move(action)
- value = self.model.predict(normalize_board(s.board), batch_size=1)
+ value = self.model.predict(normalize_board(next_game.board), batch_size=1)
assert value >= -1.0 and value <= 1.0
if value > best_value:
- best_action = action
+ best_move = move
best_value = value
- return best_action
+ return best_move
|
Rename state to game in KerasPlayer
|
## Code Before:
from keras.models import load_model
from . import Player
from ..utils import normalize_board, utility
class KerasPlayer(Player):
'''
Takes moves based on a Keras neural network model.
'''
name = 'Keras'
def __init__(self, filepath):
self.model = load_model(filepath)
def __str__(self):
return self.name
def __repr__(self):
return self.name
##########
# Player #
##########
def choose_move(self, state):
assert state.cur_player() == 0
best_action = None
best_value = -1000000
for action in state.legal_moves():
s = state.copy()
s = s.make_move(action)
value = self.model.predict(normalize_board(s.board), batch_size=1)
assert value >= -1.0 and value <= 1.0
if value > best_value:
best_action = action
best_value = value
return best_action
## Instruction:
Rename state to game in KerasPlayer
## Code After:
from keras.models import load_model
from . import Player
from ..utils import normalize_board, utility
class KerasPlayer(Player):
'''
Takes moves based on a Keras neural network model.
'''
name = 'Keras'
def __init__(self, filepath):
self.model = load_model(filepath)
def __str__(self):
return self.name
def __repr__(self):
return self.name
##########
# Player #
##########
def choose_move(self, game):
assert game.cur_player() == 0
best_move = None
best_value = -1000000
for move in game.legal_moves():
next_game = game.copy().make_move(move)
value = self.model.predict(normalize_board(next_game.board), batch_size=1)
assert value >= -1.0 and value <= 1.0
if value > best_value:
best_move = move
best_value = value
return best_move
|
from keras.models import load_model
from . import Player
from ..utils import normalize_board, utility
class KerasPlayer(Player):
'''
Takes moves based on a Keras neural network model.
'''
name = 'Keras'
def __init__(self, filepath):
self.model = load_model(filepath)
def __str__(self):
return self.name
def __repr__(self):
return self.name
##########
# Player #
##########
- def choose_move(self, state):
? ^^ ^
+ def choose_move(self, game):
? ^ ^
- assert state.cur_player() == 0
? ^^ ^
+ assert game.cur_player() == 0
? ^ ^
- best_action = None
? ^^^^ ^
+ best_move = None
? ^ ^^
best_value = -1000000
- for action in state.legal_moves():
? ^^^^ ^ ^^ ^
+ for move in game.legal_moves():
? ^ ^^ ^ ^
+ next_game = game.copy().make_move(move)
- s = state.copy()
- s = s.make_move(action)
- value = self.model.predict(normalize_board(s.board), batch_size=1)
? ^
+ value = self.model.predict(normalize_board(next_game.board), batch_size=1)
? ^^^^^^^^^
assert value >= -1.0 and value <= 1.0
if value > best_value:
- best_action = action
? ^^^^ ^ ^^^^ ^
+ best_move = move
? ^ ^^ ^ ^^
best_value = value
- return best_action
? ^^^^ ^
+ return best_move
? ^ ^^
|
a49cc6d6ca1ce22358292c00d847cb424306b229
|
wordsaladflask.py
|
wordsaladflask.py
|
import wordsalad
from flask import Flask
App = Flask(__name__)
@App.route("salad/<int:n>/<string:corpus>")
def _get(self, n, corpus="default"):
"""Generate n word salads from the given (optional) corpus."""
pass
@App.route("salad/corpuses")
def _get_corpuses(self):
"""Fetch a list of "corpus:es" we can use as a source text.
Returns the list as a JSON-list of strings."""
pass
|
import wordsalad
from flask import Flask
App = Flask(__name__)
@App.route("salad/<int:n>/<string:corpus>")
def _get(self, n, corpus="default"):
"""Generate n word salads from the given (optional) corpus."""
pass
@App.route("salad/corpuses")
def _get_corpora(self):
"""Fetch a list of "corpora" we can use as a source text.
Returns the list as a JSON-list of strings."""
pass
def main():
app.run()
if __name__ == '__main__':
main()
|
Use the proper words ;)
|
Use the proper words ;)
|
Python
|
mit
|
skurmedel/wordsalad
|
import wordsalad
from flask import Flask
App = Flask(__name__)
@App.route("salad/<int:n>/<string:corpus>")
def _get(self, n, corpus="default"):
"""Generate n word salads from the given (optional) corpus."""
pass
@App.route("salad/corpuses")
- def _get_corpuses(self):
+ def _get_corpora(self):
- """Fetch a list of "corpus:es" we can use as a source text.
+ """Fetch a list of "corpora" we can use as a source text.
Returns the list as a JSON-list of strings."""
pass
+ def main():
+ app.run()
+
+ if __name__ == '__main__':
+ main()
|
Use the proper words ;)
|
## Code Before:
import wordsalad
from flask import Flask
App = Flask(__name__)
@App.route("salad/<int:n>/<string:corpus>")
def _get(self, n, corpus="default"):
"""Generate n word salads from the given (optional) corpus."""
pass
@App.route("salad/corpuses")
def _get_corpuses(self):
"""Fetch a list of "corpus:es" we can use as a source text.
Returns the list as a JSON-list of strings."""
pass
## Instruction:
Use the proper words ;)
## Code After:
import wordsalad
from flask import Flask
App = Flask(__name__)
@App.route("salad/<int:n>/<string:corpus>")
def _get(self, n, corpus="default"):
"""Generate n word salads from the given (optional) corpus."""
pass
@App.route("salad/corpuses")
def _get_corpora(self):
"""Fetch a list of "corpora" we can use as a source text.
Returns the list as a JSON-list of strings."""
pass
def main():
app.run()
if __name__ == '__main__':
main()
|
import wordsalad
from flask import Flask
App = Flask(__name__)
@App.route("salad/<int:n>/<string:corpus>")
def _get(self, n, corpus="default"):
"""Generate n word salads from the given (optional) corpus."""
pass
@App.route("salad/corpuses")
- def _get_corpuses(self):
? ^^^^
+ def _get_corpora(self):
? ^^^
- """Fetch a list of "corpus:es" we can use as a source text.
? ^^^^^
+ """Fetch a list of "corpora" we can use as a source text.
? ^^^
Returns the list as a JSON-list of strings."""
pass
+
+ def main():
+ app.run()
+
+ if __name__ == '__main__':
+ main()
|
25d39a7b78860102f7971033227ec157789a40b3
|
reporter/components/api_client.py
|
reporter/components/api_client.py
|
import json
import os
import requests
class ApiClient:
def __init__(self, host=None, timeout=5):
self.host = host or self.__default_host()
self.timeout = timeout
def post(self, payload):
print("Submitting payload to %s" % self.host)
headers = {"Content-Type": "application/json"}
response = requests.post(
"%s/test_reports" % self.host,
data=json.dumps(payload),
headers=headers,
timeout=self.timeout
)
return response
def __default_host(self):
return os.environ.get("CODECLIMATE_HOST", "https://codeclimate.com")
|
import json
import os
import requests
class ApiClient:
def __init__(self, host=None, timeout=5):
self.host = host or self.__default_host().rstrip("/")
self.timeout = timeout
def post(self, payload):
print("Submitting payload to %s" % self.host)
headers = {"Content-Type": "application/json"}
response = requests.post(
"%s/test_reports" % self.host,
data=json.dumps(payload),
headers=headers,
timeout=self.timeout
)
return response
def __default_host(self):
return os.environ.get("CODECLIMATE_API_HOST", "https://codeclimate.com")
|
Update ApiClient host env var to CODECLIMATE_API_HOST
|
Update ApiClient host env var to CODECLIMATE_API_HOST
This commit also strips trailing slashes from the host.
|
Python
|
mit
|
codeclimate/python-test-reporter,codeclimate/python-test-reporter
|
import json
import os
import requests
class ApiClient:
def __init__(self, host=None, timeout=5):
- self.host = host or self.__default_host()
+ self.host = host or self.__default_host().rstrip("/")
self.timeout = timeout
def post(self, payload):
print("Submitting payload to %s" % self.host)
headers = {"Content-Type": "application/json"}
response = requests.post(
"%s/test_reports" % self.host,
data=json.dumps(payload),
headers=headers,
timeout=self.timeout
)
return response
def __default_host(self):
- return os.environ.get("CODECLIMATE_HOST", "https://codeclimate.com")
+ return os.environ.get("CODECLIMATE_API_HOST", "https://codeclimate.com")
|
Update ApiClient host env var to CODECLIMATE_API_HOST
|
## Code Before:
import json
import os
import requests
class ApiClient:
def __init__(self, host=None, timeout=5):
self.host = host or self.__default_host()
self.timeout = timeout
def post(self, payload):
print("Submitting payload to %s" % self.host)
headers = {"Content-Type": "application/json"}
response = requests.post(
"%s/test_reports" % self.host,
data=json.dumps(payload),
headers=headers,
timeout=self.timeout
)
return response
def __default_host(self):
return os.environ.get("CODECLIMATE_HOST", "https://codeclimate.com")
## Instruction:
Update ApiClient host env var to CODECLIMATE_API_HOST
## Code After:
import json
import os
import requests
class ApiClient:
def __init__(self, host=None, timeout=5):
self.host = host or self.__default_host().rstrip("/")
self.timeout = timeout
def post(self, payload):
print("Submitting payload to %s" % self.host)
headers = {"Content-Type": "application/json"}
response = requests.post(
"%s/test_reports" % self.host,
data=json.dumps(payload),
headers=headers,
timeout=self.timeout
)
return response
def __default_host(self):
return os.environ.get("CODECLIMATE_API_HOST", "https://codeclimate.com")
|
import json
import os
import requests
class ApiClient:
def __init__(self, host=None, timeout=5):
- self.host = host or self.__default_host()
+ self.host = host or self.__default_host().rstrip("/")
? ++++++++++++
self.timeout = timeout
def post(self, payload):
print("Submitting payload to %s" % self.host)
headers = {"Content-Type": "application/json"}
response = requests.post(
"%s/test_reports" % self.host,
data=json.dumps(payload),
headers=headers,
timeout=self.timeout
)
return response
def __default_host(self):
- return os.environ.get("CODECLIMATE_HOST", "https://codeclimate.com")
+ return os.environ.get("CODECLIMATE_API_HOST", "https://codeclimate.com")
? ++++
|
b29e607d56ab07d07f4e33e2229a728cf0be1585
|
usability/python-markdown/pymdpreprocessor.py
|
usability/python-markdown/pymdpreprocessor.py
|
#-----------------------------------------------------------------------------
# Copyright (c) 2014, Juergen Hasch
#
# Distributed under the terms of the Modified BSD License.
#
#-----------------------------------------------------------------------------
from IPython.nbconvert.preprocessors import *
import re
class PyMarkdownPreprocessor(Preprocessor):
def replace_variables(self,source,variables):
"""
Replace {{variablename}} with stored value
"""
try:
replaced = re.sub("{{(.*?)}}", lambda m: variables[m.group(1)] , source)
except TypeError:
replaced = source
return replaced
def preprocess_cell(self, cell, resources, index):
"""
Preprocess cell
Parameters
----------
cell : NotebookNode cell
Notebook cell being processed
resources : dictionary
Additional resources used in the conversion process. Allows
preprocessors to pass variables into the Jinja engine.
cell_index : int
Index of the cell being processed (see base.py)
"""
if cell.cell_type == "markdown":
if hasattr(cell['metadata'], 'variables'):
variables = cell['metadata']['variables']
if len(variables) > 0:
cell.source = self.replace_variables(cell.source, variables)
return cell, resources
|
from nbconvert.preprocessors import *
import re
def get_variable( match, variables):
try:
x = variables[match]
return x
except KeyError:
return ""
class PyMarkdownPreprocessor(Preprocessor):
def replace_variables(self,source,variables):
"""
Replace {{variablename}} with stored value
"""
try:
replaced = re.sub("{{(.*?)}}", lambda m: get_variable(m.group(1),variables) , source)
except TypeError:
replaced = source
return replaced
def preprocess_cell(self, cell, resources, index):
"""
Preprocess cell
Parameters
----------
cell : NotebookNode cell
Notebook cell being processed
resources : dictionary
Additional resources used in the conversion process. Allows
preprocessors to pass variables into the Jinja engine.
cell_index : int
Index of the cell being processed (see base.py)
"""
if cell.cell_type == "markdown":
if hasattr(cell['metadata'], 'variables'):
variables = cell['metadata']['variables']
if len(variables) > 0:
cell.source = self.replace_variables(cell.source, variables)
return cell, resources
|
Update preprocessor for 4.x: New imports and make it more robust
|
Update preprocessor for 4.x: New imports and make it more robust
|
Python
|
bsd-3-clause
|
jbn/IPython-notebook-extensions,juhasch/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions,Konubinix/IPython-notebook-extensions,jcb91/IPython-notebook-extensions,andyneff/IPython-notebook-extensions,jcb91/IPython-notebook-extensions,andyneff/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions,benvarkey/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions,motleytech/IPython-notebook-extensions,motleytech/IPython-notebook-extensions,Konubinix/IPython-notebook-extensions,motleytech/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions,benvarkey/IPython-notebook-extensions,benvarkey/IPython-notebook-extensions,jbn/IPython-notebook-extensions,juhasch/IPython-notebook-extensions,andyneff/IPython-notebook-extensions,jcb91/IPython-notebook-extensions,benvarkey/IPython-notebook-extensions,jbn/IPython-notebook-extensions,Konubinix/IPython-notebook-extensions,andyneff/IPython-notebook-extensions,motleytech/IPython-notebook-extensions,jbn/IPython-notebook-extensions,juhasch/IPython-notebook-extensions
|
- #-----------------------------------------------------------------------------
- # Copyright (c) 2014, Juergen Hasch
- #
- # Distributed under the terms of the Modified BSD License.
- #
- #-----------------------------------------------------------------------------
-
- from IPython.nbconvert.preprocessors import *
+ from nbconvert.preprocessors import *
import re
+ def get_variable( match, variables):
+ try:
+ x = variables[match]
+ return x
+ except KeyError:
+ return ""
+
+
class PyMarkdownPreprocessor(Preprocessor):
-
+
def replace_variables(self,source,variables):
"""
Replace {{variablename}} with stored value
"""
try:
- replaced = re.sub("{{(.*?)}}", lambda m: variables[m.group(1)] , source)
+ replaced = re.sub("{{(.*?)}}", lambda m: get_variable(m.group(1),variables) , source)
except TypeError:
replaced = source
return replaced
def preprocess_cell(self, cell, resources, index):
"""
Preprocess cell
Parameters
----------
cell : NotebookNode cell
Notebook cell being processed
resources : dictionary
Additional resources used in the conversion process. Allows
preprocessors to pass variables into the Jinja engine.
cell_index : int
Index of the cell being processed (see base.py)
"""
if cell.cell_type == "markdown":
if hasattr(cell['metadata'], 'variables'):
variables = cell['metadata']['variables']
if len(variables) > 0:
cell.source = self.replace_variables(cell.source, variables)
return cell, resources
|
Update preprocessor for 4.x: New imports and make it more robust
|
## Code Before:
#-----------------------------------------------------------------------------
# Copyright (c) 2014, Juergen Hasch
#
# Distributed under the terms of the Modified BSD License.
#
#-----------------------------------------------------------------------------
from IPython.nbconvert.preprocessors import *
import re
class PyMarkdownPreprocessor(Preprocessor):
def replace_variables(self,source,variables):
"""
Replace {{variablename}} with stored value
"""
try:
replaced = re.sub("{{(.*?)}}", lambda m: variables[m.group(1)] , source)
except TypeError:
replaced = source
return replaced
def preprocess_cell(self, cell, resources, index):
"""
Preprocess cell
Parameters
----------
cell : NotebookNode cell
Notebook cell being processed
resources : dictionary
Additional resources used in the conversion process. Allows
preprocessors to pass variables into the Jinja engine.
cell_index : int
Index of the cell being processed (see base.py)
"""
if cell.cell_type == "markdown":
if hasattr(cell['metadata'], 'variables'):
variables = cell['metadata']['variables']
if len(variables) > 0:
cell.source = self.replace_variables(cell.source, variables)
return cell, resources
## Instruction:
Update preprocessor for 4.x: New imports and make it more robust
## Code After:
from nbconvert.preprocessors import *
import re
def get_variable( match, variables):
try:
x = variables[match]
return x
except KeyError:
return ""
class PyMarkdownPreprocessor(Preprocessor):
def replace_variables(self,source,variables):
"""
Replace {{variablename}} with stored value
"""
try:
replaced = re.sub("{{(.*?)}}", lambda m: get_variable(m.group(1),variables) , source)
except TypeError:
replaced = source
return replaced
def preprocess_cell(self, cell, resources, index):
"""
Preprocess cell
Parameters
----------
cell : NotebookNode cell
Notebook cell being processed
resources : dictionary
Additional resources used in the conversion process. Allows
preprocessors to pass variables into the Jinja engine.
cell_index : int
Index of the cell being processed (see base.py)
"""
if cell.cell_type == "markdown":
if hasattr(cell['metadata'], 'variables'):
variables = cell['metadata']['variables']
if len(variables) > 0:
cell.source = self.replace_variables(cell.source, variables)
return cell, resources
|
- #-----------------------------------------------------------------------------
- # Copyright (c) 2014, Juergen Hasch
- #
- # Distributed under the terms of the Modified BSD License.
- #
- #-----------------------------------------------------------------------------
-
- from IPython.nbconvert.preprocessors import *
? --------
+ from nbconvert.preprocessors import *
import re
+ def get_variable( match, variables):
+ try:
+ x = variables[match]
+ return x
+ except KeyError:
+ return ""
+
+
class PyMarkdownPreprocessor(Preprocessor):
-
+
def replace_variables(self,source,variables):
"""
Replace {{variablename}} with stored value
"""
try:
- replaced = re.sub("{{(.*?)}}", lambda m: variables[m.group(1)] , source)
? ^^ ^
+ replaced = re.sub("{{(.*?)}}", lambda m: get_variable(m.group(1),variables) , source)
? ++++ ^ ^^^^^^^^^^^
except TypeError:
replaced = source
return replaced
def preprocess_cell(self, cell, resources, index):
"""
Preprocess cell
Parameters
----------
cell : NotebookNode cell
Notebook cell being processed
resources : dictionary
Additional resources used in the conversion process. Allows
preprocessors to pass variables into the Jinja engine.
cell_index : int
Index of the cell being processed (see base.py)
"""
if cell.cell_type == "markdown":
if hasattr(cell['metadata'], 'variables'):
variables = cell['metadata']['variables']
if len(variables) > 0:
cell.source = self.replace_variables(cell.source, variables)
return cell, resources
|
b6fff4186de098946cc1e4c0204f78936f73044f
|
tests/basics/tuple1.py
|
tests/basics/tuple1.py
|
x = (1, 2, 3 * 4)
print(x)
try:
x[0] = 4
except TypeError:
print("TypeError")
print(x)
try:
x.append(5)
except AttributeError:
print("AttributeError")
print(x[1:])
print(x[:-1])
print(x[2:3])
print(x + (10, 100, 10000))
# construction of tuple from large iterator (tests implementation detail of uPy)
print(tuple(range(20)))
# unsupported unary operation
try:
+()
except TypeError:
print('TypeError')
# unsupported type on RHS of add
try:
() + None
except TypeError:
print('TypeError')
|
x = (1, 2, 3 * 4)
print(x)
try:
x[0] = 4
except TypeError:
print("TypeError")
print(x)
try:
x.append(5)
except AttributeError:
print("AttributeError")
print(x[1:])
print(x[:-1])
print(x[2:3])
print(x + (10, 100, 10000))
# inplace add operator
x += (10, 11, 12)
print(x)
# construction of tuple from large iterator (tests implementation detail of uPy)
print(tuple(range(20)))
# unsupported unary operation
try:
+()
except TypeError:
print('TypeError')
# unsupported type on RHS of add
try:
() + None
except TypeError:
print('TypeError')
|
Add test for tuple inplace add.
|
tests/basics: Add test for tuple inplace add.
|
Python
|
mit
|
infinnovation/micropython,dmazzella/micropython,henriknelson/micropython,chrisdearman/micropython,deshipu/micropython,AriZuu/micropython,infinnovation/micropython,AriZuu/micropython,puuu/micropython,alex-robbins/micropython,torwag/micropython,SHA2017-badge/micropython-esp32,tralamazza/micropython,chrisdearman/micropython,deshipu/micropython,micropython/micropython-esp32,trezor/micropython,pfalcon/micropython,dmazzella/micropython,AriZuu/micropython,cwyark/micropython,kerneltask/micropython,torwag/micropython,dmazzella/micropython,selste/micropython,alex-robbins/micropython,pozetroninc/micropython,pozetroninc/micropython,swegener/micropython,tralamazza/micropython,lowRISC/micropython,pramasoul/micropython,PappaPeppar/micropython,deshipu/micropython,cwyark/micropython,lowRISC/micropython,ryannathans/micropython,MrSurly/micropython-esp32,adafruit/circuitpython,oopy/micropython,adafruit/micropython,chrisdearman/micropython,MrSurly/micropython,pramasoul/micropython,puuu/micropython,oopy/micropython,lowRISC/micropython,kerneltask/micropython,MrSurly/micropython-esp32,pozetroninc/micropython,ryannathans/micropython,infinnovation/micropython,swegener/micropython,torwag/micropython,bvernoux/micropython,selste/micropython,trezor/micropython,tobbad/micropython,blazewicz/micropython,adafruit/circuitpython,infinnovation/micropython,TDAbboud/micropython,torwag/micropython,bvernoux/micropython,SHA2017-badge/micropython-esp32,adafruit/circuitpython,HenrikSolver/micropython,AriZuu/micropython,micropython/micropython-esp32,blazewicz/micropython,cwyark/micropython,tralamazza/micropython,hiway/micropython,henriknelson/micropython,oopy/micropython,HenrikSolver/micropython,bvernoux/micropython,tobbad/micropython,hiway/micropython,deshipu/micropython,torwag/micropython,henriknelson/micropython,henriknelson/micropython,Peetz0r/micropython-esp32,kerneltask/micropython,cwyark/micropython,blazewicz/micropython,micropython/micropython-esp32,cwyark/micropython,micropython/micropython-esp32,ryan
nathans/micropython,PappaPeppar/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,kerneltask/micropython,alex-robbins/micropython,deshipu/micropython,PappaPeppar/micropython,pfalcon/micropython,tobbad/micropython,pramasoul/micropython,MrSurly/micropython-esp32,hiway/micropython,ryannathans/micropython,swegener/micropython,kerneltask/micropython,selste/micropython,HenrikSolver/micropython,Peetz0r/micropython-esp32,blazewicz/micropython,Timmenem/micropython,blazewicz/micropython,adafruit/circuitpython,Timmenem/micropython,MrSurly/micropython,Timmenem/micropython,adafruit/circuitpython,infinnovation/micropython,tralamazza/micropython,toolmacher/micropython,MrSurly/micropython,pozetroninc/micropython,adafruit/circuitpython,lowRISC/micropython,ryannathans/micropython,toolmacher/micropython,pfalcon/micropython,dmazzella/micropython,lowRISC/micropython,Peetz0r/micropython-esp32,SHA2017-badge/micropython-esp32,chrisdearman/micropython,AriZuu/micropython,adafruit/micropython,hiway/micropython,SHA2017-badge/micropython-esp32,bvernoux/micropython,TDAbboud/micropython,oopy/micropython,MrSurly/micropython-esp32,pramasoul/micropython,tobbad/micropython,tobbad/micropython,selste/micropython,adafruit/micropython,HenrikSolver/micropython,puuu/micropython,puuu/micropython,trezor/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,chrisdearman/micropython,toolmacher/micropython,oopy/micropython,hiway/micropython,adafruit/micropython,toolmacher/micropython,alex-robbins/micropython,Timmenem/micropython,TDAbboud/micropython,alex-robbins/micropython,pfalcon/micropython,pramasoul/micropython,bvernoux/micropython,henriknelson/micropython,trezor/micropython,TDAbboud/micropython,selste/micropython,adafruit/micropython,puuu/micropython,swegener/micropython,micropython/micropython-esp32,TDAbboud/micropython,MrSurly/micropython,swegener/micropython,HenrikSolver/micropython,pozetroninc/micropython,pfalcon/micropython,MrSurly/micropython-esp32,trezor/micropython,MrSurly/micro
python,Timmenem/micropython,SHA2017-badge/micropython-esp32,toolmacher/micropython
|
x = (1, 2, 3 * 4)
print(x)
try:
x[0] = 4
except TypeError:
print("TypeError")
print(x)
try:
x.append(5)
except AttributeError:
print("AttributeError")
print(x[1:])
print(x[:-1])
print(x[2:3])
print(x + (10, 100, 10000))
+ # inplace add operator
+ x += (10, 11, 12)
+ print(x)
+
# construction of tuple from large iterator (tests implementation detail of uPy)
print(tuple(range(20)))
# unsupported unary operation
try:
+()
except TypeError:
print('TypeError')
# unsupported type on RHS of add
try:
() + None
except TypeError:
print('TypeError')
|
Add test for tuple inplace add.
|
## Code Before:
x = (1, 2, 3 * 4)
print(x)
try:
x[0] = 4
except TypeError:
print("TypeError")
print(x)
try:
x.append(5)
except AttributeError:
print("AttributeError")
print(x[1:])
print(x[:-1])
print(x[2:3])
print(x + (10, 100, 10000))
# construction of tuple from large iterator (tests implementation detail of uPy)
print(tuple(range(20)))
# unsupported unary operation
try:
+()
except TypeError:
print('TypeError')
# unsupported type on RHS of add
try:
() + None
except TypeError:
print('TypeError')
## Instruction:
Add test for tuple inplace add.
## Code After:
x = (1, 2, 3 * 4)
print(x)
try:
x[0] = 4
except TypeError:
print("TypeError")
print(x)
try:
x.append(5)
except AttributeError:
print("AttributeError")
print(x[1:])
print(x[:-1])
print(x[2:3])
print(x + (10, 100, 10000))
# inplace add operator
x += (10, 11, 12)
print(x)
# construction of tuple from large iterator (tests implementation detail of uPy)
print(tuple(range(20)))
# unsupported unary operation
try:
+()
except TypeError:
print('TypeError')
# unsupported type on RHS of add
try:
() + None
except TypeError:
print('TypeError')
|
x = (1, 2, 3 * 4)
print(x)
try:
x[0] = 4
except TypeError:
print("TypeError")
print(x)
try:
x.append(5)
except AttributeError:
print("AttributeError")
print(x[1:])
print(x[:-1])
print(x[2:3])
print(x + (10, 100, 10000))
+ # inplace add operator
+ x += (10, 11, 12)
+ print(x)
+
# construction of tuple from large iterator (tests implementation detail of uPy)
print(tuple(range(20)))
# unsupported unary operation
try:
+()
except TypeError:
print('TypeError')
# unsupported type on RHS of add
try:
() + None
except TypeError:
print('TypeError')
|
fd09e0ef4ea9a0dede74e5a87ad108a75d5e5ce7
|
comrade/core/decorators.py
|
comrade/core/decorators.py
|
from django.utils.decorators import available_attrs
from django.utils.http import urlquote
from django.http import HttpResponse
from django.template import loader, RequestContext
from functools import wraps
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
def authorized(test_func, template_name='401.html'):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the unauthorized page if it fails. The test should be a
callable that takes the user object and returns True if the user passes.
"""
def decorator(view_func):
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user, *args, **kwargs):
return view_func(request, *args, **kwargs)
path = urlquote(request.get_full_path())
t = loader.get_template(template_name)
return HttpResponse(t.render(RequestContext(request)), status=401)
return wraps(view_func,
assigned=available_attrs(view_func))(_wrapped_view)
return decorator
|
from django.shortcuts import get_object_or_404
from django.utils.decorators import available_attrs
from django.utils.http import urlquote
from django.http import HttpResponse
from django.template import loader, RequestContext
from functools import wraps
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
def authorized(test_func, template_name='401.html'):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the unauthorized page if it fails. The test should be a
callable that takes the user object and returns True if the user passes.
"""
def decorator(view_func):
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user, *args, **kwargs):
return view_func(request, *args, **kwargs)
path = urlquote(request.get_full_path())
t = loader.get_template(template_name)
return HttpResponse(t.render(RequestContext(request)), status=401)
return wraps(view_func,
assigned=available_attrs(view_func))(_wrapped_view)
return decorator
def load_instance(model):
def decorator(view):
def _wrapper(request, object_id=None, *args, **kwargs):
if object_id:
instance = get_object_or_404(model, pk=object_id)
return view(request, instance, *args, **kwargs)
return view(request, *args, **kwargs)
return wraps(view)(_wrapper)
return decorator
|
Add decorator for loading instance of a model in a view.
|
Add decorator for loading instance of a model in a view.
|
Python
|
mit
|
bueda/django-comrade
|
+ from django.shortcuts import get_object_or_404
from django.utils.decorators import available_attrs
from django.utils.http import urlquote
from django.http import HttpResponse
from django.template import loader, RequestContext
from functools import wraps
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
def authorized(test_func, template_name='401.html'):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the unauthorized page if it fails. The test should be a
callable that takes the user object and returns True if the user passes.
"""
def decorator(view_func):
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user, *args, **kwargs):
return view_func(request, *args, **kwargs)
path = urlquote(request.get_full_path())
t = loader.get_template(template_name)
return HttpResponse(t.render(RequestContext(request)), status=401)
return wraps(view_func,
assigned=available_attrs(view_func))(_wrapped_view)
return decorator
+ def load_instance(model):
+ def decorator(view):
+ def _wrapper(request, object_id=None, *args, **kwargs):
+ if object_id:
+ instance = get_object_or_404(model, pk=object_id)
+ return view(request, instance, *args, **kwargs)
+ return view(request, *args, **kwargs)
+ return wraps(view)(_wrapper)
+ return decorator
+
|
Add decorator for loading instance of a model in a view.
|
## Code Before:
from django.utils.decorators import available_attrs
from django.utils.http import urlquote
from django.http import HttpResponse
from django.template import loader, RequestContext
from functools import wraps
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
def authorized(test_func, template_name='401.html'):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the unauthorized page if it fails. The test should be a
callable that takes the user object and returns True if the user passes.
"""
def decorator(view_func):
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user, *args, **kwargs):
return view_func(request, *args, **kwargs)
path = urlquote(request.get_full_path())
t = loader.get_template(template_name)
return HttpResponse(t.render(RequestContext(request)), status=401)
return wraps(view_func,
assigned=available_attrs(view_func))(_wrapped_view)
return decorator
## Instruction:
Add decorator for loading instance of a model in a view.
## Code After:
from django.shortcuts import get_object_or_404
from django.utils.decorators import available_attrs
from django.utils.http import urlquote
from django.http import HttpResponse
from django.template import loader, RequestContext
from functools import wraps
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
def authorized(test_func, template_name='401.html'):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the unauthorized page if it fails. The test should be a
callable that takes the user object and returns True if the user passes.
"""
def decorator(view_func):
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user, *args, **kwargs):
return view_func(request, *args, **kwargs)
path = urlquote(request.get_full_path())
t = loader.get_template(template_name)
return HttpResponse(t.render(RequestContext(request)), status=401)
return wraps(view_func,
assigned=available_attrs(view_func))(_wrapped_view)
return decorator
def load_instance(model):
def decorator(view):
def _wrapper(request, object_id=None, *args, **kwargs):
if object_id:
instance = get_object_or_404(model, pk=object_id)
return view(request, instance, *args, **kwargs)
return view(request, *args, **kwargs)
return wraps(view)(_wrapper)
return decorator
|
+ from django.shortcuts import get_object_or_404
from django.utils.decorators import available_attrs
from django.utils.http import urlquote
from django.http import HttpResponse
from django.template import loader, RequestContext
from functools import wraps
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
def authorized(test_func, template_name='401.html'):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the unauthorized page if it fails. The test should be a
callable that takes the user object and returns True if the user passes.
"""
def decorator(view_func):
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user, *args, **kwargs):
return view_func(request, *args, **kwargs)
path = urlquote(request.get_full_path())
t = loader.get_template(template_name)
return HttpResponse(t.render(RequestContext(request)), status=401)
return wraps(view_func,
assigned=available_attrs(view_func))(_wrapped_view)
return decorator
+
+ def load_instance(model):
+ def decorator(view):
+ def _wrapper(request, object_id=None, *args, **kwargs):
+ if object_id:
+ instance = get_object_or_404(model, pk=object_id)
+ return view(request, instance, *args, **kwargs)
+ return view(request, *args, **kwargs)
+ return wraps(view)(_wrapper)
+ return decorator
|
d85b58a0edce8321312eff66f16fc72439e4426a
|
app/sense.py
|
app/sense.py
|
from Sensor import SenseController
from KeyDispatcher import KeyDispatcher
from Display import Display
from DataLogger import SQLiteLogger
DEVICE = "PiSense"
class Handler:
def __init__(self, display, logger, sensor):
self.display = display
self.logger = logger
self.sensor = sensor
self.logger.log(DEVICE, "running", 1)
def read(self):
values = {}
for reading in self.sensor.get_data():
values[reading[1]] = reading[2]
self.logger.log(DEVICE, reading[1], reading[2], reading[0])
display.show_properties(values, self.sensor.get_properties())
return True
def quit(self):
self.logger.log(DEVICE, "running", 0)
return False
with SenseController() as sensor, KeyDispatcher() as dispatcher, SQLiteLogger() as logger:
# setup display
display = Display("PiSense")
# setup key handlers
handler = Handler(display, logger, sensor)
dispatcher.add("q", handler, "quit")
# start processing key presses
while True:
if dispatcher.can_process_key():
if not dispatcher.process_key():
break
else:
handler.read()
|
from Sensor import SenseController
from KeyDispatcher import KeyDispatcher
from Display import Display
from DataLogger import SQLiteLogger
import time
DEVICE = "PiSense"
DELAY = 0.0
class Handler:
def __init__(self, display, logger, sensor):
self.display = display
self.logger = logger
self.sensor = sensor
self.logger.log(DEVICE, "running", 1)
def read(self):
values = {}
for reading in self.sensor.get_data():
values[reading[1]] = reading[2]
self.logger.log(DEVICE, reading[1], reading[2], reading[0])
display.show_properties(values, self.sensor.get_properties())
return True
def quit(self):
self.logger.log(DEVICE, "running", 0)
return False
with SenseController() as sensor, KeyDispatcher() as dispatcher, SQLiteLogger() as logger:
# setup display
display = Display("PiSense")
# setup key handlers
handler = Handler(display, logger, sensor)
dispatcher.add("q", handler, "quit")
# start processing key presses
while True:
if dispatcher.can_process_key():
if not dispatcher.process_key():
break
else:
handler.read()
time.sleep(DELAY)
|
Add ability to control read rate
|
Add ability to control read rate
|
Python
|
mit
|
thelonious/g2x,gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x
|
from Sensor import SenseController
from KeyDispatcher import KeyDispatcher
from Display import Display
from DataLogger import SQLiteLogger
+ import time
DEVICE = "PiSense"
+ DELAY = 0.0
class Handler:
def __init__(self, display, logger, sensor):
self.display = display
self.logger = logger
self.sensor = sensor
self.logger.log(DEVICE, "running", 1)
def read(self):
values = {}
for reading in self.sensor.get_data():
values[reading[1]] = reading[2]
self.logger.log(DEVICE, reading[1], reading[2], reading[0])
display.show_properties(values, self.sensor.get_properties())
return True
def quit(self):
self.logger.log(DEVICE, "running", 0)
return False
with SenseController() as sensor, KeyDispatcher() as dispatcher, SQLiteLogger() as logger:
# setup display
display = Display("PiSense")
# setup key handlers
handler = Handler(display, logger, sensor)
dispatcher.add("q", handler, "quit")
# start processing key presses
while True:
if dispatcher.can_process_key():
if not dispatcher.process_key():
break
else:
handler.read()
+ time.sleep(DELAY)
|
Add ability to control read rate
|
## Code Before:
from Sensor import SenseController
from KeyDispatcher import KeyDispatcher
from Display import Display
from DataLogger import SQLiteLogger
DEVICE = "PiSense"
class Handler:
def __init__(self, display, logger, sensor):
self.display = display
self.logger = logger
self.sensor = sensor
self.logger.log(DEVICE, "running", 1)
def read(self):
values = {}
for reading in self.sensor.get_data():
values[reading[1]] = reading[2]
self.logger.log(DEVICE, reading[1], reading[2], reading[0])
display.show_properties(values, self.sensor.get_properties())
return True
def quit(self):
self.logger.log(DEVICE, "running", 0)
return False
with SenseController() as sensor, KeyDispatcher() as dispatcher, SQLiteLogger() as logger:
# setup display
display = Display("PiSense")
# setup key handlers
handler = Handler(display, logger, sensor)
dispatcher.add("q", handler, "quit")
# start processing key presses
while True:
if dispatcher.can_process_key():
if not dispatcher.process_key():
break
else:
handler.read()
## Instruction:
Add ability to control read rate
## Code After:
from Sensor import SenseController
from KeyDispatcher import KeyDispatcher
from Display import Display
from DataLogger import SQLiteLogger
import time
DEVICE = "PiSense"
DELAY = 0.0
class Handler:
def __init__(self, display, logger, sensor):
self.display = display
self.logger = logger
self.sensor = sensor
self.logger.log(DEVICE, "running", 1)
def read(self):
values = {}
for reading in self.sensor.get_data():
values[reading[1]] = reading[2]
self.logger.log(DEVICE, reading[1], reading[2], reading[0])
display.show_properties(values, self.sensor.get_properties())
return True
def quit(self):
self.logger.log(DEVICE, "running", 0)
return False
with SenseController() as sensor, KeyDispatcher() as dispatcher, SQLiteLogger() as logger:
# setup display
display = Display("PiSense")
# setup key handlers
handler = Handler(display, logger, sensor)
dispatcher.add("q", handler, "quit")
# start processing key presses
while True:
if dispatcher.can_process_key():
if not dispatcher.process_key():
break
else:
handler.read()
time.sleep(DELAY)
|
from Sensor import SenseController
from KeyDispatcher import KeyDispatcher
from Display import Display
from DataLogger import SQLiteLogger
+ import time
DEVICE = "PiSense"
+ DELAY = 0.0
class Handler:
def __init__(self, display, logger, sensor):
self.display = display
self.logger = logger
self.sensor = sensor
self.logger.log(DEVICE, "running", 1)
def read(self):
values = {}
for reading in self.sensor.get_data():
values[reading[1]] = reading[2]
self.logger.log(DEVICE, reading[1], reading[2], reading[0])
display.show_properties(values, self.sensor.get_properties())
return True
def quit(self):
self.logger.log(DEVICE, "running", 0)
return False
with SenseController() as sensor, KeyDispatcher() as dispatcher, SQLiteLogger() as logger:
# setup display
display = Display("PiSense")
# setup key handlers
handler = Handler(display, logger, sensor)
dispatcher.add("q", handler, "quit")
# start processing key presses
while True:
if dispatcher.can_process_key():
if not dispatcher.process_key():
break
else:
handler.read()
+ time.sleep(DELAY)
|
83c7fb070d0d79036ce697835e69c5e0aa2e14b7
|
app/core/info.py
|
app/core/info.py
|
import os
import pathlib
# RELEASE-UPDATE
APP_DIR = pathlib.Path(os.path.realpath(__file__)).parent.parent
ROOT_DIR = APP_DIR.parent
DEFAULT_DB_PATH = '/instance/storage/storage.db'
PROJECT_NAME = 'Zordon'
PROJECT_VERSION = '4.0.0'
PROJECT_FULL_NAME = '{} v{}'.format(PROJECT_NAME, PROJECT_VERSION)
|
import os
import pathlib
# RELEASE-UPDATE
APP_DIR = pathlib.Path(os.path.realpath(__file__)).parent.parent
ROOT_DIR = APP_DIR.parent
DEFAULT_DB_PATH = '/instance/storage'
PROJECT_NAME = 'Zordon'
PROJECT_VERSION = '4.0.0'
PROJECT_FULL_NAME = '{} v{}'.format(PROJECT_NAME, PROJECT_VERSION)
|
Fix storage path for Docker mode
|
Fix storage path for Docker mode
|
Python
|
mit
|
KrusnikViers/Zordon,KrusnikViers/Zordon
|
import os
import pathlib
# RELEASE-UPDATE
APP_DIR = pathlib.Path(os.path.realpath(__file__)).parent.parent
ROOT_DIR = APP_DIR.parent
- DEFAULT_DB_PATH = '/instance/storage/storage.db'
+ DEFAULT_DB_PATH = '/instance/storage'
PROJECT_NAME = 'Zordon'
PROJECT_VERSION = '4.0.0'
PROJECT_FULL_NAME = '{} v{}'.format(PROJECT_NAME, PROJECT_VERSION)
|
Fix storage path for Docker mode
|
## Code Before:
import os
import pathlib
# RELEASE-UPDATE
APP_DIR = pathlib.Path(os.path.realpath(__file__)).parent.parent
ROOT_DIR = APP_DIR.parent
DEFAULT_DB_PATH = '/instance/storage/storage.db'
PROJECT_NAME = 'Zordon'
PROJECT_VERSION = '4.0.0'
PROJECT_FULL_NAME = '{} v{}'.format(PROJECT_NAME, PROJECT_VERSION)
## Instruction:
Fix storage path for Docker mode
## Code After:
import os
import pathlib
# RELEASE-UPDATE
APP_DIR = pathlib.Path(os.path.realpath(__file__)).parent.parent
ROOT_DIR = APP_DIR.parent
DEFAULT_DB_PATH = '/instance/storage'
PROJECT_NAME = 'Zordon'
PROJECT_VERSION = '4.0.0'
PROJECT_FULL_NAME = '{} v{}'.format(PROJECT_NAME, PROJECT_VERSION)
|
import os
import pathlib
# RELEASE-UPDATE
APP_DIR = pathlib.Path(os.path.realpath(__file__)).parent.parent
ROOT_DIR = APP_DIR.parent
- DEFAULT_DB_PATH = '/instance/storage/storage.db'
? -----------
+ DEFAULT_DB_PATH = '/instance/storage'
PROJECT_NAME = 'Zordon'
PROJECT_VERSION = '4.0.0'
PROJECT_FULL_NAME = '{} v{}'.format(PROJECT_NAME, PROJECT_VERSION)
|
be4d21b5486f3bba5a4d844015d3d35630ac7d03
|
udata/auth/forms.py
|
udata/auth/forms.py
|
from __future__ import unicode_literals
from flask_security.forms import RegisterForm
from udata.forms import fields
from udata.forms import validators
class ExtendedRegisterForm(RegisterForm):
first_name = fields.StringField(
'First Name', [validators.Required('First name is required')])
last_name = fields.StringField(
'Last Name', [validators.Required('Last name is required')])
|
from __future__ import unicode_literals
from flask_security.forms import RegisterForm
from udata.forms import fields
from udata.forms import validators
from udata.i18n import lazy_gettext as _
class ExtendedRegisterForm(RegisterForm):
first_name = fields.StringField(
_('First name'), [validators.Required(_('First name is required'))])
last_name = fields.StringField(
_('Last name'), [validators.Required(_('Last name is required'))])
|
Apply i18n to First and Last name in registration form
|
Apply i18n to First and Last name in registration form
|
Python
|
agpl-3.0
|
etalab/udata,etalab/udata,etalab/udata,opendatateam/udata,opendatateam/udata,opendatateam/udata
|
from __future__ import unicode_literals
from flask_security.forms import RegisterForm
from udata.forms import fields
from udata.forms import validators
-
+ from udata.i18n import lazy_gettext as _
class ExtendedRegisterForm(RegisterForm):
first_name = fields.StringField(
- 'First Name', [validators.Required('First name is required')])
+ _('First name'), [validators.Required(_('First name is required'))])
last_name = fields.StringField(
- 'Last Name', [validators.Required('Last name is required')])
+ _('Last name'), [validators.Required(_('Last name is required'))])
|
Apply i18n to First and Last name in registration form
|
## Code Before:
from __future__ import unicode_literals
from flask_security.forms import RegisterForm
from udata.forms import fields
from udata.forms import validators
class ExtendedRegisterForm(RegisterForm):
first_name = fields.StringField(
'First Name', [validators.Required('First name is required')])
last_name = fields.StringField(
'Last Name', [validators.Required('Last name is required')])
## Instruction:
Apply i18n to First and Last name in registration form
## Code After:
from __future__ import unicode_literals
from flask_security.forms import RegisterForm
from udata.forms import fields
from udata.forms import validators
from udata.i18n import lazy_gettext as _
class ExtendedRegisterForm(RegisterForm):
first_name = fields.StringField(
_('First name'), [validators.Required(_('First name is required'))])
last_name = fields.StringField(
_('Last name'), [validators.Required(_('Last name is required'))])
|
from __future__ import unicode_literals
from flask_security.forms import RegisterForm
from udata.forms import fields
from udata.forms import validators
-
+ from udata.i18n import lazy_gettext as _
class ExtendedRegisterForm(RegisterForm):
first_name = fields.StringField(
- 'First Name', [validators.Required('First name is required')])
? ^
+ _('First name'), [validators.Required(_('First name is required'))])
? ++ ^ + ++ +
last_name = fields.StringField(
- 'Last Name', [validators.Required('Last name is required')])
? ^
+ _('Last name'), [validators.Required(_('Last name is required'))])
? ++ ^ + ++ +
|
29f91d362689a53e04557588e47f1ac3e8d0fadc
|
server/lib/pricing.py
|
server/lib/pricing.py
|
def price(printTime, filamentUsed, filament=None):
return round((printTime/60/60)*filament['price'])
|
def price(printTime, filamentUsed, filament=None):
return round((printTime/60/60)*200)
|
Set constant price for all filaments
|
Set constant price for all filaments
|
Python
|
agpl-3.0
|
MakersLab/custom-print
|
def price(printTime, filamentUsed, filament=None):
- return round((printTime/60/60)*filament['price'])
+ return round((printTime/60/60)*200)
|
Set constant price for all filaments
|
## Code Before:
def price(printTime, filamentUsed, filament=None):
return round((printTime/60/60)*filament['price'])
## Instruction:
Set constant price for all filaments
## Code After:
def price(printTime, filamentUsed, filament=None):
return round((printTime/60/60)*200)
|
def price(printTime, filamentUsed, filament=None):
- return round((printTime/60/60)*filament['price'])
? ^^^^^^^^^^^^^^^^^
+ return round((printTime/60/60)*200)
? ^^^
|
d48ae791364a0d29d60636adfde1f143858794cd
|
api/identifiers/serializers.py
|
api/identifiers/serializers.py
|
from rest_framework import serializers as ser
from api.base.utils import absolute_reverse
from api.base.serializers import JSONAPISerializer, RelationshipField, IDField, LinksField
class IdentifierSerializer(JSONAPISerializer):
category = ser.CharField(read_only=True)
filterable_fields = frozenset(['category'])
value = ser.CharField(read_only=True)
referent = RelationshipField(
related_view='registrations:registration-detail',
related_view_kwargs={'node_id': '<referent._id>'},
)
id = IDField(source='_id', read_only=True)
links = LinksField({'self': 'self_url'})
class Meta:
type_ = 'identifiers'
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def get_id(self, obj):
return obj._id
def get_detail_url(self, obj):
import ipdb; ipdb.set_trace()
return '{}/identifiers/{}'.format(obj.absolute_api_v2_url, obj._id)
def self_url(self, obj):
return absolute_reverse('identifiers:identifier-detail', kwargs={
'identifier_id': obj._id,
})
|
from rest_framework import serializers as ser
from api.base.utils import absolute_reverse
from api.base.serializers import JSONAPISerializer, RelationshipField, IDField, LinksField
class IdentifierSerializer(JSONAPISerializer):
category = ser.CharField(read_only=True)
filterable_fields = frozenset(['category'])
value = ser.CharField(read_only=True)
referent = RelationshipField(
related_view='registrations:registration-detail',
related_view_kwargs={'node_id': '<referent._id>'},
)
id = IDField(source='_id', read_only=True)
links = LinksField({'self': 'self_url'})
class Meta:
type_ = 'identifiers'
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def get_id(self, obj):
return obj._id
def get_detail_url(self, obj):
return '{}/identifiers/{}'.format(obj.absolute_api_v2_url, obj._id)
def self_url(self, obj):
return absolute_reverse('identifiers:identifier-detail', kwargs={
'identifier_id': obj._id,
})
|
Remove rogue debugger how embarassing
|
Remove rogue debugger how embarassing
|
Python
|
apache-2.0
|
rdhyee/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,acshi/osf.io,abought/osf.io,amyshi188/osf.io,erinspace/osf.io,DanielSBrown/osf.io,chrisseto/osf.io,leb2dg/osf.io,mattclark/osf.io,samchrisinger/osf.io,alexschiller/osf.io,mluke93/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,DanielSBrown/osf.io,crcresearch/osf.io,rdhyee/osf.io,caneruguz/osf.io,laurenrevere/osf.io,amyshi188/osf.io,acshi/osf.io,saradbowman/osf.io,samchrisinger/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,aaxelb/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,zamattiac/osf.io,mfraezz/osf.io,baylee-d/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,samchrisinger/osf.io,laurenrevere/osf.io,mattclark/osf.io,SSJohns/osf.io,acshi/osf.io,mluke93/osf.io,cslzchen/osf.io,wearpants/osf.io,hmoco/osf.io,crcresearch/osf.io,CenterForOpenScience/osf.io,SSJohns/osf.io,wearpants/osf.io,abought/osf.io,felliott/osf.io,wearpants/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,jnayak1/osf.io,TomBaxter/osf.io,abought/osf.io,pattisdr/osf.io,aaxelb/osf.io,Nesiehr/osf.io,jnayak1/osf.io,crcresearch/osf.io,alexschiller/osf.io,baylee-d/osf.io,baylee-d/osf.io,chennan47/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,emetsger/osf.io,samchrisinger/osf.io,adlius/osf.io,monikagrabowska/osf.io,zamattiac/osf.io,chrisseto/osf.io,jnayak1/osf.io,binoculars/osf.io,erinspace/osf.io,adlius/osf.io,cwisecarver/osf.io,emetsger/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,kwierman/osf.io,wearpants/osf.io,mluo613/osf.io,leb2dg/osf.io,zamattiac/osf.io,adlius/osf.io,cslzchen/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,erinspace/osf.io,monikagrabowska/osf.io,SSJohns/osf.io,chennan47/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,pattisdr/osf.io,TomBaxter/osf.io,sloria/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,amyshi188/osf.io,emetsger/osf.io,binoculars/osf.io,Nesiehr/osf.io,zamattiac/osf.io,kwierman/osf.io,mluo613/osf.io,icereval/osf.io,hmoco/osf.io,saradbowman/osf.io,
caseyrollins/osf.io,mfraezz/osf.io,caneruguz/osf.io,felliott/osf.io,mluo613/osf.io,rdhyee/osf.io,emetsger/osf.io,Nesiehr/osf.io,alexschiller/osf.io,cslzchen/osf.io,hmoco/osf.io,Nesiehr/osf.io,kwierman/osf.io,chrisseto/osf.io,jnayak1/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,felliott/osf.io,acshi/osf.io,sloria/osf.io,amyshi188/osf.io,mfraezz/osf.io,adlius/osf.io,HalcyonChimera/osf.io,felliott/osf.io,laurenrevere/osf.io,mluo613/osf.io,caseyrollins/osf.io,aaxelb/osf.io,kwierman/osf.io,monikagrabowska/osf.io,mluke93/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,icereval/osf.io,mattclark/osf.io,cslzchen/osf.io,binoculars/osf.io,HalcyonChimera/osf.io,abought/osf.io,icereval/osf.io,pattisdr/osf.io,DanielSBrown/osf.io,chrisseto/osf.io,brianjgeiger/osf.io,acshi/osf.io,cwisecarver/osf.io,cwisecarver/osf.io,mluke93/osf.io,Johnetordoff/osf.io
|
from rest_framework import serializers as ser
from api.base.utils import absolute_reverse
from api.base.serializers import JSONAPISerializer, RelationshipField, IDField, LinksField
class IdentifierSerializer(JSONAPISerializer):
category = ser.CharField(read_only=True)
filterable_fields = frozenset(['category'])
value = ser.CharField(read_only=True)
referent = RelationshipField(
related_view='registrations:registration-detail',
related_view_kwargs={'node_id': '<referent._id>'},
)
id = IDField(source='_id', read_only=True)
links = LinksField({'self': 'self_url'})
class Meta:
type_ = 'identifiers'
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def get_id(self, obj):
return obj._id
def get_detail_url(self, obj):
- import ipdb; ipdb.set_trace()
return '{}/identifiers/{}'.format(obj.absolute_api_v2_url, obj._id)
def self_url(self, obj):
return absolute_reverse('identifiers:identifier-detail', kwargs={
'identifier_id': obj._id,
})
|
Remove rogue debugger how embarassing
|
## Code Before:
from rest_framework import serializers as ser
from api.base.utils import absolute_reverse
from api.base.serializers import JSONAPISerializer, RelationshipField, IDField, LinksField
class IdentifierSerializer(JSONAPISerializer):
category = ser.CharField(read_only=True)
filterable_fields = frozenset(['category'])
value = ser.CharField(read_only=True)
referent = RelationshipField(
related_view='registrations:registration-detail',
related_view_kwargs={'node_id': '<referent._id>'},
)
id = IDField(source='_id', read_only=True)
links = LinksField({'self': 'self_url'})
class Meta:
type_ = 'identifiers'
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def get_id(self, obj):
return obj._id
def get_detail_url(self, obj):
import ipdb; ipdb.set_trace()
return '{}/identifiers/{}'.format(obj.absolute_api_v2_url, obj._id)
def self_url(self, obj):
return absolute_reverse('identifiers:identifier-detail', kwargs={
'identifier_id': obj._id,
})
## Instruction:
Remove rogue debugger how embarassing
## Code After:
from rest_framework import serializers as ser
from api.base.utils import absolute_reverse
from api.base.serializers import JSONAPISerializer, RelationshipField, IDField, LinksField
class IdentifierSerializer(JSONAPISerializer):
category = ser.CharField(read_only=True)
filterable_fields = frozenset(['category'])
value = ser.CharField(read_only=True)
referent = RelationshipField(
related_view='registrations:registration-detail',
related_view_kwargs={'node_id': '<referent._id>'},
)
id = IDField(source='_id', read_only=True)
links = LinksField({'self': 'self_url'})
class Meta:
type_ = 'identifiers'
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def get_id(self, obj):
return obj._id
def get_detail_url(self, obj):
return '{}/identifiers/{}'.format(obj.absolute_api_v2_url, obj._id)
def self_url(self, obj):
return absolute_reverse('identifiers:identifier-detail', kwargs={
'identifier_id': obj._id,
})
|
from rest_framework import serializers as ser
from api.base.utils import absolute_reverse
from api.base.serializers import JSONAPISerializer, RelationshipField, IDField, LinksField
class IdentifierSerializer(JSONAPISerializer):
category = ser.CharField(read_only=True)
filterable_fields = frozenset(['category'])
value = ser.CharField(read_only=True)
referent = RelationshipField(
related_view='registrations:registration-detail',
related_view_kwargs={'node_id': '<referent._id>'},
)
id = IDField(source='_id', read_only=True)
links = LinksField({'self': 'self_url'})
class Meta:
type_ = 'identifiers'
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def get_id(self, obj):
return obj._id
def get_detail_url(self, obj):
- import ipdb; ipdb.set_trace()
return '{}/identifiers/{}'.format(obj.absolute_api_v2_url, obj._id)
def self_url(self, obj):
return absolute_reverse('identifiers:identifier-detail', kwargs={
'identifier_id': obj._id,
})
|
8d3931fd5effabf9c5d56cb03ae15630ae984963
|
postalcodes_mexico/cli.py
|
postalcodes_mexico/cli.py
|
"""Console script for postalcodes_mexico."""
import sys
import click
@click.command()
def main(args=None):
"""Console script for postalcodes_mexico."""
click.echo("Replace this message by putting your code into "
"postalcodes_mexico.cli.main")
click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
"""Console script for postalcodes_mexico."""
import sys
import click
from postalcodes_mexico import postalcodes_mexico
@click.command()
@click.argument('postalcode', type=str)
def main(postalcode):
"""Console script for postalcodes_mexico."""
places = postalcodes_mexico.places(postalcode)
click.echo(places)
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
Create simple CLI for the `places` function
|
Create simple CLI for the `places` function
|
Python
|
mit
|
FlowFX/postalcodes_mexico
|
"""Console script for postalcodes_mexico."""
import sys
import click
+ from postalcodes_mexico import postalcodes_mexico
+
@click.command()
- def main(args=None):
+ @click.argument('postalcode', type=str)
+ def main(postalcode):
"""Console script for postalcodes_mexico."""
+ places = postalcodes_mexico.places(postalcode)
+ click.echo(places)
- click.echo("Replace this message by putting your code into "
- "postalcodes_mexico.cli.main")
- click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
Create simple CLI for the `places` function
|
## Code Before:
"""Console script for postalcodes_mexico."""
import sys
import click
@click.command()
def main(args=None):
"""Console script for postalcodes_mexico."""
click.echo("Replace this message by putting your code into "
"postalcodes_mexico.cli.main")
click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
## Instruction:
Create simple CLI for the `places` function
## Code After:
"""Console script for postalcodes_mexico."""
import sys
import click
from postalcodes_mexico import postalcodes_mexico
@click.command()
@click.argument('postalcode', type=str)
def main(postalcode):
"""Console script for postalcodes_mexico."""
places = postalcodes_mexico.places(postalcode)
click.echo(places)
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
"""Console script for postalcodes_mexico."""
import sys
import click
+ from postalcodes_mexico import postalcodes_mexico
+
@click.command()
- def main(args=None):
+ @click.argument('postalcode', type=str)
+ def main(postalcode):
"""Console script for postalcodes_mexico."""
+ places = postalcodes_mexico.places(postalcode)
+ click.echo(places)
- click.echo("Replace this message by putting your code into "
- "postalcodes_mexico.cli.main")
- click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
f4c9482e41ec2ee6c894a413e8fcb0349a9edbd1
|
tapiriik/web/templatetags/displayutils.py
|
tapiriik/web/templatetags/displayutils.py
|
from django import template
import json
register = template.Library()
@register.filter(name="format_meters")
def meters_to_kms(value):
try:
return round(value / 1000)
except:
return "NaN"
@register.filter(name='json')
def jsonit(obj):
return json.dumps(obj)
@register.filter(name='dict_get')
def dict_get(tdict, key):
if type(tdict) is not dict:
tdict = tdict.__dict__
return tdict.get(key, None)
@register.filter(name='format')
def format(format, var):
return format.format(var)
@register.simple_tag
def stringformat(value, *args):
return value.format(*args)
@register.filter(name="percentage")
def percentage(value, *args):
if not value:
return "NaN"
return str(round(float(value) * 100)) + "%"
|
from django import template
import json
register = template.Library()
@register.filter(name="format_meters")
def meters_to_kms(value):
try:
return round(value / 1000)
except:
return "NaN"
@register.filter(name='json')
def jsonit(obj):
return json.dumps(obj)
@register.filter(name='dict_get')
def dict_get(tdict, key):
if type(tdict) is not dict:
tdict = tdict.__dict__
return tdict.get(key, None)
@register.filter(name='format')
def format(format, var):
return format.format(var)
@register.simple_tag
def stringformat(value, *args):
return value.format(*args)
@register.filter(name="percentage")
def percentage(value, *args):
if not value:
return "NaN"
try:
return str(round(float(value) * 100)) + "%"
except ValueError:
return value
|
Fix broken diagnostic dashboard with new sync progress values
|
Fix broken diagnostic dashboard with new sync progress values
|
Python
|
apache-2.0
|
campbellr/tapiriik,niosus/tapiriik,gavioto/tapiriik,cheatos101/tapiriik,cheatos101/tapiriik,brunoflores/tapiriik,abhijit86k/tapiriik,mjnbike/tapiriik,dlenski/tapiriik,abhijit86k/tapiriik,cpfair/tapiriik,marxin/tapiriik,abhijit86k/tapiriik,dlenski/tapiriik,cheatos101/tapiriik,abs0/tapiriik,niosus/tapiriik,dmschreiber/tapiriik,gavioto/tapiriik,cmgrote/tapiriik,campbellr/tapiriik,abs0/tapiriik,cheatos101/tapiriik,brunoflores/tapiriik,mjnbike/tapiriik,cmgrote/tapiriik,cpfair/tapiriik,marxin/tapiriik,dmschreiber/tapiriik,mduggan/tapiriik,cmgrote/tapiriik,mduggan/tapiriik,dmschreiber/tapiriik,cgourlay/tapiriik,brunoflores/tapiriik,dlenski/tapiriik,mjnbike/tapiriik,cpfair/tapiriik,abs0/tapiriik,gavioto/tapiriik,campbellr/tapiriik,marxin/tapiriik,campbellr/tapiriik,dlenski/tapiriik,mduggan/tapiriik,gavioto/tapiriik,brunoflores/tapiriik,mduggan/tapiriik,olamy/tapiriik,niosus/tapiriik,marxin/tapiriik,cgourlay/tapiriik,dmschreiber/tapiriik,abhijit86k/tapiriik,mjnbike/tapiriik,cmgrote/tapiriik,cgourlay/tapiriik,cpfair/tapiriik,niosus/tapiriik,olamy/tapiriik,cgourlay/tapiriik,olamy/tapiriik,olamy/tapiriik,abs0/tapiriik
|
from django import template
import json
register = template.Library()
@register.filter(name="format_meters")
def meters_to_kms(value):
try:
return round(value / 1000)
except:
return "NaN"
@register.filter(name='json')
def jsonit(obj):
return json.dumps(obj)
@register.filter(name='dict_get')
def dict_get(tdict, key):
if type(tdict) is not dict:
tdict = tdict.__dict__
return tdict.get(key, None)
@register.filter(name='format')
def format(format, var):
return format.format(var)
@register.simple_tag
def stringformat(value, *args):
return value.format(*args)
@register.filter(name="percentage")
def percentage(value, *args):
if not value:
return "NaN"
+ try:
- return str(round(float(value) * 100)) + "%"
+ return str(round(float(value) * 100)) + "%"
+ except ValueError:
+ return value
|
Fix broken diagnostic dashboard with new sync progress values
|
## Code Before:
from django import template
import json
register = template.Library()
@register.filter(name="format_meters")
def meters_to_kms(value):
try:
return round(value / 1000)
except:
return "NaN"
@register.filter(name='json')
def jsonit(obj):
return json.dumps(obj)
@register.filter(name='dict_get')
def dict_get(tdict, key):
if type(tdict) is not dict:
tdict = tdict.__dict__
return tdict.get(key, None)
@register.filter(name='format')
def format(format, var):
return format.format(var)
@register.simple_tag
def stringformat(value, *args):
return value.format(*args)
@register.filter(name="percentage")
def percentage(value, *args):
if not value:
return "NaN"
return str(round(float(value) * 100)) + "%"
## Instruction:
Fix broken diagnostic dashboard with new sync progress values
## Code After:
from django import template
import json
register = template.Library()
@register.filter(name="format_meters")
def meters_to_kms(value):
try:
return round(value / 1000)
except:
return "NaN"
@register.filter(name='json')
def jsonit(obj):
return json.dumps(obj)
@register.filter(name='dict_get')
def dict_get(tdict, key):
if type(tdict) is not dict:
tdict = tdict.__dict__
return tdict.get(key, None)
@register.filter(name='format')
def format(format, var):
return format.format(var)
@register.simple_tag
def stringformat(value, *args):
return value.format(*args)
@register.filter(name="percentage")
def percentage(value, *args):
if not value:
return "NaN"
try:
return str(round(float(value) * 100)) + "%"
except ValueError:
return value
|
from django import template
import json
register = template.Library()
@register.filter(name="format_meters")
def meters_to_kms(value):
try:
return round(value / 1000)
except:
return "NaN"
@register.filter(name='json')
def jsonit(obj):
return json.dumps(obj)
@register.filter(name='dict_get')
def dict_get(tdict, key):
if type(tdict) is not dict:
tdict = tdict.__dict__
return tdict.get(key, None)
@register.filter(name='format')
def format(format, var):
return format.format(var)
@register.simple_tag
def stringformat(value, *args):
return value.format(*args)
@register.filter(name="percentage")
def percentage(value, *args):
if not value:
return "NaN"
+ try:
- return str(round(float(value) * 100)) + "%"
+ return str(round(float(value) * 100)) + "%"
? ++++
+ except ValueError:
+ return value
|
9e666e97b07d7c08e434791a061086010da6e6eb
|
main.py
|
main.py
|
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def main():
bearer_token = get_access_token()
if __name__ == '__main__':
main()
|
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def get_latest_tweet(token):
parameters = {'screen_name': 'TwoHeadlines',
'count': 1,
'trim_user': True}
headers = {'Authorization': 'Bearer ' + token}
r = requests.get('https://api.twitter.com/1.1/statuses/user_timeline.json',
params=parameters, headers=headers)
return r.json(encoding='utf8')[0]['text']
def main():
bearer_token = get_access_token()
latest_tweet = get_latest_tweet(bearer_token)
if __name__ == '__main__':
main()
|
Add ability to get the latest TwoHeadlines tweet
|
Add ability to get the latest TwoHeadlines tweet
|
Python
|
mit
|
underyx/TheMajorNews
|
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
+ def get_latest_tweet(token):
+ parameters = {'screen_name': 'TwoHeadlines',
+ 'count': 1,
+ 'trim_user': True}
+
+ headers = {'Authorization': 'Bearer ' + token}
+
+ r = requests.get('https://api.twitter.com/1.1/statuses/user_timeline.json',
+ params=parameters, headers=headers)
+
+ return r.json(encoding='utf8')[0]['text']
+
+
def main():
bearer_token = get_access_token()
+ latest_tweet = get_latest_tweet(bearer_token)
if __name__ == '__main__':
main()
|
Add ability to get the latest TwoHeadlines tweet
|
## Code Before:
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def main():
bearer_token = get_access_token()
if __name__ == '__main__':
main()
## Instruction:
Add ability to get the latest TwoHeadlines tweet
## Code After:
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def get_latest_tweet(token):
parameters = {'screen_name': 'TwoHeadlines',
'count': 1,
'trim_user': True}
headers = {'Authorization': 'Bearer ' + token}
r = requests.get('https://api.twitter.com/1.1/statuses/user_timeline.json',
params=parameters, headers=headers)
return r.json(encoding='utf8')[0]['text']
def main():
bearer_token = get_access_token()
latest_tweet = get_latest_tweet(bearer_token)
if __name__ == '__main__':
main()
|
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
+ def get_latest_tweet(token):
+ parameters = {'screen_name': 'TwoHeadlines',
+ 'count': 1,
+ 'trim_user': True}
+
+ headers = {'Authorization': 'Bearer ' + token}
+
+ r = requests.get('https://api.twitter.com/1.1/statuses/user_timeline.json',
+ params=parameters, headers=headers)
+
+ return r.json(encoding='utf8')[0]['text']
+
+
def main():
bearer_token = get_access_token()
+ latest_tweet = get_latest_tweet(bearer_token)
if __name__ == '__main__':
main()
|
7ec28d5b8be40b505a20a4670857278ad41f760b
|
src/puzzle/puzzlepedia/puzzlepedia.py
|
src/puzzle/puzzlepedia/puzzlepedia.py
|
from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
def parse(source, hint=None):
_init()
result = puzzle.Puzzle('first stage', source, hint=hint)
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
|
from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
def parse(source, hint=None, threshold=None):
_init()
result = puzzle.Puzzle('first stage', source, hint=hint, threshold=threshold)
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
|
Allow "threshold" to be specified during parse(...).
|
Allow "threshold" to be specified during parse(...).
|
Python
|
mit
|
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
|
from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
- def parse(source, hint=None):
+ def parse(source, hint=None, threshold=None):
_init()
- result = puzzle.Puzzle('first stage', source, hint=hint)
+ result = puzzle.Puzzle('first stage', source, hint=hint, threshold=threshold)
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
|
Allow "threshold" to be specified during parse(...).
|
## Code Before:
from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
def parse(source, hint=None):
_init()
result = puzzle.Puzzle('first stage', source, hint=hint)
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
## Instruction:
Allow "threshold" to be specified during parse(...).
## Code After:
from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
def parse(source, hint=None, threshold=None):
_init()
result = puzzle.Puzzle('first stage', source, hint=hint, threshold=threshold)
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
|
from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
- def parse(source, hint=None):
+ def parse(source, hint=None, threshold=None):
? ++++++++++++++++
_init()
- result = puzzle.Puzzle('first stage', source, hint=hint)
+ result = puzzle.Puzzle('first stage', source, hint=hint, threshold=threshold)
? +++++++++++++++++++++
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
|
dceae6725d10a5d1af6287e1b684c651683d1750
|
runtests.py
|
runtests.py
|
import sys
from os.path import dirname, abspath
from django.conf import settings
if len(sys.argv) > 1 and 'postgres' in sys.argv:
sys.argv.remove('postgres')
db_engine = 'postgresql_psycopg2'
db_name = 'test_main'
else:
db_engine = 'sqlite3'
db_name = ''
if not settings.configured:
settings.configure(
DATABASE_ENGINE = db_engine,
DATABASE_NAME = db_name,
INSTALLED_APPS = [
'django.contrib.contenttypes',
'genericm2m',
'genericm2m.genericm2m_tests',
],
)
from django.test.simple import run_tests
def runtests(*test_args):
if not test_args:
test_args = ['genericm2m_tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
failures = run_tests(test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
import sys
from os.path import dirname, abspath
import django
from django.conf import settings
if len(sys.argv) > 1 and 'postgres' in sys.argv:
sys.argv.remove('postgres')
db_engine = 'django.db.backends.postgresql_psycopg2'
db_name = 'test_main'
else:
db_engine = 'django.db.backends.sqlite3'
db_name = ''
if not settings.configured:
settings.configure(
DATABASES=dict(default=dict(ENGINE=db_engine, NAME=db_name)),
INSTALLED_APPS = [
'django.contrib.contenttypes',
'genericm2m',
'genericm2m.genericm2m_tests',
],
)
from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
test_args = ['genericm2m_tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
Allow tests to be run on 1.4
|
Allow tests to be run on 1.4
|
Python
|
mit
|
jayfk/django-generic-m2m,jayfk/django-generic-m2m,coleifer/django-generic-m2m,coleifer/django-generic-m2m,coleifer/django-generic-m2m
|
import sys
from os.path import dirname, abspath
+ import django
from django.conf import settings
if len(sys.argv) > 1 and 'postgres' in sys.argv:
sys.argv.remove('postgres')
- db_engine = 'postgresql_psycopg2'
+ db_engine = 'django.db.backends.postgresql_psycopg2'
db_name = 'test_main'
else:
- db_engine = 'sqlite3'
+ db_engine = 'django.db.backends.sqlite3'
db_name = ''
if not settings.configured:
settings.configure(
+ DATABASES=dict(default=dict(ENGINE=db_engine, NAME=db_name)),
- DATABASE_ENGINE = db_engine,
- DATABASE_NAME = db_name,
INSTALLED_APPS = [
'django.contrib.contenttypes',
'genericm2m',
'genericm2m.genericm2m_tests',
],
)
- from django.test.simple import run_tests
+ from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
test_args = ['genericm2m_tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
- failures = run_tests(test_args, verbosity=1, interactive=True)
+ TestRunner = get_runner(settings)
+ test_runner = TestRunner(verbosity=1, interactive=True)
+ failures = test_runner.run_tests(test_args)
sys.exit(failures)
-
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
Allow tests to be run on 1.4
|
## Code Before:
import sys
from os.path import dirname, abspath
from django.conf import settings
if len(sys.argv) > 1 and 'postgres' in sys.argv:
sys.argv.remove('postgres')
db_engine = 'postgresql_psycopg2'
db_name = 'test_main'
else:
db_engine = 'sqlite3'
db_name = ''
if not settings.configured:
settings.configure(
DATABASE_ENGINE = db_engine,
DATABASE_NAME = db_name,
INSTALLED_APPS = [
'django.contrib.contenttypes',
'genericm2m',
'genericm2m.genericm2m_tests',
],
)
from django.test.simple import run_tests
def runtests(*test_args):
if not test_args:
test_args = ['genericm2m_tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
failures = run_tests(test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
## Instruction:
Allow tests to be run on 1.4
## Code After:
import sys
from os.path import dirname, abspath
import django
from django.conf import settings
if len(sys.argv) > 1 and 'postgres' in sys.argv:
sys.argv.remove('postgres')
db_engine = 'django.db.backends.postgresql_psycopg2'
db_name = 'test_main'
else:
db_engine = 'django.db.backends.sqlite3'
db_name = ''
if not settings.configured:
settings.configure(
DATABASES=dict(default=dict(ENGINE=db_engine, NAME=db_name)),
INSTALLED_APPS = [
'django.contrib.contenttypes',
'genericm2m',
'genericm2m.genericm2m_tests',
],
)
from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
test_args = ['genericm2m_tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
import sys
from os.path import dirname, abspath
+ import django
from django.conf import settings
if len(sys.argv) > 1 and 'postgres' in sys.argv:
sys.argv.remove('postgres')
- db_engine = 'postgresql_psycopg2'
+ db_engine = 'django.db.backends.postgresql_psycopg2'
? +++++++++++++++++++
db_name = 'test_main'
else:
- db_engine = 'sqlite3'
+ db_engine = 'django.db.backends.sqlite3'
db_name = ''
if not settings.configured:
settings.configure(
+ DATABASES=dict(default=dict(ENGINE=db_engine, NAME=db_name)),
- DATABASE_ENGINE = db_engine,
- DATABASE_NAME = db_name,
INSTALLED_APPS = [
'django.contrib.contenttypes',
'genericm2m',
'genericm2m.genericm2m_tests',
],
)
- from django.test.simple import run_tests
? ----- ^^ ^^^
+ from django.test.utils import get_runner
? ++++ ++++ ^ ^
def runtests(*test_args):
if not test_args:
test_args = ['genericm2m_tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
- failures = run_tests(test_args, verbosity=1, interactive=True)
+ TestRunner = get_runner(settings)
+ test_runner = TestRunner(verbosity=1, interactive=True)
+ failures = test_runner.run_tests(test_args)
sys.exit(failures)
-
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
e4e930587e6ad145dbdbf1f742b942d63bf645a2
|
wandb/git_repo.py
|
wandb/git_repo.py
|
from git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
return self.repo.create_tag("wandb/"+name, message=message, force=True)
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
|
from git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
try:
return self.repo.create_tag("wandb/"+name, message=message, force=True)
except GitCommandError:
print("Failed to tag repository.")
return None
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
|
Handle no git user configured
|
Handle no git user configured
|
Python
|
mit
|
wandb/client,wandb/client,wandb/client
|
from git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
+ try:
- return self.repo.create_tag("wandb/"+name, message=message, force=True)
+ return self.repo.create_tag("wandb/"+name, message=message, force=True)
+ except GitCommandError:
+ print("Failed to tag repository.")
+ return None
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
|
Handle no git user configured
|
## Code Before:
from git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
return self.repo.create_tag("wandb/"+name, message=message, force=True)
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
## Instruction:
Handle no git user configured
## Code After:
from git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
try:
return self.repo.create_tag("wandb/"+name, message=message, force=True)
except GitCommandError:
print("Failed to tag repository.")
return None
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
|
from git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
+ try:
- return self.repo.create_tag("wandb/"+name, message=message, force=True)
+ return self.repo.create_tag("wandb/"+name, message=message, force=True)
? ++++
+ except GitCommandError:
+ print("Failed to tag repository.")
+ return None
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
|
3cf7ca56ac156154dc08433955fff4b15e7eb331
|
mpd_mypy.py
|
mpd_mypy.py
|
import mpd
SERVER = "localhost"
PORT = 6600
def connect(mpdclient):
"""
Handle connection to the mpd server
"""
mpdclient.connect(SERVER, PORT)
mpdclient = mpd.MPDClient()
connect(mpdclient)
bands = set(mpdclient.list("artist"))
|
import mpd
SERVER = "localhost"
PORT = 6600
def connect(mpdclient):
"""
Handle connection to the mpd server
"""
mpdclient.connect(SERVER, PORT)
mpdclient = mpd.MPDClient()
connect(mpdclient)
bands = set(str(artist).lower() for artist in mpdclient.list("artist")
if artist != "")
print(bands)
|
Apply filters on artists to clean duplicates
|
Apply filters on artists to clean duplicates
|
Python
|
bsd-2-clause
|
Anthony25/mpd_muspy
|
import mpd
SERVER = "localhost"
PORT = 6600
def connect(mpdclient):
"""
Handle connection to the mpd server
"""
mpdclient.connect(SERVER, PORT)
mpdclient = mpd.MPDClient()
connect(mpdclient)
- bands = set(mpdclient.list("artist"))
+ bands = set(str(artist).lower() for artist in mpdclient.list("artist")
+ if artist != "")
+ print(bands)
|
Apply filters on artists to clean duplicates
|
## Code Before:
import mpd
SERVER = "localhost"
PORT = 6600
def connect(mpdclient):
"""
Handle connection to the mpd server
"""
mpdclient.connect(SERVER, PORT)
mpdclient = mpd.MPDClient()
connect(mpdclient)
bands = set(mpdclient.list("artist"))
## Instruction:
Apply filters on artists to clean duplicates
## Code After:
import mpd
SERVER = "localhost"
PORT = 6600
def connect(mpdclient):
"""
Handle connection to the mpd server
"""
mpdclient.connect(SERVER, PORT)
mpdclient = mpd.MPDClient()
connect(mpdclient)
bands = set(str(artist).lower() for artist in mpdclient.list("artist")
if artist != "")
print(bands)
|
import mpd
SERVER = "localhost"
PORT = 6600
def connect(mpdclient):
"""
Handle connection to the mpd server
"""
mpdclient.connect(SERVER, PORT)
mpdclient = mpd.MPDClient()
connect(mpdclient)
- bands = set(mpdclient.list("artist"))
+ bands = set(str(artist).lower() for artist in mpdclient.list("artist")
+ if artist != "")
+ print(bands)
|
021225cbce30b70c350133f5ae3cae9409bdd6ae
|
dbaas/dbaas_services/analyzing/admin/analyze.py
|
dbaas/dbaas_services/analyzing/admin/analyze.py
|
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
|
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
search_fields = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name",)
list_filter = ("analyzed_at", "memory_alarm", "cpu_alarm")
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
|
Add filters to analyzing admin
|
Add filters to analyzing admin
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
+ search_fields = ("analyzed_at", "database_name", "engine_name",
+ "environment_name", "instance_name",)
+ list_filter = ("analyzed_at", "memory_alarm", "cpu_alarm")
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
|
Add filters to analyzing admin
|
## Code Before:
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
## Instruction:
Add filters to analyzing admin
## Code After:
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
search_fields = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name",)
list_filter = ("analyzed_at", "memory_alarm", "cpu_alarm")
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
|
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
+ search_fields = ("analyzed_at", "database_name", "engine_name",
+ "environment_name", "instance_name",)
+ list_filter = ("analyzed_at", "memory_alarm", "cpu_alarm")
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
|
86197635800e6b19a6b95e9be932999c79042720
|
dipy/utils/arrfuncs.py
|
dipy/utils/arrfuncs.py
|
""" Utilities to manipulate numpy arrays """
import sys
import numpy as np
from nibabel.volumeutils import endian_codes, native_code, swapped_code
def as_native_array(arr):
""" Return `arr` as native byteordered array
If arr is already native byte ordered, return unchanged. If it is opposite
endian, then make a native byte ordered copy and return that
Parameters
----------
arr : ndarray
Returns
-------
native_arr : ndarray
If `arr` was native order, this is just `arr`. Otherwise it's a new
array such that ``np.all(native_arr == arr)``, with native byte
ordering.
"""
if endian_codes[arr.dtype.byteorder] == native_code:
return arr
return arr.byteswap().newbyteorder()
|
""" Utilities to manipulate numpy arrays """
import sys
import numpy as np
from nibabel.volumeutils import endian_codes, native_code, swapped_code
def as_native_array(arr):
""" Return `arr` as native byteordered array
If arr is already native byte ordered, return unchanged. If it is opposite
endian, then make a native byte ordered copy and return that
Parameters
----------
arr : ndarray
Returns
-------
native_arr : ndarray
If `arr` was native order, this is just `arr`. Otherwise it's a new
array such that ``np.all(native_arr == arr)``, with native byte
ordering.
"""
if endian_codes[arr.dtype.byteorder] == native_code:
return arr
return arr.byteswap().newbyteorder()
def pinv_vec(a, rcond=1e-15):
"""Vectorized version of numpy.linalg.pinv"""
a = np.asarray(a)
swap = np.arange(a.ndim)
swap[[-2, -1]] = swap[[-1, -2]]
u, s, v = np.linalg.svd(a, full_matrices=False)
cutoff = np.maximum.reduce(s, axis=-1, keepdims=True) * rcond
mask = s > cutoff
s[mask] = 1. / s[mask]
s[~mask] = 0
return np.einsum('...ij,...jk',
np.transpose(v, swap) * s[..., None, :],
np.transpose(u, swap))
|
Add vectorized version of np.linalg.pinv
|
Add vectorized version of np.linalg.pinv
|
Python
|
bsd-3-clause
|
matthieudumont/dipy,JohnGriffiths/dipy,FrancoisRheaultUS/dipy,matthieudumont/dipy,StongeEtienne/dipy,villalonreina/dipy,FrancoisRheaultUS/dipy,JohnGriffiths/dipy,nilgoyyou/dipy,villalonreina/dipy,nilgoyyou/dipy,StongeEtienne/dipy
|
""" Utilities to manipulate numpy arrays """
import sys
import numpy as np
from nibabel.volumeutils import endian_codes, native_code, swapped_code
def as_native_array(arr):
""" Return `arr` as native byteordered array
If arr is already native byte ordered, return unchanged. If it is opposite
endian, then make a native byte ordered copy and return that
Parameters
----------
arr : ndarray
Returns
-------
native_arr : ndarray
If `arr` was native order, this is just `arr`. Otherwise it's a new
array such that ``np.all(native_arr == arr)``, with native byte
ordering.
"""
if endian_codes[arr.dtype.byteorder] == native_code:
return arr
return arr.byteswap().newbyteorder()
+ def pinv_vec(a, rcond=1e-15):
+ """Vectorized version of numpy.linalg.pinv"""
+
+ a = np.asarray(a)
+ swap = np.arange(a.ndim)
+ swap[[-2, -1]] = swap[[-1, -2]]
+ u, s, v = np.linalg.svd(a, full_matrices=False)
+ cutoff = np.maximum.reduce(s, axis=-1, keepdims=True) * rcond
+ mask = s > cutoff
+ s[mask] = 1. / s[mask]
+ s[~mask] = 0
+ return np.einsum('...ij,...jk',
+ np.transpose(v, swap) * s[..., None, :],
+ np.transpose(u, swap))
+
|
Add vectorized version of np.linalg.pinv
|
## Code Before:
""" Utilities to manipulate numpy arrays """
import sys
import numpy as np
from nibabel.volumeutils import endian_codes, native_code, swapped_code
def as_native_array(arr):
""" Return `arr` as native byteordered array
If arr is already native byte ordered, return unchanged. If it is opposite
endian, then make a native byte ordered copy and return that
Parameters
----------
arr : ndarray
Returns
-------
native_arr : ndarray
If `arr` was native order, this is just `arr`. Otherwise it's a new
array such that ``np.all(native_arr == arr)``, with native byte
ordering.
"""
if endian_codes[arr.dtype.byteorder] == native_code:
return arr
return arr.byteswap().newbyteorder()
## Instruction:
Add vectorized version of np.linalg.pinv
## Code After:
""" Utilities to manipulate numpy arrays """
import sys
import numpy as np
from nibabel.volumeutils import endian_codes, native_code, swapped_code
def as_native_array(arr):
""" Return `arr` as native byteordered array
If arr is already native byte ordered, return unchanged. If it is opposite
endian, then make a native byte ordered copy and return that
Parameters
----------
arr : ndarray
Returns
-------
native_arr : ndarray
If `arr` was native order, this is just `arr`. Otherwise it's a new
array such that ``np.all(native_arr == arr)``, with native byte
ordering.
"""
if endian_codes[arr.dtype.byteorder] == native_code:
return arr
return arr.byteswap().newbyteorder()
def pinv_vec(a, rcond=1e-15):
"""Vectorized version of numpy.linalg.pinv"""
a = np.asarray(a)
swap = np.arange(a.ndim)
swap[[-2, -1]] = swap[[-1, -2]]
u, s, v = np.linalg.svd(a, full_matrices=False)
cutoff = np.maximum.reduce(s, axis=-1, keepdims=True) * rcond
mask = s > cutoff
s[mask] = 1. / s[mask]
s[~mask] = 0
return np.einsum('...ij,...jk',
np.transpose(v, swap) * s[..., None, :],
np.transpose(u, swap))
|
""" Utilities to manipulate numpy arrays """
import sys
import numpy as np
from nibabel.volumeutils import endian_codes, native_code, swapped_code
def as_native_array(arr):
""" Return `arr` as native byteordered array
If arr is already native byte ordered, return unchanged. If it is opposite
endian, then make a native byte ordered copy and return that
Parameters
----------
arr : ndarray
Returns
-------
native_arr : ndarray
If `arr` was native order, this is just `arr`. Otherwise it's a new
array such that ``np.all(native_arr == arr)``, with native byte
ordering.
"""
if endian_codes[arr.dtype.byteorder] == native_code:
return arr
return arr.byteswap().newbyteorder()
+
+ def pinv_vec(a, rcond=1e-15):
+ """Vectorized version of numpy.linalg.pinv"""
+
+ a = np.asarray(a)
+ swap = np.arange(a.ndim)
+ swap[[-2, -1]] = swap[[-1, -2]]
+ u, s, v = np.linalg.svd(a, full_matrices=False)
+ cutoff = np.maximum.reduce(s, axis=-1, keepdims=True) * rcond
+ mask = s > cutoff
+ s[mask] = 1. / s[mask]
+ s[~mask] = 0
+ return np.einsum('...ij,...jk',
+ np.transpose(v, swap) * s[..., None, :],
+ np.transpose(u, swap))
|
661299275942813a0c45aa90db64c9603d287839
|
lib_common/src/d1_common/iter/string.py
|
lib_common/src/d1_common/iter/string.py
|
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
"""Generator that returns the bytes of a string in chunks"""
def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
self._string = string
self._chunk_size = chunk_size
def __iter__(self):
f = StringIO.StringIO(self._string)
while True:
chunk_str = f.read(self._chunk_size)
if not chunk_str:
break
yield chunk_str
|
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
"""Generator that returns the bytes of a string in chunks"""
def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
self._string = string
self._chunk_size = chunk_size
def __iter__(self):
f = StringIO.StringIO(self._string)
while True:
chunk_str = f.read(self._chunk_size)
if not chunk_str:
break
yield chunk_str
def __len__(self):
return len(self._string)
@property
def size(self):
return len(self._string)
|
Improve StringIterator to allow for more general usage
|
Improve StringIterator to allow for more general usage
|
Python
|
apache-2.0
|
DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python
|
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
"""Generator that returns the bytes of a string in chunks"""
def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
self._string = string
self._chunk_size = chunk_size
def __iter__(self):
f = StringIO.StringIO(self._string)
while True:
chunk_str = f.read(self._chunk_size)
if not chunk_str:
break
yield chunk_str
+ def __len__(self):
+ return len(self._string)
+
+ @property
+ def size(self):
+ return len(self._string)
+
|
Improve StringIterator to allow for more general usage
|
## Code Before:
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
"""Generator that returns the bytes of a string in chunks"""
def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
self._string = string
self._chunk_size = chunk_size
def __iter__(self):
f = StringIO.StringIO(self._string)
while True:
chunk_str = f.read(self._chunk_size)
if not chunk_str:
break
yield chunk_str
## Instruction:
Improve StringIterator to allow for more general usage
## Code After:
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
"""Generator that returns the bytes of a string in chunks"""
def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
self._string = string
self._chunk_size = chunk_size
def __iter__(self):
f = StringIO.StringIO(self._string)
while True:
chunk_str = f.read(self._chunk_size)
if not chunk_str:
break
yield chunk_str
def __len__(self):
return len(self._string)
@property
def size(self):
return len(self._string)
|
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
"""Generator that returns the bytes of a string in chunks"""
def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
self._string = string
self._chunk_size = chunk_size
def __iter__(self):
f = StringIO.StringIO(self._string)
while True:
chunk_str = f.read(self._chunk_size)
if not chunk_str:
break
yield chunk_str
+
+ def __len__(self):
+ return len(self._string)
+
+ @property
+ def size(self):
+ return len(self._string)
|
b28dd26792be9125d2fd3d5657431bc6ee7a5470
|
lobster/cmssw/actions.py
|
lobster/cmssw/actions.py
|
import datetime
import multiprocessing
from lobster.cmssw.plotting import Plotter
logger = multiprocessing.get_logger()
class DummyPlotter(object):
def make_plots(*args, **kwargs):
pass
class Actions(object):
def __init__(self, config):
if 'plotdir' in config:
logger.info('plots in {0} will be updated automatically'.format(config['plotdir']))
if 'foremen logs' in config:
logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs'])))
plotter = Plotter(config['filename'], config['plotdir'])
else:
plotter = DummyPlotter()
def plotf(q):
while q.get() not in ('stop', None):
plotter.make_plots(foremen=config.get('foremen logs'))
self.plotq = multiprocessing.Queue()
self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,))
self.plotp.start()
self.__last = datetime.datetime.now()
def __del__(self):
self.plotq.put('stop')
def take(self):
now = datetime.datetime.now()
if (now - self.__last).seconds > 15 * 60:
self.plotq.put('plot')
self.__last = now
|
import datetime
import multiprocessing
from lobster.cmssw.plotting import Plotter
logger = multiprocessing.get_logger()
class DummyPlotter(object):
def make_plots(*args, **kwargs):
pass
class Actions(object):
def __init__(self, config):
if 'plotdir' in config:
logger.info('plots in {0} will be updated automatically'.format(config['plotdir']))
if 'foremen logs' in config:
logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs'])))
plotter = Plotter(config['filename'], config['plotdir'])
else:
plotter = DummyPlotter()
def plotf(q):
while q.get() not in ('stop', None):
plotter.make_plots(foremen=config.get('foremen logs'))
self.plotq = multiprocessing.Queue()
self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,))
self.plotp.start()
logger.info('spawning process for automatic plotting with pid {0}'.format(self.plotp.pid))
self.__last = datetime.datetime.now()
def __del__(self):
self.plotq.put('stop')
def take(self):
now = datetime.datetime.now()
if (now - self.__last).seconds > 15 * 60:
self.plotq.put('plot')
self.__last = now
|
Add message in log with plotting process id.
|
Add message in log with plotting process id.
|
Python
|
mit
|
matz-e/lobster,matz-e/lobster,matz-e/lobster
|
import datetime
import multiprocessing
from lobster.cmssw.plotting import Plotter
logger = multiprocessing.get_logger()
class DummyPlotter(object):
def make_plots(*args, **kwargs):
pass
class Actions(object):
def __init__(self, config):
if 'plotdir' in config:
logger.info('plots in {0} will be updated automatically'.format(config['plotdir']))
if 'foremen logs' in config:
logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs'])))
plotter = Plotter(config['filename'], config['plotdir'])
else:
plotter = DummyPlotter()
def plotf(q):
while q.get() not in ('stop', None):
plotter.make_plots(foremen=config.get('foremen logs'))
self.plotq = multiprocessing.Queue()
self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,))
self.plotp.start()
+ logger.info('spawning process for automatic plotting with pid {0}'.format(self.plotp.pid))
self.__last = datetime.datetime.now()
def __del__(self):
self.plotq.put('stop')
def take(self):
now = datetime.datetime.now()
if (now - self.__last).seconds > 15 * 60:
self.plotq.put('plot')
self.__last = now
|
Add message in log with plotting process id.
|
## Code Before:
import datetime
import multiprocessing
from lobster.cmssw.plotting import Plotter
logger = multiprocessing.get_logger()
class DummyPlotter(object):
def make_plots(*args, **kwargs):
pass
class Actions(object):
def __init__(self, config):
if 'plotdir' in config:
logger.info('plots in {0} will be updated automatically'.format(config['plotdir']))
if 'foremen logs' in config:
logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs'])))
plotter = Plotter(config['filename'], config['plotdir'])
else:
plotter = DummyPlotter()
def plotf(q):
while q.get() not in ('stop', None):
plotter.make_plots(foremen=config.get('foremen logs'))
self.plotq = multiprocessing.Queue()
self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,))
self.plotp.start()
self.__last = datetime.datetime.now()
def __del__(self):
self.plotq.put('stop')
def take(self):
now = datetime.datetime.now()
if (now - self.__last).seconds > 15 * 60:
self.plotq.put('plot')
self.__last = now
## Instruction:
Add message in log with plotting process id.
## Code After:
import datetime
import multiprocessing
from lobster.cmssw.plotting import Plotter
logger = multiprocessing.get_logger()
class DummyPlotter(object):
def make_plots(*args, **kwargs):
pass
class Actions(object):
def __init__(self, config):
if 'plotdir' in config:
logger.info('plots in {0} will be updated automatically'.format(config['plotdir']))
if 'foremen logs' in config:
logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs'])))
plotter = Plotter(config['filename'], config['plotdir'])
else:
plotter = DummyPlotter()
def plotf(q):
while q.get() not in ('stop', None):
plotter.make_plots(foremen=config.get('foremen logs'))
self.plotq = multiprocessing.Queue()
self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,))
self.plotp.start()
logger.info('spawning process for automatic plotting with pid {0}'.format(self.plotp.pid))
self.__last = datetime.datetime.now()
def __del__(self):
self.plotq.put('stop')
def take(self):
now = datetime.datetime.now()
if (now - self.__last).seconds > 15 * 60:
self.plotq.put('plot')
self.__last = now
|
import datetime
import multiprocessing
from lobster.cmssw.plotting import Plotter
logger = multiprocessing.get_logger()
class DummyPlotter(object):
def make_plots(*args, **kwargs):
pass
class Actions(object):
def __init__(self, config):
if 'plotdir' in config:
logger.info('plots in {0} will be updated automatically'.format(config['plotdir']))
if 'foremen logs' in config:
logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs'])))
plotter = Plotter(config['filename'], config['plotdir'])
else:
plotter = DummyPlotter()
def plotf(q):
while q.get() not in ('stop', None):
plotter.make_plots(foremen=config.get('foremen logs'))
self.plotq = multiprocessing.Queue()
self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,))
self.plotp.start()
+ logger.info('spawning process for automatic plotting with pid {0}'.format(self.plotp.pid))
self.__last = datetime.datetime.now()
def __del__(self):
self.plotq.put('stop')
def take(self):
now = datetime.datetime.now()
if (now - self.__last).seconds > 15 * 60:
self.plotq.put('plot')
self.__last = now
|
0d33cf650480ea7b71e13ef67b566fc6ec1c93ee
|
demo/demo/todos/models.py
|
demo/demo/todos/models.py
|
from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
complete = models.BooleanField()
|
from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
|
Remove "complete" boolean from demo todo model.
|
Remove "complete" boolean from demo todo model.
|
Python
|
bsd-3-clause
|
jgerigmeyer/jquery-django-superformset,jgerigmeyer/jquery-django-superformset
|
from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
- complete = models.BooleanField()
|
Remove "complete" boolean from demo todo model.
|
## Code Before:
from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
complete = models.BooleanField()
## Instruction:
Remove "complete" boolean from demo todo model.
## Code After:
from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
|
from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
- complete = models.BooleanField()
|
a2b418c89e6ad3f85c88b7dfcc2238d62cb2e36e
|
karanja_me/polls/tests.py
|
karanja_me/polls/tests.py
|
from django.test import TestCase
# Create your tests here.
|
import datetime
from django.utils import timezone
from django.test import TestCase
from .models import Question
class QuestionMethodTest(TestCase):
def test_was_published_recently_with_future_question(self):
"""
was_published_recenlty() should return False for questions that the
pub_date is in the future
"""
time = timezone.now() + datetime.timedelta(days = 30)
future_question = Question(pub_date = time)
self.assertEqual(future_question_was_published_recently(), False)
|
Test case for Question method added
|
Test case for Question method added
A test case to avoid future published questions read as recently added
|
Python
|
mit
|
yoda-yoda/django-dive-in,yoda-yoda/django-dive-in,denisKaranja/django-dive-in,denisKaranja/django-dive-in
|
+ import datetime
+
+ from django.utils import timezone
from django.test import TestCase
- # Create your tests here.
+ from .models import Question
+ class QuestionMethodTest(TestCase):
+
+ def test_was_published_recently_with_future_question(self):
+ """
+ was_published_recenlty() should return False for questions that the
+ pub_date is in the future
+ """
+ time = timezone.now() + datetime.timedelta(days = 30)
+ future_question = Question(pub_date = time)
+ self.assertEqual(future_question_was_published_recently(), False)
+
+
|
Test case for Question method added
|
## Code Before:
from django.test import TestCase
# Create your tests here.
## Instruction:
Test case for Question method added
## Code After:
import datetime
from django.utils import timezone
from django.test import TestCase
from .models import Question
class QuestionMethodTest(TestCase):
def test_was_published_recently_with_future_question(self):
"""
was_published_recenlty() should return False for questions that the
pub_date is in the future
"""
time = timezone.now() + datetime.timedelta(days = 30)
future_question = Question(pub_date = time)
self.assertEqual(future_question_was_published_recently(), False)
|
+ import datetime
+
+ from django.utils import timezone
from django.test import TestCase
- # Create your tests here.
+ from .models import Question
+
+ class QuestionMethodTest(TestCase):
+
+ def test_was_published_recently_with_future_question(self):
+ """
+ was_published_recenlty() should return False for questions that the
+ pub_date is in the future
+ """
+ time = timezone.now() + datetime.timedelta(days = 30)
+ future_question = Question(pub_date = time)
+ self.assertEqual(future_question_was_published_recently(), False)
+
|
c2fd8515666476cc0b6760b72b6cd71ef030e6f4
|
thinc/neural/tests/unit/Affine/test_init.py
|
thinc/neural/tests/unit/Affine/test_init.py
|
from __future__ import unicode_literals
import pytest
from flexmock import flexmock
from hypothesis import given, strategies
import abc
from .... import vec2vec
from ....ops import NumpyOps
@pytest.fixture
def model_with_no_args():
model = vec2vec.Affine(ops=NumpyOps())
return model
def test_Affine_default_name(model_with_no_args):
assert model_with_no_args.name == 'affine'
def test_Affine_defaults_to_cpu(model_with_no_args):
assert isinstance(model_with_no_args.ops, NumpyOps)
def test_Affine_defaults_to_no_layers(model_with_no_args):
assert model_with_no_args.layers == []
def test_Affine_defaults_to_param_descriptions(model_with_no_args):
W_desc, b_desc = model_with_no_args.describe_params
xavier_init = model_with_no_args.ops.xavier_uniform_init
assert W_desc == ('W-affine', (None, None), xavier_init)
assert b_desc == ('b-affine', (None,), None)
def test_Model_defaults_to_no_output_shape(model_with_no_args):
assert model_with_no_args.output_shape == None
def test_Model_defaults_to_no_input_shape(model_with_no_args):
assert model_with_no_args.input_shape == None
def test_Model_defaults_to_0_size(model_with_no_args):
assert model_with_no_args.size == None
|
from __future__ import unicode_literals
import pytest
from mock import Mock, patch
from hypothesis import given, strategies
import abc
from ...._classes.affine import Affine
from ....ops import NumpyOps
@pytest.fixture
def model():
orig_desc = dict(Affine.descriptions)
orig_on_init = list(Affine.on_init_hooks)
Affine.descriptions = {
name: Mock(desc) for (name, desc) in Affine.descriptions.items()
}
Affine.on_init_hooks = [Mock(hook) for hook in Affine.on_init_hooks]
model = Affine()
for attr in model.descriptions:
setattr(model, attr, None)
Affine.descriptions = dict(orig_desc)
Affine.on_init_hooks = orig_on_init
return model
def test_Affine_default_name(model):
assert model.name == 'affine'
def test_Affine_calls_default_descriptions(model):
assert len(model.descriptions) == 5
for name, desc in model.descriptions.items():
desc.assert_called()
assert 'nB' in model.descriptions
assert 'nI' in model.descriptions
assert 'nO' in model.descriptions
assert 'W' in model.descriptions
assert 'b' in model.descriptions
def test_Affine_calls_init_hooks(model):
for hook in model.on_init_hooks:
hook.assert_called()
|
Test Affine init calls hooks correctly, and sets descriptors
|
Test Affine init calls hooks correctly, and sets descriptors
|
Python
|
mit
|
explosion/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc
|
from __future__ import unicode_literals
import pytest
- from flexmock import flexmock
+ from mock import Mock, patch
from hypothesis import given, strategies
import abc
- from .... import vec2vec
+ from ...._classes.affine import Affine
from ....ops import NumpyOps
@pytest.fixture
- def model_with_no_args():
- model = vec2vec.Affine(ops=NumpyOps())
+ def model():
+ orig_desc = dict(Affine.descriptions)
+ orig_on_init = list(Affine.on_init_hooks)
+ Affine.descriptions = {
+ name: Mock(desc) for (name, desc) in Affine.descriptions.items()
+ }
+ Affine.on_init_hooks = [Mock(hook) for hook in Affine.on_init_hooks]
+ model = Affine()
+ for attr in model.descriptions:
+ setattr(model, attr, None)
+ Affine.descriptions = dict(orig_desc)
+ Affine.on_init_hooks = orig_on_init
return model
- def test_Affine_default_name(model_with_no_args):
+ def test_Affine_default_name(model):
- assert model_with_no_args.name == 'affine'
+ assert model.name == 'affine'
- def test_Affine_defaults_to_cpu(model_with_no_args):
- assert isinstance(model_with_no_args.ops, NumpyOps)
+ def test_Affine_calls_default_descriptions(model):
+ assert len(model.descriptions) == 5
+ for name, desc in model.descriptions.items():
+ desc.assert_called()
+ assert 'nB' in model.descriptions
+ assert 'nI' in model.descriptions
+ assert 'nO' in model.descriptions
+ assert 'W' in model.descriptions
+ assert 'b' in model.descriptions
- def test_Affine_defaults_to_no_layers(model_with_no_args):
- assert model_with_no_args.layers == []
+ def test_Affine_calls_init_hooks(model):
+ for hook in model.on_init_hooks:
+ hook.assert_called()
-
- def test_Affine_defaults_to_param_descriptions(model_with_no_args):
- W_desc, b_desc = model_with_no_args.describe_params
- xavier_init = model_with_no_args.ops.xavier_uniform_init
- assert W_desc == ('W-affine', (None, None), xavier_init)
- assert b_desc == ('b-affine', (None,), None)
-
-
- def test_Model_defaults_to_no_output_shape(model_with_no_args):
- assert model_with_no_args.output_shape == None
-
-
- def test_Model_defaults_to_no_input_shape(model_with_no_args):
- assert model_with_no_args.input_shape == None
-
-
- def test_Model_defaults_to_0_size(model_with_no_args):
- assert model_with_no_args.size == None
-
|
Test Affine init calls hooks correctly, and sets descriptors
|
## Code Before:
from __future__ import unicode_literals
import pytest
from flexmock import flexmock
from hypothesis import given, strategies
import abc
from .... import vec2vec
from ....ops import NumpyOps
@pytest.fixture
def model_with_no_args():
model = vec2vec.Affine(ops=NumpyOps())
return model
def test_Affine_default_name(model_with_no_args):
assert model_with_no_args.name == 'affine'
def test_Affine_defaults_to_cpu(model_with_no_args):
assert isinstance(model_with_no_args.ops, NumpyOps)
def test_Affine_defaults_to_no_layers(model_with_no_args):
assert model_with_no_args.layers == []
def test_Affine_defaults_to_param_descriptions(model_with_no_args):
W_desc, b_desc = model_with_no_args.describe_params
xavier_init = model_with_no_args.ops.xavier_uniform_init
assert W_desc == ('W-affine', (None, None), xavier_init)
assert b_desc == ('b-affine', (None,), None)
def test_Model_defaults_to_no_output_shape(model_with_no_args):
assert model_with_no_args.output_shape == None
def test_Model_defaults_to_no_input_shape(model_with_no_args):
assert model_with_no_args.input_shape == None
def test_Model_defaults_to_0_size(model_with_no_args):
assert model_with_no_args.size == None
## Instruction:
Test Affine init calls hooks correctly, and sets descriptors
## Code After:
from __future__ import unicode_literals
import pytest
from mock import Mock, patch
from hypothesis import given, strategies
import abc
from ...._classes.affine import Affine
from ....ops import NumpyOps
@pytest.fixture
def model():
orig_desc = dict(Affine.descriptions)
orig_on_init = list(Affine.on_init_hooks)
Affine.descriptions = {
name: Mock(desc) for (name, desc) in Affine.descriptions.items()
}
Affine.on_init_hooks = [Mock(hook) for hook in Affine.on_init_hooks]
model = Affine()
for attr in model.descriptions:
setattr(model, attr, None)
Affine.descriptions = dict(orig_desc)
Affine.on_init_hooks = orig_on_init
return model
def test_Affine_default_name(model):
assert model.name == 'affine'
def test_Affine_calls_default_descriptions(model):
assert len(model.descriptions) == 5
for name, desc in model.descriptions.items():
desc.assert_called()
assert 'nB' in model.descriptions
assert 'nI' in model.descriptions
assert 'nO' in model.descriptions
assert 'W' in model.descriptions
assert 'b' in model.descriptions
def test_Affine_calls_init_hooks(model):
for hook in model.on_init_hooks:
hook.assert_called()
|
from __future__ import unicode_literals
import pytest
- from flexmock import flexmock
+ from mock import Mock, patch
from hypothesis import given, strategies
import abc
- from .... import vec2vec
+ from ...._classes.affine import Affine
from ....ops import NumpyOps
@pytest.fixture
- def model_with_no_args():
- model = vec2vec.Affine(ops=NumpyOps())
+ def model():
+ orig_desc = dict(Affine.descriptions)
+ orig_on_init = list(Affine.on_init_hooks)
+ Affine.descriptions = {
+ name: Mock(desc) for (name, desc) in Affine.descriptions.items()
+ }
+ Affine.on_init_hooks = [Mock(hook) for hook in Affine.on_init_hooks]
+ model = Affine()
+ for attr in model.descriptions:
+ setattr(model, attr, None)
+ Affine.descriptions = dict(orig_desc)
+ Affine.on_init_hooks = orig_on_init
return model
- def test_Affine_default_name(model_with_no_args):
? -------------
+ def test_Affine_default_name(model):
- assert model_with_no_args.name == 'affine'
? -------------
+ assert model.name == 'affine'
- def test_Affine_defaults_to_cpu(model_with_no_args):
- assert isinstance(model_with_no_args.ops, NumpyOps)
+ def test_Affine_calls_default_descriptions(model):
+ assert len(model.descriptions) == 5
+ for name, desc in model.descriptions.items():
+ desc.assert_called()
+ assert 'nB' in model.descriptions
+ assert 'nI' in model.descriptions
+ assert 'nO' in model.descriptions
+ assert 'W' in model.descriptions
+ assert 'b' in model.descriptions
+ def test_Affine_calls_init_hooks(model):
+ for hook in model.on_init_hooks:
+ hook.assert_called()
- def test_Affine_defaults_to_no_layers(model_with_no_args):
- assert model_with_no_args.layers == []
-
-
- def test_Affine_defaults_to_param_descriptions(model_with_no_args):
- W_desc, b_desc = model_with_no_args.describe_params
- xavier_init = model_with_no_args.ops.xavier_uniform_init
- assert W_desc == ('W-affine', (None, None), xavier_init)
- assert b_desc == ('b-affine', (None,), None)
-
-
- def test_Model_defaults_to_no_output_shape(model_with_no_args):
- assert model_with_no_args.output_shape == None
-
-
- def test_Model_defaults_to_no_input_shape(model_with_no_args):
- assert model_with_no_args.input_shape == None
-
-
- def test_Model_defaults_to_0_size(model_with_no_args):
- assert model_with_no_args.size == None
|
ddcf3490990f9b78f0009937bc9ddc2df0331b8e
|
lib/booki/account/templatetags/profile.py
|
lib/booki/account/templatetags/profile.py
|
import os
from django.db.models import get_model
from django.template import Library, Node, TemplateSyntaxError, resolve_variable
from django.conf import settings
from booki.account.models import UserProfile
register = Library()
class ProfileImageNode(Node):
def __init__(self, user):
self.user = user
def render(self, context):
user = resolve_variable(self.user, context)
# should check if it exists and etc
profile = UserProfile.objects.get(user=user)
if not profile.image:
return """<img src="%s/profile_images/_anonymous.jpg"/>""" % settings.DATA_URL
filename = profile.image.name
return """<img src="%s/profile_images/%s"/>""" % (settings.DATA_URL, filename.split('/')[-1])
@register.tag
def profile_image(parser, token):
"""
Django tag. Shows user profile image. If user does not have defined image it will show anonymous image.
@type token: C{string}
@param token: Variable name that points to C{User} object.
"""
bits = token.contents.split()
if len(bits) != 2:
raise TemplateSyntaxError
return ProfileImageNode(bits[1])
|
import os
from django.db.models import get_model
from django.template import Library, Node, TemplateSyntaxError, resolve_variable
from django.conf import settings
from booki.account.models import UserProfile
register = Library()
class ProfileImageNode(Node):
def __init__(self, user):
self.user = user
def render(self, context):
user = resolve_variable(self.user, context)
# should check if it exists and etc
profile = UserProfile.objects.get(user=user)
if not profile.image:
return """<img src="%s/images/anonymous.jpg"/>""" % settings.SITE_STATIC_URL
filename = profile.image.name
return """<img src="%s/profile_images/%s"/>""" % (settings.DATA_URL, filename.split('/')[-1])
@register.tag
def profile_image(parser, token):
"""
Django tag. Shows user profile image. If user does not have defined image it will show anonymous image.
@type token: C{string}
@param token: Variable name that points to C{User} object.
"""
bits = token.contents.split()
if len(bits) != 2:
raise TemplateSyntaxError
return ProfileImageNode(bits[1])
|
Fix image link to anonymous user.
|
Fix image link to anonymous user.
|
Python
|
agpl-3.0
|
kronoscode/Booktype,MiczFlor/Booktype,rob-hills/Booktype,ride90/Booktype,okffi/booktype,kronoscode/Booktype,danielhjames/Booktype,danielhjames/Booktype,eos87/Booktype,ride90/Booktype,MiczFlor/Booktype,danielhjames/Booktype,kronoscode/Booktype,btat/Booktype,ride90/Booktype,aerkalov/Booktype,danielhjames/Booktype,btat/Booktype,ride90/Booktype,sourcefabric/Booktype,okffi/booktype,danielhjames/Booktype,rob-hills/Booktype,okffi/booktype,sourcefabric/Booktype,kronoscode/Booktype,btat/Booktype,ride90/Booktype,olegpshenichniy/Booktype,kronoscode/Booktype,olegpshenichniy/Booktype,eos87/Booktype,sourcefabric/Booktype,MiczFlor/Booktype,btat/Booktype,okffi/booktype,MiczFlor/Booktype,MiczFlor/Booktype,rob-hills/Booktype,olegpshenichniy/Booktype,olegpshenichniy/Booktype,sourcefabric/Booktype,eos87/Booktype,okffi/booktype,aerkalov/Booktype,rob-hills/Booktype,eos87/Booktype,olegpshenichniy/Booktype,aerkalov/Booktype,eos87/Booktype,sourcefabric/Booktype
|
import os
from django.db.models import get_model
from django.template import Library, Node, TemplateSyntaxError, resolve_variable
from django.conf import settings
from booki.account.models import UserProfile
register = Library()
class ProfileImageNode(Node):
def __init__(self, user):
self.user = user
def render(self, context):
user = resolve_variable(self.user, context)
# should check if it exists and etc
profile = UserProfile.objects.get(user=user)
if not profile.image:
- return """<img src="%s/profile_images/_anonymous.jpg"/>""" % settings.DATA_URL
+ return """<img src="%s/images/anonymous.jpg"/>""" % settings.SITE_STATIC_URL
filename = profile.image.name
return """<img src="%s/profile_images/%s"/>""" % (settings.DATA_URL, filename.split('/')[-1])
@register.tag
def profile_image(parser, token):
"""
Django tag. Shows user profile image. If user does not have defined image it will show anonymous image.
@type token: C{string}
@param token: Variable name that points to C{User} object.
"""
bits = token.contents.split()
if len(bits) != 2:
raise TemplateSyntaxError
return ProfileImageNode(bits[1])
|
Fix image link to anonymous user.
|
## Code Before:
import os
from django.db.models import get_model
from django.template import Library, Node, TemplateSyntaxError, resolve_variable
from django.conf import settings
from booki.account.models import UserProfile
register = Library()
class ProfileImageNode(Node):
def __init__(self, user):
self.user = user
def render(self, context):
user = resolve_variable(self.user, context)
# should check if it exists and etc
profile = UserProfile.objects.get(user=user)
if not profile.image:
return """<img src="%s/profile_images/_anonymous.jpg"/>""" % settings.DATA_URL
filename = profile.image.name
return """<img src="%s/profile_images/%s"/>""" % (settings.DATA_URL, filename.split('/')[-1])
@register.tag
def profile_image(parser, token):
"""
Django tag. Shows user profile image. If user does not have defined image it will show anonymous image.
@type token: C{string}
@param token: Variable name that points to C{User} object.
"""
bits = token.contents.split()
if len(bits) != 2:
raise TemplateSyntaxError
return ProfileImageNode(bits[1])
## Instruction:
Fix image link to anonymous user.
## Code After:
import os
from django.db.models import get_model
from django.template import Library, Node, TemplateSyntaxError, resolve_variable
from django.conf import settings
from booki.account.models import UserProfile
register = Library()
class ProfileImageNode(Node):
def __init__(self, user):
self.user = user
def render(self, context):
user = resolve_variable(self.user, context)
# should check if it exists and etc
profile = UserProfile.objects.get(user=user)
if not profile.image:
return """<img src="%s/images/anonymous.jpg"/>""" % settings.SITE_STATIC_URL
filename = profile.image.name
return """<img src="%s/profile_images/%s"/>""" % (settings.DATA_URL, filename.split('/')[-1])
@register.tag
def profile_image(parser, token):
"""
Django tag. Shows user profile image. If user does not have defined image it will show anonymous image.
@type token: C{string}
@param token: Variable name that points to C{User} object.
"""
bits = token.contents.split()
if len(bits) != 2:
raise TemplateSyntaxError
return ProfileImageNode(bits[1])
|
import os
from django.db.models import get_model
from django.template import Library, Node, TemplateSyntaxError, resolve_variable
from django.conf import settings
from booki.account.models import UserProfile
register = Library()
class ProfileImageNode(Node):
def __init__(self, user):
self.user = user
def render(self, context):
user = resolve_variable(self.user, context)
# should check if it exists and etc
profile = UserProfile.objects.get(user=user)
if not profile.image:
- return """<img src="%s/profile_images/_anonymous.jpg"/>""" % settings.DATA_URL
? -------- - ^ ^
+ return """<img src="%s/images/anonymous.jpg"/>""" % settings.SITE_STATIC_URL
? ^^^^^^^ ^^
filename = profile.image.name
return """<img src="%s/profile_images/%s"/>""" % (settings.DATA_URL, filename.split('/')[-1])
@register.tag
def profile_image(parser, token):
"""
Django tag. Shows user profile image. If user does not have defined image it will show anonymous image.
@type token: C{string}
@param token: Variable name that points to C{User} object.
"""
bits = token.contents.split()
if len(bits) != 2:
raise TemplateSyntaxError
return ProfileImageNode(bits[1])
|
56a8b900570200e63ee460dd7e2962cba2450b16
|
preparation/tools/build_assets.py
|
preparation/tools/build_assets.py
|
from copy import copy
import argparse
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage, ExplanationStorage
def generate_asset(resource, out_storage: ExplanationStorage):
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
def rebuild_trunk(trunk: str):
resource = resource_by_name(trunk + 'Resource')()
with get_storage(trunk) as out_storage:
print("Starting {} generation".format(trunk))
generate_asset(resource, out_storage)
print("Finished {} generation".format(trunk))
def make_argparser():
parser = argparse.ArgumentParser(description='Rebuild some asset')
names = [name.replace('Resource', '') for name in names_registered()]
parser.add_argument('resources',
metavar='RESOURCE',
nargs='+',
choices=names + ['all'],
help='One of registered resources ({}) or just \'all\'.'.format(', '.join(names)))
return parser
def main(args=None):
if not isinstance(args, argparse.Namespace):
parser = make_argparser()
args = parser.parse_args(args)
assert all not in args.resources or len(args.resources) == 1
for name in args.resources:
rebuild_trunk(name)
if __name__ == '__main__':
main()
|
from copy import copy
import argparse
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage, ExplanationStorage
def generate_asset(resource, out_storage: ExplanationStorage):
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
def rebuild_trunk(trunk: str):
resource = resource_by_name(trunk + 'Resource')()
with get_storage(trunk) as out_storage:
print("Starting {} generation".format(trunk))
generate_asset(resource, out_storage)
print("Finished {} generation".format(trunk))
def make_argparser():
parser = argparse.ArgumentParser(description='Rebuild some asset')
names = [name.replace('Resource', '') for name in names_registered()]
parser.add_argument('resources',
metavar='RESOURCE',
nargs='+',
choices=names + ['all'],
help='One of registered resources ({}) or just \'all\'.'.format(', '.join(names)))
return parser
def main(args=None):
if not isinstance(args, argparse.Namespace):
parser = make_argparser()
args = parser.parse_args(args)
assert 'all' not in args.resources or len(args.resources) == 1
if 'all' in args.resources:
args.resources = [name.replace('Resource', '') for name in names_registered()]
for name in args.resources:
rebuild_trunk(name)
if __name__ == '__main__':
main()
|
Fix bug with 'all' argument
|
Fix bug with 'all' argument
|
Python
|
mit
|
hatbot-team/hatbot_resources
|
from copy import copy
import argparse
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage, ExplanationStorage
def generate_asset(resource, out_storage: ExplanationStorage):
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
def rebuild_trunk(trunk: str):
resource = resource_by_name(trunk + 'Resource')()
with get_storage(trunk) as out_storage:
print("Starting {} generation".format(trunk))
generate_asset(resource, out_storage)
print("Finished {} generation".format(trunk))
def make_argparser():
parser = argparse.ArgumentParser(description='Rebuild some asset')
names = [name.replace('Resource', '') for name in names_registered()]
parser.add_argument('resources',
metavar='RESOURCE',
nargs='+',
choices=names + ['all'],
help='One of registered resources ({}) or just \'all\'.'.format(', '.join(names)))
return parser
def main(args=None):
if not isinstance(args, argparse.Namespace):
parser = make_argparser()
args = parser.parse_args(args)
- assert all not in args.resources or len(args.resources) == 1
+ assert 'all' not in args.resources or len(args.resources) == 1
+ if 'all' in args.resources:
+ args.resources = [name.replace('Resource', '') for name in names_registered()]
for name in args.resources:
rebuild_trunk(name)
if __name__ == '__main__':
main()
|
Fix bug with 'all' argument
|
## Code Before:
from copy import copy
import argparse
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage, ExplanationStorage
def generate_asset(resource, out_storage: ExplanationStorage):
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
def rebuild_trunk(trunk: str):
resource = resource_by_name(trunk + 'Resource')()
with get_storage(trunk) as out_storage:
print("Starting {} generation".format(trunk))
generate_asset(resource, out_storage)
print("Finished {} generation".format(trunk))
def make_argparser():
parser = argparse.ArgumentParser(description='Rebuild some asset')
names = [name.replace('Resource', '') for name in names_registered()]
parser.add_argument('resources',
metavar='RESOURCE',
nargs='+',
choices=names + ['all'],
help='One of registered resources ({}) or just \'all\'.'.format(', '.join(names)))
return parser
def main(args=None):
if not isinstance(args, argparse.Namespace):
parser = make_argparser()
args = parser.parse_args(args)
assert all not in args.resources or len(args.resources) == 1
for name in args.resources:
rebuild_trunk(name)
if __name__ == '__main__':
main()
## Instruction:
Fix bug with 'all' argument
## Code After:
from copy import copy
import argparse
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage, ExplanationStorage
def generate_asset(resource, out_storage: ExplanationStorage):
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
def rebuild_trunk(trunk: str):
resource = resource_by_name(trunk + 'Resource')()
with get_storage(trunk) as out_storage:
print("Starting {} generation".format(trunk))
generate_asset(resource, out_storage)
print("Finished {} generation".format(trunk))
def make_argparser():
parser = argparse.ArgumentParser(description='Rebuild some asset')
names = [name.replace('Resource', '') for name in names_registered()]
parser.add_argument('resources',
metavar='RESOURCE',
nargs='+',
choices=names + ['all'],
help='One of registered resources ({}) or just \'all\'.'.format(', '.join(names)))
return parser
def main(args=None):
if not isinstance(args, argparse.Namespace):
parser = make_argparser()
args = parser.parse_args(args)
assert 'all' not in args.resources or len(args.resources) == 1
if 'all' in args.resources:
args.resources = [name.replace('Resource', '') for name in names_registered()]
for name in args.resources:
rebuild_trunk(name)
if __name__ == '__main__':
main()
|
from copy import copy
import argparse
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage, ExplanationStorage
def generate_asset(resource, out_storage: ExplanationStorage):
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
def rebuild_trunk(trunk: str):
resource = resource_by_name(trunk + 'Resource')()
with get_storage(trunk) as out_storage:
print("Starting {} generation".format(trunk))
generate_asset(resource, out_storage)
print("Finished {} generation".format(trunk))
def make_argparser():
parser = argparse.ArgumentParser(description='Rebuild some asset')
names = [name.replace('Resource', '') for name in names_registered()]
parser.add_argument('resources',
metavar='RESOURCE',
nargs='+',
choices=names + ['all'],
help='One of registered resources ({}) or just \'all\'.'.format(', '.join(names)))
return parser
def main(args=None):
if not isinstance(args, argparse.Namespace):
parser = make_argparser()
args = parser.parse_args(args)
- assert all not in args.resources or len(args.resources) == 1
+ assert 'all' not in args.resources or len(args.resources) == 1
? + +
+ if 'all' in args.resources:
+ args.resources = [name.replace('Resource', '') for name in names_registered()]
for name in args.resources:
rebuild_trunk(name)
if __name__ == '__main__':
main()
|
c220c0a474a660c4c1167d42fdd0d48599b1b593
|
tests/test_pathutils.py
|
tests/test_pathutils.py
|
from os.path import join
import sublime
import sys
from unittest import TestCase
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
def test_grep_r(self):
pathutils.os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
from os.path import join, realpath
import os
import sublime
import sys
from unittest import TestCase
from functools import wraps
def subl_patch(pkg, obj=None):
def subl_deco(fn):
@wraps(fn)
def wrap(*args):
nonlocal pkg
o = []
if obj != None:
o += [obj]
pkg = pkg + '.' + obj
try:
mock = __import__(pkg, globals(), locals(), o, 0)
except ImportError:
pkg = realpath(__file__).split(os.sep)[-3] + '.' + pkg
mock = __import__(pkg, globals(), locals(), o, 0)
args += (mock,)
fn(*args)
return wrap
return subl_deco
class TestPathutils(TestCase):
@subl_patch('libsass', 'pathutils')
def test_subpaths(self, pathutils):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@subl_patch('libsass', 'pathutils')
def test_grep_r(self, pathutils):
pathutils.os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
Make custom patch in package to test
|
Make custom patch in package to test
|
Python
|
mit
|
blitzrk/sublime_libsass,blitzrk/sublime_libsass
|
- from os.path import join
+ from os.path import join, realpath
+ import os
import sublime
import sys
from unittest import TestCase
+ from functools import wraps
- version = sublime.version()
- try:
- from libsass import pathutils
+ def subl_patch(pkg, obj=None):
+ def subl_deco(fn):
+ @wraps(fn)
+ def wrap(*args):
+ nonlocal pkg
+ o = []
+ if obj != None:
+ o += [obj]
+ pkg = pkg + '.' + obj
+ try:
+ mock = __import__(pkg, globals(), locals(), o, 0)
- except ImportError:
+ except ImportError:
- from sublime_libsass.libsass import pathutils
+ pkg = realpath(__file__).split(os.sep)[-3] + '.' + pkg
+ mock = __import__(pkg, globals(), locals(), o, 0)
+ args += (mock,)
+ fn(*args)
+ return wrap
+ return subl_deco
class TestPathutils(TestCase):
+ @subl_patch('libsass', 'pathutils')
- def test_subpaths(self):
+ def test_subpaths(self, pathutils):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
+ @subl_patch('libsass', 'pathutils')
- def test_grep_r(self):
+ def test_grep_r(self, pathutils):
pathutils.os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
Make custom patch in package to test
|
## Code Before:
from os.path import join
import sublime
import sys
from unittest import TestCase
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
def test_grep_r(self):
pathutils.os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
## Instruction:
Make custom patch in package to test
## Code After:
from os.path import join, realpath
import os
import sublime
import sys
from unittest import TestCase
from functools import wraps
def subl_patch(pkg, obj=None):
def subl_deco(fn):
@wraps(fn)
def wrap(*args):
nonlocal pkg
o = []
if obj != None:
o += [obj]
pkg = pkg + '.' + obj
try:
mock = __import__(pkg, globals(), locals(), o, 0)
except ImportError:
pkg = realpath(__file__).split(os.sep)[-3] + '.' + pkg
mock = __import__(pkg, globals(), locals(), o, 0)
args += (mock,)
fn(*args)
return wrap
return subl_deco
class TestPathutils(TestCase):
@subl_patch('libsass', 'pathutils')
def test_subpaths(self, pathutils):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@subl_patch('libsass', 'pathutils')
def test_grep_r(self, pathutils):
pathutils.os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
- from os.path import join
+ from os.path import join, realpath
? ++++++++++
+ import os
import sublime
import sys
from unittest import TestCase
+ from functools import wraps
- version = sublime.version()
- try:
- from libsass import pathutils
+ def subl_patch(pkg, obj=None):
+ def subl_deco(fn):
+ @wraps(fn)
+ def wrap(*args):
+ nonlocal pkg
+ o = []
+ if obj != None:
+ o += [obj]
+ pkg = pkg + '.' + obj
+ try:
+ mock = __import__(pkg, globals(), locals(), o, 0)
- except ImportError:
+ except ImportError:
? ++++++++++++
- from sublime_libsass.libsass import pathutils
+ pkg = realpath(__file__).split(os.sep)[-3] + '.' + pkg
+ mock = __import__(pkg, globals(), locals(), o, 0)
+ args += (mock,)
+ fn(*args)
+ return wrap
+ return subl_deco
class TestPathutils(TestCase):
+ @subl_patch('libsass', 'pathutils')
- def test_subpaths(self):
+ def test_subpaths(self, pathutils):
? +++++++++++
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
+ @subl_patch('libsass', 'pathutils')
- def test_grep_r(self):
+ def test_grep_r(self, pathutils):
? +++++++++++
pathutils.os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
469d73255365392a821d701b4df9098d97b7546a
|
judge/toyojjudge/taskrunner.py
|
judge/toyojjudge/taskrunner.py
|
import asyncio
import logging
logger = logging.getLogger(__name__)
class TaskRunner:
def __init__(self, sandbox_pool, languages, checkers):
self.sandbox_pool = sandbox_pool
self.languages = languages
self.checkers = checkers
async def run(self, task):
async with self.sandbox_pool.acquire() as box:
logger.debug("Running %s", task)
lang = self.languages[task.submission.language_name]
check = self.checkers[task.testcase.checker_name]
await lang.run_task(box, task)
if task.verdict is not None:
task.accepted = False
else:
await check.check(box, task)
|
import asyncio
import logging
logger = logging.getLogger(__name__)
class TaskRunner:
def __init__(self, sandbox_pool, languages, checkers):
self.sandbox_pool = sandbox_pool
self.languages = languages
self.checkers = checkers
async def run(self, task):
async with self.sandbox_pool.acquire() as box:
language_name = task.submission.language_name
checker_name = task.testcase.checker_name
logger.info("Running %s, language %s, checker %s",
task, language_name, checker_name)
lang = self.languages[language_name]
check = self.checkers[checker_name]
await lang.run_task(box, task)
if task.verdict is not None:
task.accepted = False
else:
await check.check(box, task)
|
Print running task, language and checker as INFO
|
judge: Print running task, language and checker as INFO
|
Python
|
agpl-3.0
|
johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj
|
import asyncio
import logging
logger = logging.getLogger(__name__)
class TaskRunner:
def __init__(self, sandbox_pool, languages, checkers):
self.sandbox_pool = sandbox_pool
self.languages = languages
self.checkers = checkers
async def run(self, task):
async with self.sandbox_pool.acquire() as box:
- logger.debug("Running %s", task)
+ language_name = task.submission.language_name
+ checker_name = task.testcase.checker_name
+ logger.info("Running %s, language %s, checker %s",
+ task, language_name, checker_name)
- lang = self.languages[task.submission.language_name]
+ lang = self.languages[language_name]
- check = self.checkers[task.testcase.checker_name]
+ check = self.checkers[checker_name]
await lang.run_task(box, task)
if task.verdict is not None:
task.accepted = False
else:
await check.check(box, task)
|
Print running task, language and checker as INFO
|
## Code Before:
import asyncio
import logging
logger = logging.getLogger(__name__)
class TaskRunner:
def __init__(self, sandbox_pool, languages, checkers):
self.sandbox_pool = sandbox_pool
self.languages = languages
self.checkers = checkers
async def run(self, task):
async with self.sandbox_pool.acquire() as box:
logger.debug("Running %s", task)
lang = self.languages[task.submission.language_name]
check = self.checkers[task.testcase.checker_name]
await lang.run_task(box, task)
if task.verdict is not None:
task.accepted = False
else:
await check.check(box, task)
## Instruction:
Print running task, language and checker as INFO
## Code After:
import asyncio
import logging
logger = logging.getLogger(__name__)
class TaskRunner:
def __init__(self, sandbox_pool, languages, checkers):
self.sandbox_pool = sandbox_pool
self.languages = languages
self.checkers = checkers
async def run(self, task):
async with self.sandbox_pool.acquire() as box:
language_name = task.submission.language_name
checker_name = task.testcase.checker_name
logger.info("Running %s, language %s, checker %s",
task, language_name, checker_name)
lang = self.languages[language_name]
check = self.checkers[checker_name]
await lang.run_task(box, task)
if task.verdict is not None:
task.accepted = False
else:
await check.check(box, task)
|
import asyncio
import logging
logger = logging.getLogger(__name__)
class TaskRunner:
def __init__(self, sandbox_pool, languages, checkers):
self.sandbox_pool = sandbox_pool
self.languages = languages
self.checkers = checkers
async def run(self, task):
async with self.sandbox_pool.acquire() as box:
- logger.debug("Running %s", task)
+ language_name = task.submission.language_name
+ checker_name = task.testcase.checker_name
+ logger.info("Running %s, language %s, checker %s",
+ task, language_name, checker_name)
- lang = self.languages[task.submission.language_name]
? ----------------
+ lang = self.languages[language_name]
- check = self.checkers[task.testcase.checker_name]
? --------------
+ check = self.checkers[checker_name]
await lang.run_task(box, task)
if task.verdict is not None:
task.accepted = False
else:
await check.check(box, task)
|
21f06746eebe809f5d7017394b4c7c50ba319066
|
street_score/bulkadmin/forms.py
|
street_score/bulkadmin/forms.py
|
import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField()
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
|
import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField(help_text="""
<p>Select the CSV file to upload. The file should have a header for
each column you want to populate. When you have selected your
file, click the 'Upload' button below.</p>
""")
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
|
Add a help_text string to the admin form
|
Add a help_text string to the admin form
|
Python
|
mit
|
openplans/streetscore,openplans/streetscore,openplans/streetscore
|
import csv
from django import forms
class BulkUploadForm(forms.Form):
- data = forms.FileField()
+ data = forms.FileField(help_text="""
+ <p>Select the CSV file to upload. The file should have a header for
+ each column you want to populate. When you have selected your
+ file, click the 'Upload' button below.</p>
+ """)
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
|
Add a help_text string to the admin form
|
## Code Before:
import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField()
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
## Instruction:
Add a help_text string to the admin form
## Code After:
import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField(help_text="""
<p>Select the CSV file to upload. The file should have a header for
each column you want to populate. When you have selected your
file, click the 'Upload' button below.</p>
""")
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
|
import csv
from django import forms
class BulkUploadForm(forms.Form):
- data = forms.FileField()
? ^
+ data = forms.FileField(help_text="""
? ^^^^^^^^^^^^^
+ <p>Select the CSV file to upload. The file should have a header for
+ each column you want to populate. When you have selected your
+ file, click the 'Upload' button below.</p>
+ """)
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
|
9698e531ffd528b6b56e285f5cf8087aa06d4a02
|
test/conftest.py
|
test/conftest.py
|
import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
|
import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
@pytest.fixture
def namespace(namespaces):
return namespaces.Namespace
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
|
Add fixture for Namespace specifically.
|
Add fixture for Namespace specifically.
|
Python
|
mit
|
mwchase/class-namespaces,mwchase/class-namespaces
|
import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
+
+
+ @pytest.fixture
+ def namespace(namespaces):
+ return namespaces.Namespace
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
|
Add fixture for Namespace specifically.
|
## Code Before:
import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
## Instruction:
Add fixture for Namespace specifically.
## Code After:
import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
@pytest.fixture
def namespace(namespaces):
return namespaces.Namespace
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
|
import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
+
+
+ @pytest.fixture
+ def namespace(namespaces):
+ return namespaces.Namespace
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
|
5c341fc463840bc2e237e1529a43aa5915a70c77
|
luhn/luhn.py
|
luhn/luhn.py
|
def Luhn(card_number):
def digits_of(n):
return [int(d) for d in str(n)]
digits = digits_of(card_number)
odd = digits[-1::-2]
even = digits[-2::-2]
checksum = 0
checksum += sum(odd_digits)
def checksum(card_number):
return luhn(card_number)
|
def Luhn(card_number):
def digits_of(n):
return [int(d) for d in str(n)]
digits = digits_of(card_number)
odd = digits[-1::-2]
even = digits[-2::-2]
checksum = 0
checksum += sum(odd_digits)
for d in even_digits:
checksum += sum(digits_of(d*2))
return checksum % 10
def checksum(card_number):
return luhn(card_number)
|
Return the remainder of checksum
|
Return the remainder of checksum
|
Python
|
mit
|
amalshehu/exercism-python
|
def Luhn(card_number):
def digits_of(n):
return [int(d) for d in str(n)]
digits = digits_of(card_number)
odd = digits[-1::-2]
even = digits[-2::-2]
checksum = 0
checksum += sum(odd_digits)
+ for d in even_digits:
+ checksum += sum(digits_of(d*2))
+ return checksum % 10
def checksum(card_number):
return luhn(card_number)
|
Return the remainder of checksum
|
## Code Before:
def Luhn(card_number):
def digits_of(n):
return [int(d) for d in str(n)]
digits = digits_of(card_number)
odd = digits[-1::-2]
even = digits[-2::-2]
checksum = 0
checksum += sum(odd_digits)
def checksum(card_number):
return luhn(card_number)
## Instruction:
Return the remainder of checksum
## Code After:
def Luhn(card_number):
def digits_of(n):
return [int(d) for d in str(n)]
digits = digits_of(card_number)
odd = digits[-1::-2]
even = digits[-2::-2]
checksum = 0
checksum += sum(odd_digits)
for d in even_digits:
checksum += sum(digits_of(d*2))
return checksum % 10
def checksum(card_number):
return luhn(card_number)
|
def Luhn(card_number):
def digits_of(n):
return [int(d) for d in str(n)]
digits = digits_of(card_number)
odd = digits[-1::-2]
even = digits[-2::-2]
checksum = 0
checksum += sum(odd_digits)
+ for d in even_digits:
+ checksum += sum(digits_of(d*2))
+ return checksum % 10
def checksum(card_number):
return luhn(card_number)
|
e229779753f3c5f44319d882d19feab324abe119
|
api/migrations/0011_user_preferences_update_troposphere_user.py
|
api/migrations/0011_user_preferences_update_troposphere_user.py
|
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0010_sitemetadata_site_footer_link'),
]
operations = [
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''',
# reverse_sql='''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES auth_user(id) DEFERRABLE INITIALLY DEFERRED'''
# ),
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES troposphere_user(id) DEFERRABLE INITIALLY DEFERRED''',
# reverse_sql='''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id'''
# ),
]
|
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0010_sitemetadata_site_footer_link'),
]
# These one-off operations are no longer necessary. The file was already executed in a production environment so it will stay as-is.
operations = [
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''',
# reverse_sql='''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES auth_user(id) DEFERRABLE INITIALLY DEFERRED'''
# ),
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES troposphere_user(id) DEFERRABLE INITIALLY DEFERRED''',
# reverse_sql='''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id'''
# ),
]
|
Add note after testing on all three 'valid' environment formats: (Clean, CyVerse, Jetstream)
|
Add note after testing on all three 'valid' environment formats: (Clean, CyVerse, Jetstream)
|
Python
|
apache-2.0
|
CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend
|
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0010_sitemetadata_site_footer_link'),
]
+ # These one-off operations are no longer necessary. The file was already executed in a production environment so it will stay as-is.
operations = [
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''',
# reverse_sql='''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES auth_user(id) DEFERRABLE INITIALLY DEFERRED'''
# ),
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES troposphere_user(id) DEFERRABLE INITIALLY DEFERRED''',
# reverse_sql='''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id'''
# ),
]
|
Add note after testing on all three 'valid' environment formats: (Clean, CyVerse, Jetstream)
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0010_sitemetadata_site_footer_link'),
]
operations = [
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''',
# reverse_sql='''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES auth_user(id) DEFERRABLE INITIALLY DEFERRED'''
# ),
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES troposphere_user(id) DEFERRABLE INITIALLY DEFERRED''',
# reverse_sql='''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id'''
# ),
]
## Instruction:
Add note after testing on all three 'valid' environment formats: (Clean, CyVerse, Jetstream)
## Code After:
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0010_sitemetadata_site_footer_link'),
]
# These one-off operations are no longer necessary. The file was already executed in a production environment so it will stay as-is.
operations = [
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''',
# reverse_sql='''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES auth_user(id) DEFERRABLE INITIALLY DEFERRED'''
# ),
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES troposphere_user(id) DEFERRABLE INITIALLY DEFERRED''',
# reverse_sql='''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id'''
# ),
]
|
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0010_sitemetadata_site_footer_link'),
]
+ # These one-off operations are no longer necessary. The file was already executed in a production environment so it will stay as-is.
operations = [
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''',
# reverse_sql='''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES auth_user(id) DEFERRABLE INITIALLY DEFERRED'''
# ),
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES troposphere_user(id) DEFERRABLE INITIALLY DEFERRED''',
# reverse_sql='''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id'''
# ),
]
|
bc961992afeae978e95209606e0e7b1a9b73719f
|
jesusmtnez/python/kata/game.py
|
jesusmtnez/python/kata/game.py
|
class Game():
def __init__(self):
self._score = 0
def roll(self, pins):
pass
def score(self):
return 0
|
class Game():
def __init__(self):
self._score = 0
def roll(self, pins):
self._score += pins
def score(self):
return self._score
|
Update score in Game class methods
|
[Python] Update score in Game class methods
|
Python
|
mit
|
JesusMtnez/devexperto-challenge,JesusMtnez/devexperto-challenge
|
class Game():
def __init__(self):
self._score = 0
def roll(self, pins):
- pass
+ self._score += pins
def score(self):
- return 0
+ return self._score
|
Update score in Game class methods
|
## Code Before:
class Game():
def __init__(self):
self._score = 0
def roll(self, pins):
pass
def score(self):
return 0
## Instruction:
Update score in Game class methods
## Code After:
class Game():
def __init__(self):
self._score = 0
def roll(self, pins):
self._score += pins
def score(self):
return self._score
|
class Game():
def __init__(self):
self._score = 0
def roll(self, pins):
- pass
+ self._score += pins
def score(self):
- return 0
+ return self._score
|
54691f9be052e5564ca0e5c6a503e641ea3142e1
|
keras/layers/normalization.py
|
keras/layers/normalization.py
|
from ..layers.core import Layer
from ..utils.theano_utils import shared_zeros
from .. import initializations
class BatchNormalization(Layer):
'''
Reference:
Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift
http://arxiv.org/pdf/1502.03167v3.pdf
'''
def __init__(self, input_shape, epsilon=1e-6, weights=None):
self.init = initializations.get("uniform")
self.input_shape = input_shape
self.epsilon = epsilon
self.gamma = self.init((self.input_shape))
self.beta = shared_zeros(self.input_shape)
self.params = [self.gamma, self.beta]
if weights is not None:
self.set_weights(weights)
def output(self, train):
X = self.get_input(train)
X_normed = (X - X.mean(keepdims=True)) / (X.std(keepdims=True) + self.epsilon)
out = self.gamma * X_normed + self.beta
return out
def get_config(self):
return {"name":self.__class__.__name__,
"input_shape":self.input_shape,
"epsilon":self.epsilon}
|
from ..layers.core import Layer
from ..utils.theano_utils import shared_zeros
from .. import initializations
import theano.tensor as T
class BatchNormalization(Layer):
'''
Reference:
Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift
http://arxiv.org/pdf/1502.03167v3.pdf
mode: 0 -> featurewise normalization
1 -> samplewise normalization (may sometimes outperform featurewise mode)
'''
def __init__(self, input_shape, epsilon=1e-6, mode=0, weights=None):
self.init = initializations.get("uniform")
self.input_shape = input_shape
self.epsilon = epsilon
self.mode = mode
self.gamma = self.init((self.input_shape))
self.beta = shared_zeros(self.input_shape)
self.params = [self.gamma, self.beta]
if weights is not None:
self.set_weights(weights)
def output(self, train):
X = self.get_input(train)
if self.mode == 0:
m = X.mean(axis=0)
# manual computation of std to prevent NaNs
std = T.mean((X-m)**2 + self.epsilon, axis=0) ** 0.5
X_normed = (X - m) / (std + self.epsilon)
elif self.mode == 1:
m = X.mean(axis=-1, keepdims=True)
std = X.std(axis=-1, keepdims=True)
X_normed = (X - m) / (std + self.epsilon)
out = self.gamma * X_normed + self.beta
return out
def get_config(self):
return {"name":self.__class__.__name__,
"input_shape":self.input_shape,
"epsilon":self.epsilon,
"mode":self.mode}
|
Add modes to BatchNormalization, fix BN issues
|
Add modes to BatchNormalization, fix BN issues
|
Python
|
mit
|
yingzha/keras,imcomking/Convolutional-GRU-keras-extension-,jayhetee/keras,why11002526/keras,marchick209/keras,relh/keras,mikekestemont/keras,florentchandelier/keras,jbolinge/keras,tencrance/keras,meanmee/keras,kuza55/keras,nehz/keras,keras-team/keras,EderSantana/keras,kemaswill/keras,abayowbo/keras,dhruvparamhans/keras,cheng6076/keras,jasonyaw/keras,3dconv/keras,ogrisel/keras,marcelo-amancio/keras,jalexvig/keras,johmathe/keras,ekamioka/keras,stonebig/keras,Smerity/keras,dribnet/keras,Aureliu/keras,nt/keras,nebw/keras,ashhher3/keras,ypkang/keras,fmacias64/keras,gamer13/keras,harshhemani/keras,LIBOTAO/keras,rudaoshi/keras,danielforsyth/keras,hhaoyan/keras,jiumem/keras,Yingmin-Li/keras,iScienceLuvr/keras,dxj19831029/keras,aleju/keras,nzer0/keras,bottler/keras,printedheart/keras,wxs/keras,jfsantos/keras,ml-lab/keras,navyjeff/keras,wubr2000/keras,rlkelly/keras,zxytim/keras,gavinmh/keras,keras-team/keras,brainwater/keras,pthaike/keras,DeepGnosis/keras,pjadzinsky/keras,jonberliner/keras,rodrigob/keras,zxsted/keras,vseledkin/keras,asampat3090/keras,OlafLee/keras,daviddiazvico/keras,Cadene/keras,llcao/keras,sjuvekar/keras,zhmz90/keras,stephenbalaban/keras,dolaameng/keras,kfoss/keras,jimgoo/keras,iamtrask/keras,xiaoda99/keras,keskarnitish/keras,untom/keras,happyboy310/keras,bboalimoe/keras,DLlearn/keras,JasonTam/keras,kod3r/keras,MagicSen/keras,chenych11/keras,jmportilla/keras,jslhs/keras,xurantju/keras,saurav111/keras,zhangxujinsh/keras,amy12xx/keras,cmyr/keras,ledbetdr/keras,eulerreich/keras,cvfish/keras,jhauswald/keras
|
from ..layers.core import Layer
from ..utils.theano_utils import shared_zeros
from .. import initializations
+
+ import theano.tensor as T
class BatchNormalization(Layer):
'''
Reference:
Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift
http://arxiv.org/pdf/1502.03167v3.pdf
+
+ mode: 0 -> featurewise normalization
+ 1 -> samplewise normalization (may sometimes outperform featurewise mode)
'''
- def __init__(self, input_shape, epsilon=1e-6, weights=None):
+ def __init__(self, input_shape, epsilon=1e-6, mode=0, weights=None):
self.init = initializations.get("uniform")
self.input_shape = input_shape
self.epsilon = epsilon
+ self.mode = mode
self.gamma = self.init((self.input_shape))
self.beta = shared_zeros(self.input_shape)
self.params = [self.gamma, self.beta]
if weights is not None:
self.set_weights(weights)
def output(self, train):
X = self.get_input(train)
- X_normed = (X - X.mean(keepdims=True)) / (X.std(keepdims=True) + self.epsilon)
+
+ if self.mode == 0:
+ m = X.mean(axis=0)
+ # manual computation of std to prevent NaNs
+ std = T.mean((X-m)**2 + self.epsilon, axis=0) ** 0.5
+ X_normed = (X - m) / (std + self.epsilon)
+
+ elif self.mode == 1:
+ m = X.mean(axis=-1, keepdims=True)
+ std = X.std(axis=-1, keepdims=True)
+ X_normed = (X - m) / (std + self.epsilon)
+
out = self.gamma * X_normed + self.beta
return out
def get_config(self):
return {"name":self.__class__.__name__,
"input_shape":self.input_shape,
- "epsilon":self.epsilon}
+ "epsilon":self.epsilon,
+ "mode":self.mode}
+
|
Add modes to BatchNormalization, fix BN issues
|
## Code Before:
from ..layers.core import Layer
from ..utils.theano_utils import shared_zeros
from .. import initializations
class BatchNormalization(Layer):
'''
Reference:
Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift
http://arxiv.org/pdf/1502.03167v3.pdf
'''
def __init__(self, input_shape, epsilon=1e-6, weights=None):
self.init = initializations.get("uniform")
self.input_shape = input_shape
self.epsilon = epsilon
self.gamma = self.init((self.input_shape))
self.beta = shared_zeros(self.input_shape)
self.params = [self.gamma, self.beta]
if weights is not None:
self.set_weights(weights)
def output(self, train):
X = self.get_input(train)
X_normed = (X - X.mean(keepdims=True)) / (X.std(keepdims=True) + self.epsilon)
out = self.gamma * X_normed + self.beta
return out
def get_config(self):
return {"name":self.__class__.__name__,
"input_shape":self.input_shape,
"epsilon":self.epsilon}
## Instruction:
Add modes to BatchNormalization, fix BN issues
## Code After:
from ..layers.core import Layer
from ..utils.theano_utils import shared_zeros
from .. import initializations
import theano.tensor as T
class BatchNormalization(Layer):
'''
Reference:
Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift
http://arxiv.org/pdf/1502.03167v3.pdf
mode: 0 -> featurewise normalization
1 -> samplewise normalization (may sometimes outperform featurewise mode)
'''
def __init__(self, input_shape, epsilon=1e-6, mode=0, weights=None):
self.init = initializations.get("uniform")
self.input_shape = input_shape
self.epsilon = epsilon
self.mode = mode
self.gamma = self.init((self.input_shape))
self.beta = shared_zeros(self.input_shape)
self.params = [self.gamma, self.beta]
if weights is not None:
self.set_weights(weights)
def output(self, train):
X = self.get_input(train)
if self.mode == 0:
m = X.mean(axis=0)
# manual computation of std to prevent NaNs
std = T.mean((X-m)**2 + self.epsilon, axis=0) ** 0.5
X_normed = (X - m) / (std + self.epsilon)
elif self.mode == 1:
m = X.mean(axis=-1, keepdims=True)
std = X.std(axis=-1, keepdims=True)
X_normed = (X - m) / (std + self.epsilon)
out = self.gamma * X_normed + self.beta
return out
def get_config(self):
return {"name":self.__class__.__name__,
"input_shape":self.input_shape,
"epsilon":self.epsilon,
"mode":self.mode}
|
from ..layers.core import Layer
from ..utils.theano_utils import shared_zeros
from .. import initializations
+
+ import theano.tensor as T
class BatchNormalization(Layer):
'''
Reference:
Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift
http://arxiv.org/pdf/1502.03167v3.pdf
+
+ mode: 0 -> featurewise normalization
+ 1 -> samplewise normalization (may sometimes outperform featurewise mode)
'''
- def __init__(self, input_shape, epsilon=1e-6, weights=None):
+ def __init__(self, input_shape, epsilon=1e-6, mode=0, weights=None):
? ++++++++
self.init = initializations.get("uniform")
self.input_shape = input_shape
self.epsilon = epsilon
+ self.mode = mode
self.gamma = self.init((self.input_shape))
self.beta = shared_zeros(self.input_shape)
self.params = [self.gamma, self.beta]
if weights is not None:
self.set_weights(weights)
def output(self, train):
X = self.get_input(train)
- X_normed = (X - X.mean(keepdims=True)) / (X.std(keepdims=True) + self.epsilon)
+
+ if self.mode == 0:
+ m = X.mean(axis=0)
+ # manual computation of std to prevent NaNs
+ std = T.mean((X-m)**2 + self.epsilon, axis=0) ** 0.5
+ X_normed = (X - m) / (std + self.epsilon)
+
+ elif self.mode == 1:
+ m = X.mean(axis=-1, keepdims=True)
+ std = X.std(axis=-1, keepdims=True)
+ X_normed = (X - m) / (std + self.epsilon)
+
out = self.gamma * X_normed + self.beta
return out
def get_config(self):
return {"name":self.__class__.__name__,
"input_shape":self.input_shape,
- "epsilon":self.epsilon}
? ^
+ "epsilon":self.epsilon,
? ^
+ "mode":self.mode}
|
ee2e1727ece6b591b39752a1d3cd6a87d972226d
|
github3/search/code.py
|
github3/search/code.py
|
from github3.models import GitHubCore
from github3.repos import Repository
class CodeSearchResult(GitHubCore):
def __init__(self, data, session=None):
super(CodeSearchResult, self).__init__(data, session)
self._api = data.get('url')
#: Filename the match occurs in
self.name = data.get('name')
#: Path in the repository to the file
self.path = data.get('path')
#: SHA in which the code can be found
self.sha = data.get('sha')
#: URL to the Git blob endpoint
self.git_url = data.get('git_url')
#: URL to the HTML view of the blob
self.html_url = data.get('html_url')
#: Repository the code snippet belongs to
self.repository = Repository(data.get('repository', {}), self)
#: Score of the result
self.score = data.get('score')
#: Text matches
self.text_matches = data.get('text_matches', [])
|
from github3.models import GitHubCore
from github3.repos import Repository
class CodeSearchResult(GitHubCore):
def __init__(self, data, session=None):
super(CodeSearchResult, self).__init__(data, session)
self._api = data.get('url')
#: Filename the match occurs in
self.name = data.get('name')
#: Path in the repository to the file
self.path = data.get('path')
#: SHA in which the code can be found
self.sha = data.get('sha')
#: URL to the Git blob endpoint
self.git_url = data.get('git_url')
#: URL to the HTML view of the blob
self.html_url = data.get('html_url')
#: Repository the code snippet belongs to
self.repository = Repository(data.get('repository', {}), self)
#: Score of the result
self.score = data.get('score')
#: Text matches
self.text_matches = data.get('text_matches', [])
def __repr__(self):
return '<CodeSearchResult [{0}]>'.format(self.path)
|
Add a __repr__ for CodeSearchResult
|
Add a __repr__ for CodeSearchResult
|
Python
|
bsd-3-clause
|
h4ck3rm1k3/github3.py,ueg1990/github3.py,degustaf/github3.py,krxsky/github3.py,sigmavirus24/github3.py,itsmemattchung/github3.py,agamdua/github3.py,wbrefvem/github3.py,jim-minter/github3.py,icio/github3.py,christophelec/github3.py,balloob/github3.py
|
from github3.models import GitHubCore
from github3.repos import Repository
class CodeSearchResult(GitHubCore):
def __init__(self, data, session=None):
super(CodeSearchResult, self).__init__(data, session)
self._api = data.get('url')
#: Filename the match occurs in
self.name = data.get('name')
#: Path in the repository to the file
self.path = data.get('path')
#: SHA in which the code can be found
self.sha = data.get('sha')
#: URL to the Git blob endpoint
self.git_url = data.get('git_url')
#: URL to the HTML view of the blob
self.html_url = data.get('html_url')
#: Repository the code snippet belongs to
self.repository = Repository(data.get('repository', {}), self)
#: Score of the result
self.score = data.get('score')
#: Text matches
self.text_matches = data.get('text_matches', [])
+ def __repr__(self):
+ return '<CodeSearchResult [{0}]>'.format(self.path)
+
|
Add a __repr__ for CodeSearchResult
|
## Code Before:
from github3.models import GitHubCore
from github3.repos import Repository
class CodeSearchResult(GitHubCore):
def __init__(self, data, session=None):
super(CodeSearchResult, self).__init__(data, session)
self._api = data.get('url')
#: Filename the match occurs in
self.name = data.get('name')
#: Path in the repository to the file
self.path = data.get('path')
#: SHA in which the code can be found
self.sha = data.get('sha')
#: URL to the Git blob endpoint
self.git_url = data.get('git_url')
#: URL to the HTML view of the blob
self.html_url = data.get('html_url')
#: Repository the code snippet belongs to
self.repository = Repository(data.get('repository', {}), self)
#: Score of the result
self.score = data.get('score')
#: Text matches
self.text_matches = data.get('text_matches', [])
## Instruction:
Add a __repr__ for CodeSearchResult
## Code After:
from github3.models import GitHubCore
from github3.repos import Repository
class CodeSearchResult(GitHubCore):
def __init__(self, data, session=None):
super(CodeSearchResult, self).__init__(data, session)
self._api = data.get('url')
#: Filename the match occurs in
self.name = data.get('name')
#: Path in the repository to the file
self.path = data.get('path')
#: SHA in which the code can be found
self.sha = data.get('sha')
#: URL to the Git blob endpoint
self.git_url = data.get('git_url')
#: URL to the HTML view of the blob
self.html_url = data.get('html_url')
#: Repository the code snippet belongs to
self.repository = Repository(data.get('repository', {}), self)
#: Score of the result
self.score = data.get('score')
#: Text matches
self.text_matches = data.get('text_matches', [])
def __repr__(self):
return '<CodeSearchResult [{0}]>'.format(self.path)
|
from github3.models import GitHubCore
from github3.repos import Repository
class CodeSearchResult(GitHubCore):
def __init__(self, data, session=None):
super(CodeSearchResult, self).__init__(data, session)
self._api = data.get('url')
#: Filename the match occurs in
self.name = data.get('name')
#: Path in the repository to the file
self.path = data.get('path')
#: SHA in which the code can be found
self.sha = data.get('sha')
#: URL to the Git blob endpoint
self.git_url = data.get('git_url')
#: URL to the HTML view of the blob
self.html_url = data.get('html_url')
#: Repository the code snippet belongs to
self.repository = Repository(data.get('repository', {}), self)
#: Score of the result
self.score = data.get('score')
#: Text matches
self.text_matches = data.get('text_matches', [])
+
+ def __repr__(self):
+ return '<CodeSearchResult [{0}]>'.format(self.path)
|
5b8edee2b6fa13fc1b05e15961d8b7920e6f9739
|
csunplugged/general/urls.py
|
csunplugged/general/urls.py
|
"""URL routing for the general application."""
from django.conf.urls import url
from . import views
urlpatterns = [
url(r"^$", views.GeneralIndexView.as_view(), name="home"),
url(r"^about/$", views.GeneralAboutView.as_view(), name="about"),
url(r"^computational-thinking/$", views.ComputationalThinkingView.as_view(), name="computational_thinking"),
url(r"^contact/$", views.GeneralContactView.as_view(), name="contact"),
url(r"^people/$", views.GeneralPeopleView.as_view(), name="people"),
url(r"^principles/$", views.GeneralPrinciplesView.as_view(), name="principles"),
]
|
"""URL routing for the general application."""
from django.conf.urls import url
from . import views
urlpatterns = [
url(
r"^$",
views.GeneralIndexView.as_view(),
name="home"
),
url(
r"^about/$",
views.GeneralAboutView.as_view(),
name="about"
),
url(
r"^computational-thinking/$",
views.ComputationalThinkingView.as_view(),
name="computational_thinking"
),
url(
r"^contact/$",
views.GeneralContactView.as_view(),
name="contact"
),
url(
r"^people/$",
views.GeneralPeopleView.as_view(),
name="people"
),
url(
r"^principles/$",
views.GeneralPrinciplesView.as_view(),
name="principles"
),
]
|
Update URLs file to match layout of other URLs files
|
Update URLs file to match layout of other URLs files
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
"""URL routing for the general application."""
from django.conf.urls import url
from . import views
urlpatterns = [
- url(r"^$", views.GeneralIndexView.as_view(), name="home"),
- url(r"^about/$", views.GeneralAboutView.as_view(), name="about"),
- url(r"^computational-thinking/$", views.ComputationalThinkingView.as_view(), name="computational_thinking"),
- url(r"^contact/$", views.GeneralContactView.as_view(), name="contact"),
- url(r"^people/$", views.GeneralPeopleView.as_view(), name="people"),
- url(r"^principles/$", views.GeneralPrinciplesView.as_view(), name="principles"),
+ url(
+ r"^$",
+ views.GeneralIndexView.as_view(),
+ name="home"
+ ),
+ url(
+ r"^about/$",
+ views.GeneralAboutView.as_view(),
+ name="about"
+ ),
+ url(
+ r"^computational-thinking/$",
+ views.ComputationalThinkingView.as_view(),
+ name="computational_thinking"
+ ),
+ url(
+ r"^contact/$",
+ views.GeneralContactView.as_view(),
+ name="contact"
+ ),
+ url(
+ r"^people/$",
+ views.GeneralPeopleView.as_view(),
+ name="people"
+ ),
+ url(
+ r"^principles/$",
+ views.GeneralPrinciplesView.as_view(),
+ name="principles"
+ ),
]
|
Update URLs file to match layout of other URLs files
|
## Code Before:
"""URL routing for the general application."""
from django.conf.urls import url
from . import views
urlpatterns = [
url(r"^$", views.GeneralIndexView.as_view(), name="home"),
url(r"^about/$", views.GeneralAboutView.as_view(), name="about"),
url(r"^computational-thinking/$", views.ComputationalThinkingView.as_view(), name="computational_thinking"),
url(r"^contact/$", views.GeneralContactView.as_view(), name="contact"),
url(r"^people/$", views.GeneralPeopleView.as_view(), name="people"),
url(r"^principles/$", views.GeneralPrinciplesView.as_view(), name="principles"),
]
## Instruction:
Update URLs file to match layout of other URLs files
## Code After:
"""URL routing for the general application."""
from django.conf.urls import url
from . import views
urlpatterns = [
url(
r"^$",
views.GeneralIndexView.as_view(),
name="home"
),
url(
r"^about/$",
views.GeneralAboutView.as_view(),
name="about"
),
url(
r"^computational-thinking/$",
views.ComputationalThinkingView.as_view(),
name="computational_thinking"
),
url(
r"^contact/$",
views.GeneralContactView.as_view(),
name="contact"
),
url(
r"^people/$",
views.GeneralPeopleView.as_view(),
name="people"
),
url(
r"^principles/$",
views.GeneralPrinciplesView.as_view(),
name="principles"
),
]
|
"""URL routing for the general application."""
from django.conf.urls import url
from . import views
urlpatterns = [
- url(r"^$", views.GeneralIndexView.as_view(), name="home"),
- url(r"^about/$", views.GeneralAboutView.as_view(), name="about"),
- url(r"^computational-thinking/$", views.ComputationalThinkingView.as_view(), name="computational_thinking"),
- url(r"^contact/$", views.GeneralContactView.as_view(), name="contact"),
- url(r"^people/$", views.GeneralPeopleView.as_view(), name="people"),
- url(r"^principles/$", views.GeneralPrinciplesView.as_view(), name="principles"),
+ url(
+ r"^$",
+ views.GeneralIndexView.as_view(),
+ name="home"
+ ),
+ url(
+ r"^about/$",
+ views.GeneralAboutView.as_view(),
+ name="about"
+ ),
+ url(
+ r"^computational-thinking/$",
+ views.ComputationalThinkingView.as_view(),
+ name="computational_thinking"
+ ),
+ url(
+ r"^contact/$",
+ views.GeneralContactView.as_view(),
+ name="contact"
+ ),
+ url(
+ r"^people/$",
+ views.GeneralPeopleView.as_view(),
+ name="people"
+ ),
+ url(
+ r"^principles/$",
+ views.GeneralPrinciplesView.as_view(),
+ name="principles"
+ ),
]
|
ffa6417b30517569cadff00aec839d968f3c91d7
|
bisnode/constants.py
|
bisnode/constants.py
|
COMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
|
COMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
NEW = 'AN'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
(NEW, "new"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
|
Add a new rating code
|
Add a new rating code
|
Python
|
mit
|
FundedByMe/django-bisnode
|
COMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
+ NEW = 'AN'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
+ (NEW, "new"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
|
Add a new rating code
|
## Code Before:
COMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
## Instruction:
Add a new rating code
## Code After:
COMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
NEW = 'AN'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
(NEW, "new"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
|
COMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
+ NEW = 'AN'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
+ (NEW, "new"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
|
9dc253b79d885ca205b557f88fca6fa35bd8fe21
|
tests/test_selector.py
|
tests/test_selector.py
|
from contextlib import contextmanager
from scell import Selector
from pytest import raises, fixture
def test_select(selector):
res = list(selector.select())
assert res
for event in res:
assert event.ready
def test_select_empty():
sel = Selector()
assert list(sel.select()) == []
def test_unregister(selector):
for fp in list(selector):
selector.unregister(fp)
assert not selector
def test_info(selector):
for fp in selector:
assert selector.info(fp).wants_read
assert selector.info(0) is None
def test_callbacks(selector):
res = selector.select()
exp = len(selector)
assert sum(m.callback() for m in res) == exp
def test_ready(selector):
ready = list(selector.ready())
assert ready
for event in ready:
assert event.ready
class TestScoped(object):
@fixture
def sel(self):
return Selector()
def test_peaceful(self, sel, handles):
with sel.scoped(handles) as monitors:
r = list(sel.ready())
for ev in r:
assert ev.monitored in monitors
assert ev.fp in handles
assert r
assert not sel
def test_exception(self, sel, handles):
with raises(NameError):
with sel.scoped(handles) as _:
raise NameError
assert not sel
|
from contextlib import contextmanager
from scell import Selector
from pytest import raises, fixture
def test_select(selector):
res = list(selector.select())
assert res
for event in res:
assert event.ready
def test_select_empty():
sel = Selector()
assert list(sel.select()) == []
def test_unregister(selector):
for fp in list(selector):
selector.unregister(fp)
assert not selector
def test_info(selector):
for fp in selector:
assert selector.info(fp).wants_read
assert selector.info(0) is None
def test_callbacks(selector):
res = selector.select()
exp = len(selector)
assert sum(m.callback() for m in res) == exp
def test_ready(selector):
ready = list(selector.ready())
assert ready
for event in ready:
assert event.ready
class TestScoped(object):
@fixture
def sel(self):
return Selector()
def test_peaceful(self, sel, handles):
with sel.scoped(handles) as monitors:
r = set(k.fp for k in sel.ready())
assert r == set(handles)
assert not sel
def test_exception(self, sel, handles):
with raises(NameError):
with sel.scoped(handles) as _:
raise NameError
assert not sel
|
Make Selector.scope test more rigorous
|
Make Selector.scope test more rigorous
|
Python
|
mit
|
eugene-eeo/scell
|
from contextlib import contextmanager
from scell import Selector
from pytest import raises, fixture
def test_select(selector):
res = list(selector.select())
assert res
for event in res:
assert event.ready
def test_select_empty():
sel = Selector()
assert list(sel.select()) == []
def test_unregister(selector):
for fp in list(selector):
selector.unregister(fp)
assert not selector
def test_info(selector):
for fp in selector:
assert selector.info(fp).wants_read
assert selector.info(0) is None
def test_callbacks(selector):
res = selector.select()
exp = len(selector)
assert sum(m.callback() for m in res) == exp
def test_ready(selector):
ready = list(selector.ready())
assert ready
for event in ready:
assert event.ready
class TestScoped(object):
@fixture
def sel(self):
return Selector()
def test_peaceful(self, sel, handles):
with sel.scoped(handles) as monitors:
- r = list(sel.ready())
+ r = set(k.fp for k in sel.ready())
+ assert r == set(handles)
- for ev in r:
- assert ev.monitored in monitors
- assert ev.fp in handles
- assert r
assert not sel
def test_exception(self, sel, handles):
with raises(NameError):
with sel.scoped(handles) as _:
raise NameError
assert not sel
|
Make Selector.scope test more rigorous
|
## Code Before:
from contextlib import contextmanager
from scell import Selector
from pytest import raises, fixture
def test_select(selector):
res = list(selector.select())
assert res
for event in res:
assert event.ready
def test_select_empty():
sel = Selector()
assert list(sel.select()) == []
def test_unregister(selector):
for fp in list(selector):
selector.unregister(fp)
assert not selector
def test_info(selector):
for fp in selector:
assert selector.info(fp).wants_read
assert selector.info(0) is None
def test_callbacks(selector):
res = selector.select()
exp = len(selector)
assert sum(m.callback() for m in res) == exp
def test_ready(selector):
ready = list(selector.ready())
assert ready
for event in ready:
assert event.ready
class TestScoped(object):
@fixture
def sel(self):
return Selector()
def test_peaceful(self, sel, handles):
with sel.scoped(handles) as monitors:
r = list(sel.ready())
for ev in r:
assert ev.monitored in monitors
assert ev.fp in handles
assert r
assert not sel
def test_exception(self, sel, handles):
with raises(NameError):
with sel.scoped(handles) as _:
raise NameError
assert not sel
## Instruction:
Make Selector.scope test more rigorous
## Code After:
from contextlib import contextmanager
from scell import Selector
from pytest import raises, fixture
def test_select(selector):
res = list(selector.select())
assert res
for event in res:
assert event.ready
def test_select_empty():
sel = Selector()
assert list(sel.select()) == []
def test_unregister(selector):
for fp in list(selector):
selector.unregister(fp)
assert not selector
def test_info(selector):
for fp in selector:
assert selector.info(fp).wants_read
assert selector.info(0) is None
def test_callbacks(selector):
res = selector.select()
exp = len(selector)
assert sum(m.callback() for m in res) == exp
def test_ready(selector):
ready = list(selector.ready())
assert ready
for event in ready:
assert event.ready
class TestScoped(object):
@fixture
def sel(self):
return Selector()
def test_peaceful(self, sel, handles):
with sel.scoped(handles) as monitors:
r = set(k.fp for k in sel.ready())
assert r == set(handles)
assert not sel
def test_exception(self, sel, handles):
with raises(NameError):
with sel.scoped(handles) as _:
raise NameError
assert not sel
|
from contextlib import contextmanager
from scell import Selector
from pytest import raises, fixture
def test_select(selector):
res = list(selector.select())
assert res
for event in res:
assert event.ready
def test_select_empty():
sel = Selector()
assert list(sel.select()) == []
def test_unregister(selector):
for fp in list(selector):
selector.unregister(fp)
assert not selector
def test_info(selector):
for fp in selector:
assert selector.info(fp).wants_read
assert selector.info(0) is None
def test_callbacks(selector):
res = selector.select()
exp = len(selector)
assert sum(m.callback() for m in res) == exp
def test_ready(selector):
ready = list(selector.ready())
assert ready
for event in ready:
assert event.ready
class TestScoped(object):
@fixture
def sel(self):
return Selector()
def test_peaceful(self, sel, handles):
with sel.scoped(handles) as monitors:
- r = list(sel.ready())
? --
+ r = set(k.fp for k in sel.ready())
? + ++++++++++++++
+ assert r == set(handles)
- for ev in r:
- assert ev.monitored in monitors
- assert ev.fp in handles
- assert r
assert not sel
def test_exception(self, sel, handles):
with raises(NameError):
with sel.scoped(handles) as _:
raise NameError
assert not sel
|
8b1516e638244824b1eafed7dc4abb2dc087ec74
|
nuts/nuts.py
|
nuts/nuts.py
|
import os
import sys
import argparse
import logging
import datetime
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
|
import os
import sys
import argparse
import logging
import datetime
import colorama
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
colorama.init()
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
|
Add colorama for coloring on windows
|
Add colorama for coloring on windows
Add the module colorama that makes ANSI escape character sequences work under MS Windows. The coloring is used to give a better overview about the testresults
|
Python
|
mit
|
HSRNetwork/Nuts
|
import os
import sys
import argparse
import logging
import datetime
+ import colorama
from src.application.Logger import Logger
-
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
+ colorama.init()
+
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
-
+
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
|
Add colorama for coloring on windows
|
## Code Before:
import os
import sys
import argparse
import logging
import datetime
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
## Instruction:
Add colorama for coloring on windows
## Code After:
import os
import sys
import argparse
import logging
import datetime
import colorama
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
colorama.init()
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
|
import os
import sys
import argparse
import logging
import datetime
+ import colorama
from src.application.Logger import Logger
-
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
+ colorama.init()
+
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
-
+
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
|
f7e2bcf941e2a15a3bc28ebf3f15244df6f0d758
|
posts/versatileimagefield.py
|
posts/versatileimagefield.py
|
from django.conf import settings
from versatileimagefield.datastructures.filteredimage import FilteredImage
from versatileimagefield.registry import versatileimagefield_registry
from PIL import Image, ImageDraw, ImageFont
from io import BytesIO
class Watermark(FilteredImage):
def process_image(self, image, image_format, save_kwargs={}):
"""
Returns a BytesIO instance of `image` with inverted colors
"""
if image.mode != 'RGBA':
image = image.convert('RGBA')
txt = Image.new('RGBA', image.size, (255,255,255,0))
fontsize = int(image.size[1] * 0.1)
# get a font
fnt = ImageFont.truetype(
'/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf',
fontsize,
)
# get a drawing context
d = ImageDraw.Draw(txt)
# draw text, half opacity
d.text(
(10, image.size[1] - 10 - fontsize),
settings.WATERMARK_TEXT,
font=fnt,
fill=(255,255,255,30)
)
out = Image.alpha_composite(image, txt)
out = out.convert('RGB')
imagefile = BytesIO()
out.save(
imagefile,
**save_kwargs
)
return imagefile
versatileimagefield_registry.register_filter('watermark', Watermark)
|
import os.path
from django.conf import settings
from versatileimagefield.datastructures.filteredimage import FilteredImage
from versatileimagefield.registry import versatileimagefield_registry
from PIL import Image, ImageDraw, ImageFont
from io import BytesIO
class Watermark(FilteredImage):
def process_image(self, image, image_format, save_kwargs={}):
"""
Returns a BytesIO instance of `image` with inverted colors
"""
if image.mode != 'RGBA':
image = image.convert('RGBA')
txt = Image.new('RGBA', image.size, (255,255,255,0))
height = image.size[1]
fontsize = int(image.size[1] * 0.1)
# get a font
fnt = ImageFont.truetype(
os.path.join(
os.path.dirname(os.path.dirname(__file__)),
'font', 'conthrax-sb.ttf'
),
fontsize,
)
# get a drawing context
d = ImageDraw.Draw(txt)
# draw text, half opacity
d.text(
(10 + fontsize * .2, height - 10 - fontsize - fontsize * .2),
settings.WATERMARK_TEXT,
font=fnt,
fill=(255,255,255,30)
)
out = Image.alpha_composite(image, txt)
out = out.convert('RGB')
imagefile = BytesIO()
out.save(
imagefile,
**save_kwargs
)
return imagefile
versatileimagefield_registry.register_filter('watermark', Watermark)
|
Use custom font for watermark
|
Use custom font for watermark
Signed-off-by: Michal Čihař <[email protected]>
|
Python
|
agpl-3.0
|
nijel/photoblog,nijel/photoblog
|
+ import os.path
from django.conf import settings
from versatileimagefield.datastructures.filteredimage import FilteredImage
from versatileimagefield.registry import versatileimagefield_registry
from PIL import Image, ImageDraw, ImageFont
from io import BytesIO
class Watermark(FilteredImage):
def process_image(self, image, image_format, save_kwargs={}):
"""
Returns a BytesIO instance of `image` with inverted colors
"""
if image.mode != 'RGBA':
image = image.convert('RGBA')
txt = Image.new('RGBA', image.size, (255,255,255,0))
+ height = image.size[1]
fontsize = int(image.size[1] * 0.1)
# get a font
fnt = ImageFont.truetype(
- '/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf',
+ os.path.join(
+ os.path.dirname(os.path.dirname(__file__)),
+ 'font', 'conthrax-sb.ttf'
+ ),
fontsize,
)
# get a drawing context
d = ImageDraw.Draw(txt)
# draw text, half opacity
d.text(
- (10, image.size[1] - 10 - fontsize),
+ (10 + fontsize * .2, height - 10 - fontsize - fontsize * .2),
settings.WATERMARK_TEXT,
font=fnt,
fill=(255,255,255,30)
)
out = Image.alpha_composite(image, txt)
out = out.convert('RGB')
imagefile = BytesIO()
out.save(
imagefile,
**save_kwargs
)
return imagefile
versatileimagefield_registry.register_filter('watermark', Watermark)
|
Use custom font for watermark
|
## Code Before:
from django.conf import settings
from versatileimagefield.datastructures.filteredimage import FilteredImage
from versatileimagefield.registry import versatileimagefield_registry
from PIL import Image, ImageDraw, ImageFont
from io import BytesIO
class Watermark(FilteredImage):
def process_image(self, image, image_format, save_kwargs={}):
"""
Returns a BytesIO instance of `image` with inverted colors
"""
if image.mode != 'RGBA':
image = image.convert('RGBA')
txt = Image.new('RGBA', image.size, (255,255,255,0))
fontsize = int(image.size[1] * 0.1)
# get a font
fnt = ImageFont.truetype(
'/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf',
fontsize,
)
# get a drawing context
d = ImageDraw.Draw(txt)
# draw text, half opacity
d.text(
(10, image.size[1] - 10 - fontsize),
settings.WATERMARK_TEXT,
font=fnt,
fill=(255,255,255,30)
)
out = Image.alpha_composite(image, txt)
out = out.convert('RGB')
imagefile = BytesIO()
out.save(
imagefile,
**save_kwargs
)
return imagefile
versatileimagefield_registry.register_filter('watermark', Watermark)
## Instruction:
Use custom font for watermark
## Code After:
import os.path
from django.conf import settings
from versatileimagefield.datastructures.filteredimage import FilteredImage
from versatileimagefield.registry import versatileimagefield_registry
from PIL import Image, ImageDraw, ImageFont
from io import BytesIO
class Watermark(FilteredImage):
def process_image(self, image, image_format, save_kwargs={}):
"""
Returns a BytesIO instance of `image` with inverted colors
"""
if image.mode != 'RGBA':
image = image.convert('RGBA')
txt = Image.new('RGBA', image.size, (255,255,255,0))
height = image.size[1]
fontsize = int(image.size[1] * 0.1)
# get a font
fnt = ImageFont.truetype(
os.path.join(
os.path.dirname(os.path.dirname(__file__)),
'font', 'conthrax-sb.ttf'
),
fontsize,
)
# get a drawing context
d = ImageDraw.Draw(txt)
# draw text, half opacity
d.text(
(10 + fontsize * .2, height - 10 - fontsize - fontsize * .2),
settings.WATERMARK_TEXT,
font=fnt,
fill=(255,255,255,30)
)
out = Image.alpha_composite(image, txt)
out = out.convert('RGB')
imagefile = BytesIO()
out.save(
imagefile,
**save_kwargs
)
return imagefile
versatileimagefield_registry.register_filter('watermark', Watermark)
|
+ import os.path
from django.conf import settings
from versatileimagefield.datastructures.filteredimage import FilteredImage
from versatileimagefield.registry import versatileimagefield_registry
from PIL import Image, ImageDraw, ImageFont
from io import BytesIO
class Watermark(FilteredImage):
def process_image(self, image, image_format, save_kwargs={}):
"""
Returns a BytesIO instance of `image` with inverted colors
"""
if image.mode != 'RGBA':
image = image.convert('RGBA')
txt = Image.new('RGBA', image.size, (255,255,255,0))
+ height = image.size[1]
fontsize = int(image.size[1] * 0.1)
# get a font
fnt = ImageFont.truetype(
- '/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf',
+ os.path.join(
+ os.path.dirname(os.path.dirname(__file__)),
+ 'font', 'conthrax-sb.ttf'
+ ),
fontsize,
)
# get a drawing context
d = ImageDraw.Draw(txt)
# draw text, half opacity
d.text(
- (10, image.size[1] - 10 - fontsize),
+ (10 + fontsize * .2, height - 10 - fontsize - fontsize * .2),
settings.WATERMARK_TEXT,
font=fnt,
fill=(255,255,255,30)
)
out = Image.alpha_composite(image, txt)
out = out.convert('RGB')
imagefile = BytesIO()
out.save(
imagefile,
**save_kwargs
)
return imagefile
versatileimagefield_registry.register_filter('watermark', Watermark)
|
18f3cd10d07467eb9770ffe52b3d5b007f6967fe
|
cupy/array_api/_typing.py
|
cupy/array_api/_typing.py
|
from cupy.cuda import Device as _Device
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
NestedSequence = Sequence[Sequence[Any]]
Device = _Device
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
from cupy.cuda import Device as _Device
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING, TypeVar
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
_T = TypeVar("_T")
NestedSequence = Sequence[Sequence[_T]]
Device = _Device
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
Add a missing subscription slot to `NestedSequence`
|
MAINT: Add a missing subscription slot to `NestedSequence`
|
Python
|
mit
|
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
|
from cupy.cuda import Device as _Device
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
- from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING
+ from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING, TypeVar
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
+ _T = TypeVar("_T")
- NestedSequence = Sequence[Sequence[Any]]
+ NestedSequence = Sequence[Sequence[_T]]
Device = _Device
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
Add a missing subscription slot to `NestedSequence`
|
## Code Before:
from cupy.cuda import Device as _Device
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
NestedSequence = Sequence[Sequence[Any]]
Device = _Device
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
## Instruction:
Add a missing subscription slot to `NestedSequence`
## Code After:
from cupy.cuda import Device as _Device
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING, TypeVar
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
_T = TypeVar("_T")
NestedSequence = Sequence[Sequence[_T]]
Device = _Device
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
from cupy.cuda import Device as _Device
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
- from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING
+ from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING, TypeVar
? +++++++++
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
+ _T = TypeVar("_T")
- NestedSequence = Sequence[Sequence[Any]]
? ^^^
+ NestedSequence = Sequence[Sequence[_T]]
? ^^
Device = _Device
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
9fa562a413900252acd27d6f1b90055df2e95fe2
|
tests/test_apply.py
|
tests/test_apply.py
|
import unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
|
import unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_local(self):
g = {'AttrSettings': AttrSettings}
cbs.apply('AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_invalid_string_local(self):
self.assertRaises(ValueError, cbs.apply, 'LocalSettings', {})
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
|
Test all the code paths
|
Test all the code paths
|
Python
|
bsd-2-clause
|
ar45/django-classy-settings,pombredanne/django-classy-settings,tysonclugg/django-classy-settings,funkybob/django-classy-settings
|
import unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
+ def test_apply_settings_string_local(self):
+ g = {'AttrSettings': AttrSettings}
+ cbs.apply('AttrSettings', g)
+
+ self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
+
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
+ def test_apply_settings_invalid_string_local(self):
+ self.assertRaises(ValueError, cbs.apply, 'LocalSettings', {})
+
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
|
Test all the code paths
|
## Code Before:
import unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
## Instruction:
Test all the code paths
## Code After:
import unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_local(self):
g = {'AttrSettings': AttrSettings}
cbs.apply('AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_invalid_string_local(self):
self.assertRaises(ValueError, cbs.apply, 'LocalSettings', {})
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
|
import unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
+ def test_apply_settings_string_local(self):
+ g = {'AttrSettings': AttrSettings}
+ cbs.apply('AttrSettings', g)
+
+ self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
+
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
+ def test_apply_settings_invalid_string_local(self):
+ self.assertRaises(ValueError, cbs.apply, 'LocalSettings', {})
+
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
|
757df7c04d862feb9067ae52c83875fc2e3aedf8
|
cla_backend/apps/cla_provider/admin/base.py
|
cla_backend/apps/cla_provider/admin/base.py
|
from django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
admin.site.register(ProviderAllocation)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
|
from django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
|
Disable ProviderAllocation admin page, still accessible from Provider Inlines
|
Disable ProviderAllocation admin page, still accessible from Provider Inlines
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
from django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
- admin.site.register(ProviderAllocation)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
|
Disable ProviderAllocation admin page, still accessible from Provider Inlines
|
## Code Before:
from django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
admin.site.register(ProviderAllocation)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
## Instruction:
Disable ProviderAllocation admin page, still accessible from Provider Inlines
## Code After:
from django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
|
from django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
- admin.site.register(ProviderAllocation)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
|
376fcbd76bb2f0de3c738ac66ac5526b6685d18a
|
plim/extensions.py
|
plim/extensions.py
|
from docutils.core import publish_parts
import coffeescript
from scss import Scss
from stylus import Stylus
from .util import as_unicode
def rst_to_html(source):
# This code was taken from http://wiki.python.org/moin/ReStructuredText
# You may also be interested in http://www.tele3.cz/jbar/rest/about.html
html = publish_parts(source=source, writer_name='html')
return html['html_body']
def coffee_to_js(source):
return as_unicode('<script>{js}</script>').format(js=coffeescript.compile(source))
def scss_to_css(source):
css = Scss().compile(source).strip()
return as_unicode('<style>{css}</style>').format(css=css)
def stylus_to_css(source):
compiler = Stylus(plugins={'nib':{}})
return as_unicode('<style>{css}</style>').format(css=compiler.compile(source).strip())
|
from docutils.core import publish_parts
import coffeescript
from scss import Scss
from stylus import Stylus
from .util import as_unicode
def rst_to_html(source):
# This code was taken from http://wiki.python.org/moin/ReStructuredText
# You may also be interested in http://www.tele3.cz/jbar/rest/about.html
html = publish_parts(source=source, writer_name='html')
return html['html_body']
def coffee_to_js(source):
return as_unicode('<script>{js}</script>').format(js=coffeescript.compile(source))
def scss_to_css(source):
css = Scss().compile(source).strip()
return as_unicode('<style>{css}</style>').format(css=css)
def stylus_to_css(source):
compiler = Stylus()
return as_unicode('<style>{css}</style>').format(css=compiler.compile(source).strip())
|
Fix execjs.ProgramError: Error: Cannot find module 'nib' for -stylus
|
Fix execjs.ProgramError: Error: Cannot find module 'nib' for -stylus
|
Python
|
mit
|
kxxoling/Plim
|
from docutils.core import publish_parts
import coffeescript
from scss import Scss
from stylus import Stylus
from .util import as_unicode
def rst_to_html(source):
# This code was taken from http://wiki.python.org/moin/ReStructuredText
# You may also be interested in http://www.tele3.cz/jbar/rest/about.html
html = publish_parts(source=source, writer_name='html')
return html['html_body']
def coffee_to_js(source):
return as_unicode('<script>{js}</script>').format(js=coffeescript.compile(source))
def scss_to_css(source):
css = Scss().compile(source).strip()
return as_unicode('<style>{css}</style>').format(css=css)
def stylus_to_css(source):
- compiler = Stylus(plugins={'nib':{}})
+ compiler = Stylus()
return as_unicode('<style>{css}</style>').format(css=compiler.compile(source).strip())
|
Fix execjs.ProgramError: Error: Cannot find module 'nib' for -stylus
|
## Code Before:
from docutils.core import publish_parts
import coffeescript
from scss import Scss
from stylus import Stylus
from .util import as_unicode
def rst_to_html(source):
# This code was taken from http://wiki.python.org/moin/ReStructuredText
# You may also be interested in http://www.tele3.cz/jbar/rest/about.html
html = publish_parts(source=source, writer_name='html')
return html['html_body']
def coffee_to_js(source):
return as_unicode('<script>{js}</script>').format(js=coffeescript.compile(source))
def scss_to_css(source):
css = Scss().compile(source).strip()
return as_unicode('<style>{css}</style>').format(css=css)
def stylus_to_css(source):
compiler = Stylus(plugins={'nib':{}})
return as_unicode('<style>{css}</style>').format(css=compiler.compile(source).strip())
## Instruction:
Fix execjs.ProgramError: Error: Cannot find module 'nib' for -stylus
## Code After:
from docutils.core import publish_parts
import coffeescript
from scss import Scss
from stylus import Stylus
from .util import as_unicode
def rst_to_html(source):
# This code was taken from http://wiki.python.org/moin/ReStructuredText
# You may also be interested in http://www.tele3.cz/jbar/rest/about.html
html = publish_parts(source=source, writer_name='html')
return html['html_body']
def coffee_to_js(source):
return as_unicode('<script>{js}</script>').format(js=coffeescript.compile(source))
def scss_to_css(source):
css = Scss().compile(source).strip()
return as_unicode('<style>{css}</style>').format(css=css)
def stylus_to_css(source):
compiler = Stylus()
return as_unicode('<style>{css}</style>').format(css=compiler.compile(source).strip())
|
from docutils.core import publish_parts
import coffeescript
from scss import Scss
from stylus import Stylus
from .util import as_unicode
def rst_to_html(source):
# This code was taken from http://wiki.python.org/moin/ReStructuredText
# You may also be interested in http://www.tele3.cz/jbar/rest/about.html
html = publish_parts(source=source, writer_name='html')
return html['html_body']
def coffee_to_js(source):
return as_unicode('<script>{js}</script>').format(js=coffeescript.compile(source))
def scss_to_css(source):
css = Scss().compile(source).strip()
return as_unicode('<style>{css}</style>').format(css=css)
def stylus_to_css(source):
- compiler = Stylus(plugins={'nib':{}})
+ compiler = Stylus()
return as_unicode('<style>{css}</style>').format(css=compiler.compile(source).strip())
|
1f6ba483902c59dc70d15ea1e33957ac6a874f01
|
freesound_datasets/local_settings.example.py
|
freesound_datasets/local_settings.example.py
|
FS_CLIENT_ID = 'FREESOUND_KEY'
FS_CLIENT_SECRET = 'FREESOUND_SECRET'
# Freesound keys for "login with" functionality
# Get credentials at http://www.freesound.org/apiv2/apply
# Set callback url to http://localhost:8000/social/complete/freesound/
SOCIAL_AUTH_FREESOUND_KEY = None
SOCIAL_AUTH_FREESOUND_SECRET = 'FREESOUND_SECRET'
# Google keys for "login with" functionality
# Get credentials at https://console.developers.google.com
# Set callback url to http://localhost:8000/social/complete/google-oauth2/
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = None # (remove the part starting with the dot .)
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = 'GOOGLE_SECRET'
# Facebook keys for "login with" functionality
# See instructions in https://simpleisbetterthancomplex.com/tutorial/2016/10/24/how-to-add-social-login-to-django.html
# NOTE: might not work in localhost
SOCIAL_AUTH_FACEBOOK_KEY = None
SOCIAL_AUTH_FACEBOOK_SECRET = 'FACEBOOK_SECRET'
# Github keys for "login with" functionality
# Get credentials at https://github.com/settings/applications/new
# Set callback url to http://localhost:8000/social/complete/github/
SOCIAL_AUTH_GITHUB_KEY = None
SOCIAL_AUTH_GITHUB_SECRET = 'GITHUB_SECRET'
|
FS_CLIENT_ID = 'FREESOUND_KEY'
FS_CLIENT_SECRET = 'FREESOUND_SECRET'
# Freesound keys for "login with" functionality
# Get credentials at http://www.freesound.org/apiv2/apply
# Set callback url to http://localhost:8000/social/complete/freesound/
SOCIAL_AUTH_FREESOUND_KEY = None
SOCIAL_AUTH_FREESOUND_SECRET = 'FREESOUND_SECRET'
|
Remove unused social auth keys
|
Remove unused social auth keys
|
Python
|
agpl-3.0
|
MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets
|
FS_CLIENT_ID = 'FREESOUND_KEY'
FS_CLIENT_SECRET = 'FREESOUND_SECRET'
# Freesound keys for "login with" functionality
# Get credentials at http://www.freesound.org/apiv2/apply
# Set callback url to http://localhost:8000/social/complete/freesound/
SOCIAL_AUTH_FREESOUND_KEY = None
SOCIAL_AUTH_FREESOUND_SECRET = 'FREESOUND_SECRET'
- # Google keys for "login with" functionality
- # Get credentials at https://console.developers.google.com
- # Set callback url to http://localhost:8000/social/complete/google-oauth2/
- SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = None # (remove the part starting with the dot .)
- SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = 'GOOGLE_SECRET'
-
- # Facebook keys for "login with" functionality
- # See instructions in https://simpleisbetterthancomplex.com/tutorial/2016/10/24/how-to-add-social-login-to-django.html
- # NOTE: might not work in localhost
- SOCIAL_AUTH_FACEBOOK_KEY = None
- SOCIAL_AUTH_FACEBOOK_SECRET = 'FACEBOOK_SECRET'
-
- # Github keys for "login with" functionality
- # Get credentials at https://github.com/settings/applications/new
- # Set callback url to http://localhost:8000/social/complete/github/
- SOCIAL_AUTH_GITHUB_KEY = None
- SOCIAL_AUTH_GITHUB_SECRET = 'GITHUB_SECRET'
-
|
Remove unused social auth keys
|
## Code Before:
FS_CLIENT_ID = 'FREESOUND_KEY'
FS_CLIENT_SECRET = 'FREESOUND_SECRET'
# Freesound keys for "login with" functionality
# Get credentials at http://www.freesound.org/apiv2/apply
# Set callback url to http://localhost:8000/social/complete/freesound/
SOCIAL_AUTH_FREESOUND_KEY = None
SOCIAL_AUTH_FREESOUND_SECRET = 'FREESOUND_SECRET'
# Google keys for "login with" functionality
# Get credentials at https://console.developers.google.com
# Set callback url to http://localhost:8000/social/complete/google-oauth2/
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = None # (remove the part starting with the dot .)
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = 'GOOGLE_SECRET'
# Facebook keys for "login with" functionality
# See instructions in https://simpleisbetterthancomplex.com/tutorial/2016/10/24/how-to-add-social-login-to-django.html
# NOTE: might not work in localhost
SOCIAL_AUTH_FACEBOOK_KEY = None
SOCIAL_AUTH_FACEBOOK_SECRET = 'FACEBOOK_SECRET'
# Github keys for "login with" functionality
# Get credentials at https://github.com/settings/applications/new
# Set callback url to http://localhost:8000/social/complete/github/
SOCIAL_AUTH_GITHUB_KEY = None
SOCIAL_AUTH_GITHUB_SECRET = 'GITHUB_SECRET'
## Instruction:
Remove unused social auth keys
## Code After:
FS_CLIENT_ID = 'FREESOUND_KEY'
FS_CLIENT_SECRET = 'FREESOUND_SECRET'
# Freesound keys for "login with" functionality
# Get credentials at http://www.freesound.org/apiv2/apply
# Set callback url to http://localhost:8000/social/complete/freesound/
SOCIAL_AUTH_FREESOUND_KEY = None
SOCIAL_AUTH_FREESOUND_SECRET = 'FREESOUND_SECRET'
|
FS_CLIENT_ID = 'FREESOUND_KEY'
FS_CLIENT_SECRET = 'FREESOUND_SECRET'
# Freesound keys for "login with" functionality
# Get credentials at http://www.freesound.org/apiv2/apply
# Set callback url to http://localhost:8000/social/complete/freesound/
SOCIAL_AUTH_FREESOUND_KEY = None
SOCIAL_AUTH_FREESOUND_SECRET = 'FREESOUND_SECRET'
-
- # Google keys for "login with" functionality
- # Get credentials at https://console.developers.google.com
- # Set callback url to http://localhost:8000/social/complete/google-oauth2/
- SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = None # (remove the part starting with the dot .)
- SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = 'GOOGLE_SECRET'
-
- # Facebook keys for "login with" functionality
- # See instructions in https://simpleisbetterthancomplex.com/tutorial/2016/10/24/how-to-add-social-login-to-django.html
- # NOTE: might not work in localhost
- SOCIAL_AUTH_FACEBOOK_KEY = None
- SOCIAL_AUTH_FACEBOOK_SECRET = 'FACEBOOK_SECRET'
-
- # Github keys for "login with" functionality
- # Get credentials at https://github.com/settings/applications/new
- # Set callback url to http://localhost:8000/social/complete/github/
- SOCIAL_AUTH_GITHUB_KEY = None
- SOCIAL_AUTH_GITHUB_SECRET = 'GITHUB_SECRET'
|
91ea026fa0c354c81cf0a1e52dbbe626b83a00f8
|
app.py
|
app.py
|
import feedparser
from flask import Flask, render_template
app = Flask(__name__)
BBC_FEED = "http://feeds.bbci.co.uk/news/rss.xml"
@app.route("/")
def index():
feed = feedparser.parse(BBC_FEED)
return render_template("index.html", feed=feed.get('entries'))
if __name__ == "__main__":
app.run()
|
import requests
from flask import Flask, render_template
app = Flask(__name__)
BBC_FEED = "http://feeds.bbci.co.uk/news/rss.xml"
API_KEY = "c4002216fa5446d582b5f31d73959d36"
@app.route("/")
def index():
r = requests.get(
f"https://newsapi.org/v1/articles?source=the-next-web&sortBy=latest&apiKey={API_KEY}"
)
return render_template("index.html", articles=r.json().get("articles"))
if __name__ == "__main__":
app.run()
|
Use requests instead of feedparser.
|
Use requests instead of feedparser.
|
Python
|
mit
|
alchermd/headlines,alchermd/headlines
|
- import feedparser
+ import requests
from flask import Flask, render_template
app = Flask(__name__)
BBC_FEED = "http://feeds.bbci.co.uk/news/rss.xml"
+ API_KEY = "c4002216fa5446d582b5f31d73959d36"
@app.route("/")
def index():
- feed = feedparser.parse(BBC_FEED)
+ r = requests.get(
+ f"https://newsapi.org/v1/articles?source=the-next-web&sortBy=latest&apiKey={API_KEY}"
+ )
- return render_template("index.html", feed=feed.get('entries'))
+ return render_template("index.html", articles=r.json().get("articles"))
if __name__ == "__main__":
app.run()
|
Use requests instead of feedparser.
|
## Code Before:
import feedparser
from flask import Flask, render_template
app = Flask(__name__)
BBC_FEED = "http://feeds.bbci.co.uk/news/rss.xml"
@app.route("/")
def index():
feed = feedparser.parse(BBC_FEED)
return render_template("index.html", feed=feed.get('entries'))
if __name__ == "__main__":
app.run()
## Instruction:
Use requests instead of feedparser.
## Code After:
import requests
from flask import Flask, render_template
app = Flask(__name__)
BBC_FEED = "http://feeds.bbci.co.uk/news/rss.xml"
API_KEY = "c4002216fa5446d582b5f31d73959d36"
@app.route("/")
def index():
r = requests.get(
f"https://newsapi.org/v1/articles?source=the-next-web&sortBy=latest&apiKey={API_KEY}"
)
return render_template("index.html", articles=r.json().get("articles"))
if __name__ == "__main__":
app.run()
|
- import feedparser
+ import requests
from flask import Flask, render_template
app = Flask(__name__)
BBC_FEED = "http://feeds.bbci.co.uk/news/rss.xml"
+ API_KEY = "c4002216fa5446d582b5f31d73959d36"
@app.route("/")
def index():
- feed = feedparser.parse(BBC_FEED)
+ r = requests.get(
+ f"https://newsapi.org/v1/articles?source=the-next-web&sortBy=latest&apiKey={API_KEY}"
+ )
- return render_template("index.html", feed=feed.get('entries'))
? ^ ^^ ^^^^ ^^^ - ^
+ return render_template("index.html", articles=r.json().get("articles"))
? ^^^^^^ ^ ^^^^^^^^ ^^^ ++ ^
if __name__ == "__main__":
app.run()
|
74b03f3d47011bad6129f8ccfe466a4b28d2338a
|
troposphere/workspaces.py
|
troposphere/workspaces.py
|
from . import AWSObject
from .validators import boolean
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
}
|
from . import AWSObject, AWSProperty, Tags
from .validators import boolean, integer
class WorkspaceProperties(AWSProperty):
props = {
'ComputeTypeName': (basestring, False),
'RootVolumeSizeGib': (integer, False),
'RunningMode': (basestring, False),
'RunningModeAutoStopTimeoutInMinutes': (integer, False),
'UserVolumeSizeGib': (integer, False),
}
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
'Tags': (Tags, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
'WorkspaceProperties': (WorkspaceProperties, False),
}
|
Add Tags and WorkspaceProperties to WorkSpaces::Workspace
|
Add Tags and WorkspaceProperties to WorkSpaces::Workspace
|
Python
|
bsd-2-clause
|
johnctitus/troposphere,cloudtools/troposphere,johnctitus/troposphere,pas256/troposphere,pas256/troposphere,cloudtools/troposphere,ikben/troposphere,ikben/troposphere
|
- from . import AWSObject
+ from . import AWSObject, AWSProperty, Tags
- from .validators import boolean
+ from .validators import boolean, integer
+
+
+ class WorkspaceProperties(AWSProperty):
+ props = {
+ 'ComputeTypeName': (basestring, False),
+ 'RootVolumeSizeGib': (integer, False),
+ 'RunningMode': (basestring, False),
+ 'RunningModeAutoStopTimeoutInMinutes': (integer, False),
+ 'UserVolumeSizeGib': (integer, False),
+ }
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
+ 'Tags': (Tags, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
+ 'WorkspaceProperties': (WorkspaceProperties, False),
}
|
Add Tags and WorkspaceProperties to WorkSpaces::Workspace
|
## Code Before:
from . import AWSObject
from .validators import boolean
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
}
## Instruction:
Add Tags and WorkspaceProperties to WorkSpaces::Workspace
## Code After:
from . import AWSObject, AWSProperty, Tags
from .validators import boolean, integer
class WorkspaceProperties(AWSProperty):
props = {
'ComputeTypeName': (basestring, False),
'RootVolumeSizeGib': (integer, False),
'RunningMode': (basestring, False),
'RunningModeAutoStopTimeoutInMinutes': (integer, False),
'UserVolumeSizeGib': (integer, False),
}
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
'Tags': (Tags, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
'WorkspaceProperties': (WorkspaceProperties, False),
}
|
- from . import AWSObject
+ from . import AWSObject, AWSProperty, Tags
- from .validators import boolean
+ from .validators import boolean, integer
? +++++++++
+
+
+ class WorkspaceProperties(AWSProperty):
+ props = {
+ 'ComputeTypeName': (basestring, False),
+ 'RootVolumeSizeGib': (integer, False),
+ 'RunningMode': (basestring, False),
+ 'RunningModeAutoStopTimeoutInMinutes': (integer, False),
+ 'UserVolumeSizeGib': (integer, False),
+ }
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
+ 'Tags': (Tags, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
+ 'WorkspaceProperties': (WorkspaceProperties, False),
}
|
bef1e44e027284e193be889b5ca273c906ae8325
|
snippets/__main__.py
|
snippets/__main__.py
|
import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('path')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.path)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
|
import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--source', default='snippets')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.source)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
|
Make repository source optional in cli
|
Make repository source optional in cli
|
Python
|
isc
|
trilan/snippets,trilan/snippets
|
import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
- parser.add_argument('path')
+ parser.add_argument('-s', '--source', default='snippets')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
- repository.add_repopath(args.path)
+ repository.add_repopath(args.source)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
|
Make repository source optional in cli
|
## Code Before:
import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('path')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.path)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
## Instruction:
Make repository source optional in cli
## Code After:
import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--source', default='snippets')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.source)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
|
import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
- parser.add_argument('path')
+ parser.add_argument('-s', '--source', default='snippets')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
- repository.add_repopath(args.path)
? ^^^^
+ repository.add_repopath(args.source)
? ^^^^^^
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
|
0de3f3380eda3ed541fbf37243e13243a5ad6e1e
|
tests/test_open.py
|
tests/test_open.py
|
import unittest
import yv_suggest.open as yvs
import inspect
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
|
import unittest
import yv_suggest.open as yvs
import inspect
class WebbrowserMock(object):
'''mock the builtin webbrowser module'''
def open(self, url):
'''mock the webbrowser.open() function'''
self.url = url
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
def test_url_open(self):
'''should attempt to open URL using webbrowser module'''
mock = self.WebbrowserMock()
yvs.webbrowser = mock
yvs.main('nlt/jhn.3.17')
self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17')
|
Add unit test for opening bible reference urls
|
Add unit test for opening bible reference urls
|
Python
|
mit
|
caleb531/youversion-suggest,caleb531/youversion-suggest
|
import unittest
import yv_suggest.open as yvs
import inspect
+
+ class WebbrowserMock(object):
+ '''mock the builtin webbrowser module'''
+
+ def open(self, url):
+ '''mock the webbrowser.open() function'''
+ self.url = url
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
+ def test_url_open(self):
+ '''should attempt to open URL using webbrowser module'''
+ mock = self.WebbrowserMock()
+ yvs.webbrowser = mock
+ yvs.main('nlt/jhn.3.17')
+ self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17')
+
|
Add unit test for opening bible reference urls
|
## Code Before:
import unittest
import yv_suggest.open as yvs
import inspect
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
## Instruction:
Add unit test for opening bible reference urls
## Code After:
import unittest
import yv_suggest.open as yvs
import inspect
class WebbrowserMock(object):
'''mock the builtin webbrowser module'''
def open(self, url):
'''mock the webbrowser.open() function'''
self.url = url
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
def test_url_open(self):
'''should attempt to open URL using webbrowser module'''
mock = self.WebbrowserMock()
yvs.webbrowser = mock
yvs.main('nlt/jhn.3.17')
self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17')
|
import unittest
import yv_suggest.open as yvs
import inspect
+
+ class WebbrowserMock(object):
+ '''mock the builtin webbrowser module'''
+
+ def open(self, url):
+ '''mock the webbrowser.open() function'''
+ self.url = url
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
+
+ def test_url_open(self):
+ '''should attempt to open URL using webbrowser module'''
+ mock = self.WebbrowserMock()
+ yvs.webbrowser = mock
+ yvs.main('nlt/jhn.3.17')
+ self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17')
|
4bb8a61cde27575865cdd2b7df5afcb5d6860523
|
fmriprep/interfaces/tests/test_reports.py
|
fmriprep/interfaces/tests/test_reports.py
|
import pytest
from ..reports import get_world_pedir
@pytest.mark.parametrize("orientation,pe_dir,expected", [
('RAS', 'j', 'Posterior-Anterior'),
('RAS', 'j-', 'Anterior-Posterior'),
('RAS', 'i', 'Left-Right'),
('RAS', 'i-', 'Right-Left'),
('RAS', 'k', 'Inferior-Superior'),
('RAS', 'k-', 'Superior-Inferior'),
('LAS', 'j', 'Posterior-Anterior'),
('LAS', 'i-', 'Left-Right'),
('LAS', 'k-', 'Superior-Inferior'),
('LPI', 'j', 'Anterior-Posterior'),
('LPI', 'i-', 'Left-Right'),
('LPI', 'k-', 'Inferior-Superior'),
])
def test_get_world_pedir(tmpdir, orientation, pe_dir, expected):
assert get_world_pedir(orientation, pe_dir) == expected
|
import pytest
from ..reports import get_world_pedir
@pytest.mark.parametrize("orientation,pe_dir,expected", [
('RAS', 'j', 'Posterior-Anterior'),
('RAS', 'j-', 'Anterior-Posterior'),
('RAS', 'i', 'Left-Right'),
('RAS', 'i-', 'Right-Left'),
('RAS', 'k', 'Inferior-Superior'),
('RAS', 'k-', 'Superior-Inferior'),
('LAS', 'j', 'Posterior-Anterior'),
('LAS', 'i-', 'Left-Right'),
('LAS', 'k-', 'Superior-Inferior'),
('LPI', 'j', 'Anterior-Posterior'),
('LPI', 'i-', 'Left-Right'),
('LPI', 'k-', 'Inferior-Superior'),
('SLP', 'k-', 'Posterior-Anterior'),
('SLP', 'k', 'Anterior-Posterior'),
('SLP', 'j-', 'Left-Right'),
('SLP', 'j', 'Right-Left'),
('SLP', 'i', 'Inferior-Superior'),
('SLP', 'i-', 'Superior-Inferior'),
])
def test_get_world_pedir(tmpdir, orientation, pe_dir, expected):
assert get_world_pedir(orientation, pe_dir) == expected
|
Add weird SLP orientation to get_world_pedir
|
TEST: Add weird SLP orientation to get_world_pedir
|
Python
|
bsd-3-clause
|
oesteban/fmriprep,poldracklab/preprocessing-workflow,oesteban/fmriprep,poldracklab/preprocessing-workflow,oesteban/fmriprep
|
import pytest
from ..reports import get_world_pedir
@pytest.mark.parametrize("orientation,pe_dir,expected", [
('RAS', 'j', 'Posterior-Anterior'),
('RAS', 'j-', 'Anterior-Posterior'),
('RAS', 'i', 'Left-Right'),
('RAS', 'i-', 'Right-Left'),
('RAS', 'k', 'Inferior-Superior'),
('RAS', 'k-', 'Superior-Inferior'),
('LAS', 'j', 'Posterior-Anterior'),
('LAS', 'i-', 'Left-Right'),
('LAS', 'k-', 'Superior-Inferior'),
('LPI', 'j', 'Anterior-Posterior'),
('LPI', 'i-', 'Left-Right'),
('LPI', 'k-', 'Inferior-Superior'),
+ ('SLP', 'k-', 'Posterior-Anterior'),
+ ('SLP', 'k', 'Anterior-Posterior'),
+ ('SLP', 'j-', 'Left-Right'),
+ ('SLP', 'j', 'Right-Left'),
+ ('SLP', 'i', 'Inferior-Superior'),
+ ('SLP', 'i-', 'Superior-Inferior'),
])
def test_get_world_pedir(tmpdir, orientation, pe_dir, expected):
assert get_world_pedir(orientation, pe_dir) == expected
|
Add weird SLP orientation to get_world_pedir
|
## Code Before:
import pytest
from ..reports import get_world_pedir
@pytest.mark.parametrize("orientation,pe_dir,expected", [
('RAS', 'j', 'Posterior-Anterior'),
('RAS', 'j-', 'Anterior-Posterior'),
('RAS', 'i', 'Left-Right'),
('RAS', 'i-', 'Right-Left'),
('RAS', 'k', 'Inferior-Superior'),
('RAS', 'k-', 'Superior-Inferior'),
('LAS', 'j', 'Posterior-Anterior'),
('LAS', 'i-', 'Left-Right'),
('LAS', 'k-', 'Superior-Inferior'),
('LPI', 'j', 'Anterior-Posterior'),
('LPI', 'i-', 'Left-Right'),
('LPI', 'k-', 'Inferior-Superior'),
])
def test_get_world_pedir(tmpdir, orientation, pe_dir, expected):
assert get_world_pedir(orientation, pe_dir) == expected
## Instruction:
Add weird SLP orientation to get_world_pedir
## Code After:
import pytest
from ..reports import get_world_pedir
@pytest.mark.parametrize("orientation,pe_dir,expected", [
('RAS', 'j', 'Posterior-Anterior'),
('RAS', 'j-', 'Anterior-Posterior'),
('RAS', 'i', 'Left-Right'),
('RAS', 'i-', 'Right-Left'),
('RAS', 'k', 'Inferior-Superior'),
('RAS', 'k-', 'Superior-Inferior'),
('LAS', 'j', 'Posterior-Anterior'),
('LAS', 'i-', 'Left-Right'),
('LAS', 'k-', 'Superior-Inferior'),
('LPI', 'j', 'Anterior-Posterior'),
('LPI', 'i-', 'Left-Right'),
('LPI', 'k-', 'Inferior-Superior'),
('SLP', 'k-', 'Posterior-Anterior'),
('SLP', 'k', 'Anterior-Posterior'),
('SLP', 'j-', 'Left-Right'),
('SLP', 'j', 'Right-Left'),
('SLP', 'i', 'Inferior-Superior'),
('SLP', 'i-', 'Superior-Inferior'),
])
def test_get_world_pedir(tmpdir, orientation, pe_dir, expected):
assert get_world_pedir(orientation, pe_dir) == expected
|
import pytest
from ..reports import get_world_pedir
@pytest.mark.parametrize("orientation,pe_dir,expected", [
('RAS', 'j', 'Posterior-Anterior'),
('RAS', 'j-', 'Anterior-Posterior'),
('RAS', 'i', 'Left-Right'),
('RAS', 'i-', 'Right-Left'),
('RAS', 'k', 'Inferior-Superior'),
('RAS', 'k-', 'Superior-Inferior'),
('LAS', 'j', 'Posterior-Anterior'),
('LAS', 'i-', 'Left-Right'),
('LAS', 'k-', 'Superior-Inferior'),
('LPI', 'j', 'Anterior-Posterior'),
('LPI', 'i-', 'Left-Right'),
('LPI', 'k-', 'Inferior-Superior'),
+ ('SLP', 'k-', 'Posterior-Anterior'),
+ ('SLP', 'k', 'Anterior-Posterior'),
+ ('SLP', 'j-', 'Left-Right'),
+ ('SLP', 'j', 'Right-Left'),
+ ('SLP', 'i', 'Inferior-Superior'),
+ ('SLP', 'i-', 'Superior-Inferior'),
])
def test_get_world_pedir(tmpdir, orientation, pe_dir, expected):
assert get_world_pedir(orientation, pe_dir) == expected
|
0ed72241dc9f540615954f58995d96401d954a41
|
courtreader/opener.py
|
courtreader/opener.py
|
import cookielib
import os
import pickle
import urllib2
class Opener:
user_agent = u"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; " + \
u"en-US; rv:1.9.2.11) Gecko/20101012 Firefox/3.6.11"
def __init__(self, name):
self.name = name
# Create page opener that stores cookie
self.cookieJar = cookielib.CookieJar()
cookie_processor = urllib2.HTTPCookieProcessor(self.cookieJar)
self.opener = urllib2.build_opener(cookie_processor)
self.opener.addheaders = [('User-Agent', Opener.user_agent)]
# Try to load cookies
#if os.path.isfile(self.name + '.cookie'):
# with open(self.name + '.cookie', 'r') as f:
# for cookie in pickle.loads(f.read()):
# self.cookieJar.set_cookie(cookie)
def set_cookie(self, name, value):
for cookie in self.cookieJar:
if cookie.name == name:
cookie.value = value
def save_cookies(self):
with open(self.name + '.cookie', 'w') as f:
f.write(pickle.dumps(list(self.cookieJar)))
def open(self, *args):
url = args[0]
if len(args) == 2:
data = args[1]
return self.opener.open(url, data)
return self.opener.open(url)
|
import cookielib
import os
import pickle
import mechanize
class Opener:
def __init__(self, name):
self.opener = mechanize.Browser()
self.opener.set_handle_robots(False)
def set_cookie(self, name, value):
self.opener.set_cookie(str(name) + '=' + str(value))
def save_cookies(self):
return
def open(self, *args):
url = args[0]
if len(args) == 2:
data = args[1]
return self.opener.open(url, data)
return self.opener.open(url)
|
Use mechanize instead of urllib2
|
Use mechanize instead of urllib2
|
Python
|
mit
|
bschoenfeld/va-court-scraper,bschoenfeld/va-court-scraper
|
import cookielib
import os
import pickle
- import urllib2
+ import mechanize
class Opener:
- user_agent = u"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; " + \
- u"en-US; rv:1.9.2.11) Gecko/20101012 Firefox/3.6.11"
-
def __init__(self, name):
+ self.opener = mechanize.Browser()
+ self.opener.set_handle_robots(False)
- self.name = name
- # Create page opener that stores cookie
- self.cookieJar = cookielib.CookieJar()
- cookie_processor = urllib2.HTTPCookieProcessor(self.cookieJar)
- self.opener = urllib2.build_opener(cookie_processor)
- self.opener.addheaders = [('User-Agent', Opener.user_agent)]
-
- # Try to load cookies
- #if os.path.isfile(self.name + '.cookie'):
- # with open(self.name + '.cookie', 'r') as f:
- # for cookie in pickle.loads(f.read()):
- # self.cookieJar.set_cookie(cookie)
def set_cookie(self, name, value):
+ self.opener.set_cookie(str(name) + '=' + str(value))
- for cookie in self.cookieJar:
- if cookie.name == name:
- cookie.value = value
def save_cookies(self):
+ return
- with open(self.name + '.cookie', 'w') as f:
- f.write(pickle.dumps(list(self.cookieJar)))
def open(self, *args):
url = args[0]
if len(args) == 2:
data = args[1]
return self.opener.open(url, data)
return self.opener.open(url)
|
Use mechanize instead of urllib2
|
## Code Before:
import cookielib
import os
import pickle
import urllib2
class Opener:
user_agent = u"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; " + \
u"en-US; rv:1.9.2.11) Gecko/20101012 Firefox/3.6.11"
def __init__(self, name):
self.name = name
# Create page opener that stores cookie
self.cookieJar = cookielib.CookieJar()
cookie_processor = urllib2.HTTPCookieProcessor(self.cookieJar)
self.opener = urllib2.build_opener(cookie_processor)
self.opener.addheaders = [('User-Agent', Opener.user_agent)]
# Try to load cookies
#if os.path.isfile(self.name + '.cookie'):
# with open(self.name + '.cookie', 'r') as f:
# for cookie in pickle.loads(f.read()):
# self.cookieJar.set_cookie(cookie)
def set_cookie(self, name, value):
for cookie in self.cookieJar:
if cookie.name == name:
cookie.value = value
def save_cookies(self):
with open(self.name + '.cookie', 'w') as f:
f.write(pickle.dumps(list(self.cookieJar)))
def open(self, *args):
url = args[0]
if len(args) == 2:
data = args[1]
return self.opener.open(url, data)
return self.opener.open(url)
## Instruction:
Use mechanize instead of urllib2
## Code After:
import cookielib
import os
import pickle
import mechanize
class Opener:
def __init__(self, name):
self.opener = mechanize.Browser()
self.opener.set_handle_robots(False)
def set_cookie(self, name, value):
self.opener.set_cookie(str(name) + '=' + str(value))
def save_cookies(self):
return
def open(self, *args):
url = args[0]
if len(args) == 2:
data = args[1]
return self.opener.open(url, data)
return self.opener.open(url)
|
import cookielib
import os
import pickle
- import urllib2
+ import mechanize
class Opener:
- user_agent = u"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; " + \
- u"en-US; rv:1.9.2.11) Gecko/20101012 Firefox/3.6.11"
-
def __init__(self, name):
+ self.opener = mechanize.Browser()
+ self.opener.set_handle_robots(False)
- self.name = name
- # Create page opener that stores cookie
- self.cookieJar = cookielib.CookieJar()
- cookie_processor = urllib2.HTTPCookieProcessor(self.cookieJar)
- self.opener = urllib2.build_opener(cookie_processor)
- self.opener.addheaders = [('User-Agent', Opener.user_agent)]
-
- # Try to load cookies
- #if os.path.isfile(self.name + '.cookie'):
- # with open(self.name + '.cookie', 'r') as f:
- # for cookie in pickle.loads(f.read()):
- # self.cookieJar.set_cookie(cookie)
def set_cookie(self, name, value):
+ self.opener.set_cookie(str(name) + '=' + str(value))
- for cookie in self.cookieJar:
- if cookie.name == name:
- cookie.value = value
def save_cookies(self):
+ return
- with open(self.name + '.cookie', 'w') as f:
- f.write(pickle.dumps(list(self.cookieJar)))
def open(self, *args):
url = args[0]
if len(args) == 2:
data = args[1]
return self.opener.open(url, data)
return self.opener.open(url)
|
cfeaf5b01b6c822b2351a556e48a1a68aa2bce88
|
glue_vispy_viewers/volume/tests/test_glue_viewer.py
|
glue_vispy_viewers/volume/tests/test_glue_viewer.py
|
import operator
import numpy as np
from glue.qt import get_qapp
from glue.core.data import Data
from glue.core.data_collection import DataCollection
from glue.app.qt.application import GlueApplication
from glue.core.subset import InequalitySubsetState
# from glue.core.tests.util import simple_session
from ..vol_glue_viewer import GlueVispyViewer
def test_viewer():
app = get_qapp()
data = Data(x=np.arange(1000).reshape((10, 10, 10)) / 1000.)
dc = DataCollection([data])
app = GlueApplication(dc)
app.new_data_viewer(GlueVispyViewer, data=data)
subset_state1 = InequalitySubsetState(data.find_component_id('x'), 2/3., operator.gt)
dc.new_subset_group(label='test_subset1', subset_state=subset_state1)
subset_state2 = InequalitySubsetState(data.find_component_id('x'), 1/3., operator.lt)
dc.new_subset_group(label='test_subset2', subset_state=subset_state2)
app.show()
|
import operator
import numpy as np
from glue.qt import get_qapp
from glue.core.data import Data
from glue.core.data_collection import DataCollection
try:
from glue.app.qt.application import GlueApplication
except:
from glue.qt.glue_application import GlueApplication
from glue.core.subset import InequalitySubsetState
# from glue.core.tests.util import simple_session
from ..vol_glue_viewer import GlueVispyViewer
def test_viewer():
app = get_qapp()
data = Data(x=np.arange(1000).reshape((10, 10, 10)) / 1000.)
dc = DataCollection([data])
app = GlueApplication(dc)
app.new_data_viewer(GlueVispyViewer, data=data)
subset_state1 = InequalitySubsetState(data.find_component_id('x'), 2/3., operator.gt)
dc.new_subset_group(label='test_subset1', subset_state=subset_state1)
subset_state2 = InequalitySubsetState(data.find_component_id('x'), 1/3., operator.lt)
dc.new_subset_group(label='test_subset2', subset_state=subset_state2)
app.show()
|
Fix compatibility with latest stable glue version
|
Fix compatibility with latest stable glue version
|
Python
|
bsd-2-clause
|
PennyQ/astro-vispy,PennyQ/glue-3d-viewer,astrofrog/glue-vispy-viewers,glue-viz/glue-3d-viewer,glue-viz/glue-vispy-viewers,astrofrog/glue-3d-viewer
|
import operator
import numpy as np
from glue.qt import get_qapp
from glue.core.data import Data
from glue.core.data_collection import DataCollection
+
+ try:
- from glue.app.qt.application import GlueApplication
+ from glue.app.qt.application import GlueApplication
+ except:
+ from glue.qt.glue_application import GlueApplication
+
from glue.core.subset import InequalitySubsetState
# from glue.core.tests.util import simple_session
from ..vol_glue_viewer import GlueVispyViewer
def test_viewer():
app = get_qapp()
data = Data(x=np.arange(1000).reshape((10, 10, 10)) / 1000.)
dc = DataCollection([data])
app = GlueApplication(dc)
app.new_data_viewer(GlueVispyViewer, data=data)
subset_state1 = InequalitySubsetState(data.find_component_id('x'), 2/3., operator.gt)
dc.new_subset_group(label='test_subset1', subset_state=subset_state1)
subset_state2 = InequalitySubsetState(data.find_component_id('x'), 1/3., operator.lt)
dc.new_subset_group(label='test_subset2', subset_state=subset_state2)
app.show()
|
Fix compatibility with latest stable glue version
|
## Code Before:
import operator
import numpy as np
from glue.qt import get_qapp
from glue.core.data import Data
from glue.core.data_collection import DataCollection
from glue.app.qt.application import GlueApplication
from glue.core.subset import InequalitySubsetState
# from glue.core.tests.util import simple_session
from ..vol_glue_viewer import GlueVispyViewer
def test_viewer():
app = get_qapp()
data = Data(x=np.arange(1000).reshape((10, 10, 10)) / 1000.)
dc = DataCollection([data])
app = GlueApplication(dc)
app.new_data_viewer(GlueVispyViewer, data=data)
subset_state1 = InequalitySubsetState(data.find_component_id('x'), 2/3., operator.gt)
dc.new_subset_group(label='test_subset1', subset_state=subset_state1)
subset_state2 = InequalitySubsetState(data.find_component_id('x'), 1/3., operator.lt)
dc.new_subset_group(label='test_subset2', subset_state=subset_state2)
app.show()
## Instruction:
Fix compatibility with latest stable glue version
## Code After:
import operator
import numpy as np
from glue.qt import get_qapp
from glue.core.data import Data
from glue.core.data_collection import DataCollection
try:
from glue.app.qt.application import GlueApplication
except:
from glue.qt.glue_application import GlueApplication
from glue.core.subset import InequalitySubsetState
# from glue.core.tests.util import simple_session
from ..vol_glue_viewer import GlueVispyViewer
def test_viewer():
app = get_qapp()
data = Data(x=np.arange(1000).reshape((10, 10, 10)) / 1000.)
dc = DataCollection([data])
app = GlueApplication(dc)
app.new_data_viewer(GlueVispyViewer, data=data)
subset_state1 = InequalitySubsetState(data.find_component_id('x'), 2/3., operator.gt)
dc.new_subset_group(label='test_subset1', subset_state=subset_state1)
subset_state2 = InequalitySubsetState(data.find_component_id('x'), 1/3., operator.lt)
dc.new_subset_group(label='test_subset2', subset_state=subset_state2)
app.show()
|
import operator
import numpy as np
from glue.qt import get_qapp
from glue.core.data import Data
from glue.core.data_collection import DataCollection
+
+ try:
- from glue.app.qt.application import GlueApplication
+ from glue.app.qt.application import GlueApplication
? ++++
+ except:
+ from glue.qt.glue_application import GlueApplication
+
from glue.core.subset import InequalitySubsetState
# from glue.core.tests.util import simple_session
from ..vol_glue_viewer import GlueVispyViewer
def test_viewer():
app = get_qapp()
data = Data(x=np.arange(1000).reshape((10, 10, 10)) / 1000.)
dc = DataCollection([data])
app = GlueApplication(dc)
app.new_data_viewer(GlueVispyViewer, data=data)
subset_state1 = InequalitySubsetState(data.find_component_id('x'), 2/3., operator.gt)
dc.new_subset_group(label='test_subset1', subset_state=subset_state1)
subset_state2 = InequalitySubsetState(data.find_component_id('x'), 1/3., operator.lt)
dc.new_subset_group(label='test_subset2', subset_state=subset_state2)
app.show()
|
c5b73be1bf0f0edd05c4743c2449bee568d01c76
|
setup.py
|
setup.py
|
from distutils.core import setup
from turbasen import VERSION
name = 'turbasen'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
author='Ali Kaafarani',
author_email='[email protected]',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
|
from distutils.core import setup
from os import path
from turbasen import VERSION
name = 'turbasen'
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description=long_description,
author='Ali Kaafarani',
author_email='[email protected]',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
|
Add long description from README
|
Add long description from README
|
Python
|
mit
|
Turbasen/turbasen.py
|
from distutils.core import setup
+ from os import path
from turbasen import VERSION
name = 'turbasen'
+
+ here = path.abspath(path.dirname(__file__))
+ with open(path.join(here, 'README.md'), encoding='utf-8') as f:
+ long_description = f.read()
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
+ long_description=long_description,
author='Ali Kaafarani',
author_email='[email protected]',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
|
Add long description from README
|
## Code Before:
from distutils.core import setup
from turbasen import VERSION
name = 'turbasen'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
author='Ali Kaafarani',
author_email='[email protected]',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
## Instruction:
Add long description from README
## Code After:
from distutils.core import setup
from os import path
from turbasen import VERSION
name = 'turbasen'
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description=long_description,
author='Ali Kaafarani',
author_email='[email protected]',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
|
from distutils.core import setup
+ from os import path
from turbasen import VERSION
name = 'turbasen'
+
+ here = path.abspath(path.dirname(__file__))
+ with open(path.join(here, 'README.md'), encoding='utf-8') as f:
+ long_description = f.read()
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
+ long_description=long_description,
author='Ali Kaafarani',
author_email='[email protected]',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
|
6c4883d6e4e65c9d6618244d821ca44c59ca5d58
|
tests/test_prepare.py
|
tests/test_prepare.py
|
from asyncpg import _testbase as tb
class TestPrepare(tb.ConnectedTestCase):
async def test_prepare_1(self):
st = await self.con.prepare('SELECT 1 = $1 AS test')
rec = (await st.execute(1))[0]
self.assertTrue(rec['test'])
self.assertEqual(len(rec), 1)
self.assertEqual(tuple(rec), (True,))
self.assertEqual(False, (await st.execute(10))[0][0])
async def test_prepare_2(self):
with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
await self.con.prepare('SELECT a')
async def test_prepare_3(self):
st = await self.con.prepare('''
SELECT CASE WHEN $1::text IS NULL THEN 'NULL'
ELSE $1::text END''')
self.assertEqual((await st.execute('aaa'))[0][0], 'aaa')
self.assertEqual((await st.execute(None))[0][0], 'NULL')
|
from asyncpg import _testbase as tb
class TestPrepare(tb.ConnectedTestCase):
async def test_prepare_1(self):
st = await self.con.prepare('SELECT 1 = $1 AS test')
rec = (await st.execute(1))[0]
self.assertTrue(rec['test'])
self.assertEqual(len(rec), 1)
self.assertEqual(tuple(rec), (True,))
self.assertEqual(False, (await st.execute(10))[0][0])
async def test_prepare_2(self):
with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
await self.con.prepare('SELECT a')
async def test_prepare_3(self):
cases = [
('text', ("'NULL'", 'NULL'), [
'aaa',
None
]),
('decimal', ('0', 0), [
123,
123.5,
None
])
]
for type, (none_name, none_val), vals in cases:
st = await self.con.prepare('''
SELECT CASE WHEN $1::{type} IS NULL THEN {default}
ELSE $1::{type} END'''.format(
type=type, default=none_name))
for val in vals:
with self.subTest(type=type, value=val):
res = (await st.execute(val))[0][0]
if val is None:
self.assertEqual(res, none_val)
else:
self.assertEqual(res, val)
|
Test that we handle None->NULL conversion for TEXT and BINARY
|
tests: Test that we handle None->NULL conversion for TEXT and BINARY
|
Python
|
apache-2.0
|
MagicStack/asyncpg,MagicStack/asyncpg
|
from asyncpg import _testbase as tb
class TestPrepare(tb.ConnectedTestCase):
async def test_prepare_1(self):
st = await self.con.prepare('SELECT 1 = $1 AS test')
rec = (await st.execute(1))[0]
self.assertTrue(rec['test'])
self.assertEqual(len(rec), 1)
self.assertEqual(tuple(rec), (True,))
self.assertEqual(False, (await st.execute(10))[0][0])
async def test_prepare_2(self):
with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
await self.con.prepare('SELECT a')
async def test_prepare_3(self):
- st = await self.con.prepare('''
- SELECT CASE WHEN $1::text IS NULL THEN 'NULL'
- ELSE $1::text END''')
+ cases = [
+ ('text', ("'NULL'", 'NULL'), [
+ 'aaa',
+ None
+ ]),
- self.assertEqual((await st.execute('aaa'))[0][0], 'aaa')
- self.assertEqual((await st.execute(None))[0][0], 'NULL')
+ ('decimal', ('0', 0), [
+ 123,
+ 123.5,
+ None
+ ])
+ ]
+ for type, (none_name, none_val), vals in cases:
+ st = await self.con.prepare('''
+ SELECT CASE WHEN $1::{type} IS NULL THEN {default}
+ ELSE $1::{type} END'''.format(
+ type=type, default=none_name))
+
+ for val in vals:
+ with self.subTest(type=type, value=val):
+ res = (await st.execute(val))[0][0]
+ if val is None:
+ self.assertEqual(res, none_val)
+ else:
+ self.assertEqual(res, val)
+
|
Test that we handle None->NULL conversion for TEXT and BINARY
|
## Code Before:
from asyncpg import _testbase as tb
class TestPrepare(tb.ConnectedTestCase):
async def test_prepare_1(self):
st = await self.con.prepare('SELECT 1 = $1 AS test')
rec = (await st.execute(1))[0]
self.assertTrue(rec['test'])
self.assertEqual(len(rec), 1)
self.assertEqual(tuple(rec), (True,))
self.assertEqual(False, (await st.execute(10))[0][0])
async def test_prepare_2(self):
with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
await self.con.prepare('SELECT a')
async def test_prepare_3(self):
st = await self.con.prepare('''
SELECT CASE WHEN $1::text IS NULL THEN 'NULL'
ELSE $1::text END''')
self.assertEqual((await st.execute('aaa'))[0][0], 'aaa')
self.assertEqual((await st.execute(None))[0][0], 'NULL')
## Instruction:
Test that we handle None->NULL conversion for TEXT and BINARY
## Code After:
from asyncpg import _testbase as tb
class TestPrepare(tb.ConnectedTestCase):
async def test_prepare_1(self):
st = await self.con.prepare('SELECT 1 = $1 AS test')
rec = (await st.execute(1))[0]
self.assertTrue(rec['test'])
self.assertEqual(len(rec), 1)
self.assertEqual(tuple(rec), (True,))
self.assertEqual(False, (await st.execute(10))[0][0])
async def test_prepare_2(self):
with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
await self.con.prepare('SELECT a')
async def test_prepare_3(self):
cases = [
('text', ("'NULL'", 'NULL'), [
'aaa',
None
]),
('decimal', ('0', 0), [
123,
123.5,
None
])
]
for type, (none_name, none_val), vals in cases:
st = await self.con.prepare('''
SELECT CASE WHEN $1::{type} IS NULL THEN {default}
ELSE $1::{type} END'''.format(
type=type, default=none_name))
for val in vals:
with self.subTest(type=type, value=val):
res = (await st.execute(val))[0][0]
if val is None:
self.assertEqual(res, none_val)
else:
self.assertEqual(res, val)
|
from asyncpg import _testbase as tb
class TestPrepare(tb.ConnectedTestCase):
async def test_prepare_1(self):
st = await self.con.prepare('SELECT 1 = $1 AS test')
rec = (await st.execute(1))[0]
self.assertTrue(rec['test'])
self.assertEqual(len(rec), 1)
self.assertEqual(tuple(rec), (True,))
self.assertEqual(False, (await st.execute(10))[0][0])
async def test_prepare_2(self):
with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
await self.con.prepare('SELECT a')
async def test_prepare_3(self):
- st = await self.con.prepare('''
- SELECT CASE WHEN $1::text IS NULL THEN 'NULL'
- ELSE $1::text END''')
+ cases = [
+ ('text', ("'NULL'", 'NULL'), [
+ 'aaa',
+ None
+ ]),
- self.assertEqual((await st.execute('aaa'))[0][0], 'aaa')
- self.assertEqual((await st.execute(None))[0][0], 'NULL')
+ ('decimal', ('0', 0), [
+ 123,
+ 123.5,
+ None
+ ])
+ ]
+
+ for type, (none_name, none_val), vals in cases:
+ st = await self.con.prepare('''
+ SELECT CASE WHEN $1::{type} IS NULL THEN {default}
+ ELSE $1::{type} END'''.format(
+ type=type, default=none_name))
+
+ for val in vals:
+ with self.subTest(type=type, value=val):
+ res = (await st.execute(val))[0][0]
+ if val is None:
+ self.assertEqual(res, none_val)
+ else:
+ self.assertEqual(res, val)
|
77b5680794a7a60dedf687f4a199e48121f96955
|
tests/performance/benchmark_aggregator.py
|
tests/performance/benchmark_aggregator.py
|
from aggregator import MetricsAggregator
class TestAggregatorPerf(object):
def test_aggregation_performance(self):
ma = MetricsAggregator('my.host')
flush_count = 10
loops_per_flush = 10000
metric_count = 5
for _ in xrange(flush_count):
for i in xrange(loops_per_flush):
# Counters
for j in xrange(metric_count):
ma.submit_packets('counter.%s:%s|c' % (j, i))
ma.submit_packets('gauge.%s:%s|g' % (j, i))
ma.submit_packets('histogram.%s:%s|h' % (j, i))
ma.flush()
if __name__ == '__main__':
t = TestAggregatorPerf()
t.test_aggregation_performance()
|
from aggregator import MetricsAggregator
class TestAggregatorPerf(object):
def test_aggregation_performance(self):
ma = MetricsAggregator('my.host')
flush_count = 10
loops_per_flush = 10000
metric_count = 5
for _ in xrange(flush_count):
for i in xrange(loops_per_flush):
# Counters
for j in xrange(metric_count):
ma.submit_packets('counter.%s:%s|c' % (j, i))
ma.submit_packets('gauge.%s:%s|g' % (j, i))
ma.submit_packets('histogram.%s:%s|h' % (j, i))
ma.submit_packets('set.%s:%s|s' % (j, 1.0))
ma.flush()
if __name__ == '__main__':
t = TestAggregatorPerf()
t.test_aggregation_performance()
|
Add sets + a float value to the benchmark.
|
Add sets + a float value to the benchmark.
|
Python
|
bsd-3-clause
|
yuecong/dd-agent,darron/dd-agent,oneandoneis2/dd-agent,AniruddhaSAtre/dd-agent,packetloop/dd-agent,remh/dd-agent,guruxu/dd-agent,jyogi/purvar-agent,relateiq/dd-agent,JohnLZeller/dd-agent,citrusleaf/dd-agent,lookout/dd-agent,citrusleaf/dd-agent,zendesk/dd-agent,zendesk/dd-agent,JohnLZeller/dd-agent,oneandoneis2/dd-agent,relateiq/dd-agent,cberry777/dd-agent,zendesk/dd-agent,polynomial/dd-agent,jyogi/purvar-agent,manolama/dd-agent,darron/dd-agent,pfmooney/dd-agent,huhongbo/dd-agent,ess/dd-agent,tebriel/dd-agent,amalakar/dd-agent,guruxu/dd-agent,jvassev/dd-agent,Mashape/dd-agent,takus/dd-agent,Wattpad/dd-agent,jvassev/dd-agent,jraede/dd-agent,PagerDuty/dd-agent,GabrielNicolasAvellaneda/dd-agent,JohnLZeller/dd-agent,mderomph-coolblue/dd-agent,joelvanvelden/dd-agent,truthbk/dd-agent,tebriel/dd-agent,eeroniemi/dd-agent,joelvanvelden/dd-agent,cberry777/dd-agent,AniruddhaSAtre/dd-agent,indeedops/dd-agent,cberry777/dd-agent,pmav99/praktoras,gphat/dd-agent,amalakar/dd-agent,lookout/dd-agent,polynomial/dd-agent,huhongbo/dd-agent,Wattpad/dd-agent,truthbk/dd-agent,remh/dd-agent,pfmooney/dd-agent,takus/dd-agent,jyogi/purvar-agent,gphat/dd-agent,remh/dd-agent,tebriel/dd-agent,darron/dd-agent,amalakar/dd-agent,manolama/dd-agent,jshum/dd-agent,jamesandariese/dd-agent,Mashape/dd-agent,joelvanvelden/dd-agent,urosgruber/dd-agent,jshum/dd-agent,GabrielNicolasAvellaneda/dd-agent,truthbk/dd-agent,brettlangdon/dd-agent,gphat/dd-agent,polynomial/dd-agent,a20012251/dd-agent,darron/dd-agent,citrusleaf/dd-agent,Mashape/dd-agent,citrusleaf/dd-agent,pmav99/praktoras,mderomph-coolblue/dd-agent,benmccann/dd-agent,PagerDuty/dd-agent,cberry777/dd-agent,eeroniemi/dd-agent,brettlangdon/dd-agent,huhongbo/dd-agent,zendesk/dd-agent,eeroniemi/dd-agent,truthbk/dd-agent,packetloop/dd-agent,amalakar/dd-agent,darron/dd-agent,a20012251/dd-agent,jraede/dd-agent,jamesandariese/dd-agent,gphat/dd-agent,mderomph-coolblue/dd-agent,benmccann/dd-agent,a20012251/dd-agent,takus/dd-agent,polynomial/dd-agent,jamesandariese
/dd-agent,ess/dd-agent,jraede/dd-agent,c960657/dd-agent,JohnLZeller/dd-agent,AntoCard/powerdns-recursor_check,oneandoneis2/dd-agent,ess/dd-agent,guruxu/dd-agent,indeedops/dd-agent,benmccann/dd-agent,manolama/dd-agent,Wattpad/dd-agent,manolama/dd-agent,a20012251/dd-agent,cberry777/dd-agent,AntoCard/powerdns-recursor_check,c960657/dd-agent,manolama/dd-agent,relateiq/dd-agent,benmccann/dd-agent,jvassev/dd-agent,jshum/dd-agent,jamesandariese/dd-agent,pfmooney/dd-agent,Wattpad/dd-agent,jshum/dd-agent,urosgruber/dd-agent,pmav99/praktoras,joelvanvelden/dd-agent,benmccann/dd-agent,jyogi/purvar-agent,relateiq/dd-agent,Shopify/dd-agent,AntoCard/powerdns-recursor_check,Shopify/dd-agent,AntoCard/powerdns-recursor_check,ess/dd-agent,Mashape/dd-agent,yuecong/dd-agent,jshum/dd-agent,indeedops/dd-agent,GabrielNicolasAvellaneda/dd-agent,gphat/dd-agent,oneandoneis2/dd-agent,brettlangdon/dd-agent,pfmooney/dd-agent,zendesk/dd-agent,brettlangdon/dd-agent,AntoCard/powerdns-recursor_check,c960657/dd-agent,polynomial/dd-agent,pmav99/praktoras,urosgruber/dd-agent,packetloop/dd-agent,oneandoneis2/dd-agent,pfmooney/dd-agent,guruxu/dd-agent,PagerDuty/dd-agent,GabrielNicolasAvellaneda/dd-agent,relateiq/dd-agent,AniruddhaSAtre/dd-agent,huhongbo/dd-agent,jvassev/dd-agent,c960657/dd-agent,GabrielNicolasAvellaneda/dd-agent,packetloop/dd-agent,lookout/dd-agent,tebriel/dd-agent,urosgruber/dd-agent,takus/dd-agent,AniruddhaSAtre/dd-agent,JohnLZeller/dd-agent,Mashape/dd-agent,c960657/dd-agent,jyogi/purvar-agent,citrusleaf/dd-agent,tebriel/dd-agent,yuecong/dd-agent,packetloop/dd-agent,huhongbo/dd-agent,PagerDuty/dd-agent,indeedops/dd-agent,mderomph-coolblue/dd-agent,jraede/dd-agent,indeedops/dd-agent,ess/dd-agent,jvassev/dd-agent,Wattpad/dd-agent,eeroniemi/dd-agent,lookout/dd-agent,Shopify/dd-agent,jraede/dd-agent,guruxu/dd-agent,yuecong/dd-agent,brettlangdon/dd-agent,PagerDuty/dd-agent,jamesandariese/dd-agent,a20012251/dd-agent,amalakar/dd-agent,Shopify/dd-agent,lookout/dd-agent,mderomph-coolblue/dd-age
nt,joelvanvelden/dd-agent,yuecong/dd-agent,AniruddhaSAtre/dd-agent,urosgruber/dd-agent,remh/dd-agent,pmav99/praktoras,eeroniemi/dd-agent,Shopify/dd-agent,remh/dd-agent,takus/dd-agent,truthbk/dd-agent
|
from aggregator import MetricsAggregator
class TestAggregatorPerf(object):
def test_aggregation_performance(self):
ma = MetricsAggregator('my.host')
flush_count = 10
loops_per_flush = 10000
metric_count = 5
for _ in xrange(flush_count):
for i in xrange(loops_per_flush):
# Counters
for j in xrange(metric_count):
ma.submit_packets('counter.%s:%s|c' % (j, i))
ma.submit_packets('gauge.%s:%s|g' % (j, i))
ma.submit_packets('histogram.%s:%s|h' % (j, i))
+ ma.submit_packets('set.%s:%s|s' % (j, 1.0))
+
ma.flush()
if __name__ == '__main__':
t = TestAggregatorPerf()
t.test_aggregation_performance()
|
Add sets + a float value to the benchmark.
|
## Code Before:
from aggregator import MetricsAggregator
class TestAggregatorPerf(object):
def test_aggregation_performance(self):
ma = MetricsAggregator('my.host')
flush_count = 10
loops_per_flush = 10000
metric_count = 5
for _ in xrange(flush_count):
for i in xrange(loops_per_flush):
# Counters
for j in xrange(metric_count):
ma.submit_packets('counter.%s:%s|c' % (j, i))
ma.submit_packets('gauge.%s:%s|g' % (j, i))
ma.submit_packets('histogram.%s:%s|h' % (j, i))
ma.flush()
if __name__ == '__main__':
t = TestAggregatorPerf()
t.test_aggregation_performance()
## Instruction:
Add sets + a float value to the benchmark.
## Code After:
from aggregator import MetricsAggregator
class TestAggregatorPerf(object):
def test_aggregation_performance(self):
ma = MetricsAggregator('my.host')
flush_count = 10
loops_per_flush = 10000
metric_count = 5
for _ in xrange(flush_count):
for i in xrange(loops_per_flush):
# Counters
for j in xrange(metric_count):
ma.submit_packets('counter.%s:%s|c' % (j, i))
ma.submit_packets('gauge.%s:%s|g' % (j, i))
ma.submit_packets('histogram.%s:%s|h' % (j, i))
ma.submit_packets('set.%s:%s|s' % (j, 1.0))
ma.flush()
if __name__ == '__main__':
t = TestAggregatorPerf()
t.test_aggregation_performance()
|
from aggregator import MetricsAggregator
class TestAggregatorPerf(object):
def test_aggregation_performance(self):
ma = MetricsAggregator('my.host')
flush_count = 10
loops_per_flush = 10000
metric_count = 5
for _ in xrange(flush_count):
for i in xrange(loops_per_flush):
# Counters
for j in xrange(metric_count):
ma.submit_packets('counter.%s:%s|c' % (j, i))
ma.submit_packets('gauge.%s:%s|g' % (j, i))
ma.submit_packets('histogram.%s:%s|h' % (j, i))
+ ma.submit_packets('set.%s:%s|s' % (j, 1.0))
+
ma.flush()
if __name__ == '__main__':
t = TestAggregatorPerf()
t.test_aggregation_performance()
|
64c8fd3fa18dd6644a67cbd9e9aa5f20eb5e85a7
|
var/spack/packages/mrnet/package.py
|
var/spack/packages/mrnet/package.py
|
from spack import *
class Mrnet(Package):
"""The MRNet Multi-Cast Reduction Network."""
homepage = "http://paradyn.org/mrnet"
url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz"
version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
version('4.1.0', '5a248298b395b329e2371bf25366115c')
parallel = False
depends_on("boost")
def install(self, spec, prefix):
configure("--prefix=%s" %prefix, "--enable-shared")
make()
make("install")
|
from spack import *
class Mrnet(Package):
"""The MRNet Multi-Cast Reduction Network."""
homepage = "http://paradyn.org/mrnet"
url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz"
version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
version('4.1.0', '5a248298b395b329e2371bf25366115c')
variant('krelloptions', default=False, description="Also build the MRNet LW threadsafe libraries")
parallel = False
depends_on("boost")
def install(self, spec, prefix):
# Build the MRNet LW thread safe libraries when the krelloptions variant is present
if '+krelloptions' in spec:
configure("--prefix=%s" %prefix, "--enable-shared", "--enable-ltwt-threadsafe")
else:
configure("--prefix=%s" %prefix, "--enable-shared")
make()
make("install")
|
Add krelloptions variant that is used to turn on a configuration option to build the thread safe lightweight libraries.
|
Add krelloptions variant that is used to turn on a configuration option to build the thread safe lightweight libraries.
|
Python
|
lgpl-2.1
|
EmreAtes/spack,krafczyk/spack,matthiasdiener/spack,matthiasdiener/spack,tmerrick1/spack,krafczyk/spack,LLNL/spack,matthiasdiener/spack,EmreAtes/spack,lgarren/spack,iulian787/spack,EmreAtes/spack,mfherbst/spack,krafczyk/spack,matthiasdiener/spack,skosukhin/spack,skosukhin/spack,mfherbst/spack,EmreAtes/spack,TheTimmy/spack,lgarren/spack,mfherbst/spack,mfherbst/spack,matthiasdiener/spack,LLNL/spack,lgarren/spack,krafczyk/spack,EmreAtes/spack,tmerrick1/spack,tmerrick1/spack,TheTimmy/spack,lgarren/spack,LLNL/spack,iulian787/spack,krafczyk/spack,skosukhin/spack,LLNL/spack,TheTimmy/spack,TheTimmy/spack,tmerrick1/spack,LLNL/spack,skosukhin/spack,iulian787/spack,TheTimmy/spack,mfherbst/spack,lgarren/spack,skosukhin/spack,tmerrick1/spack,iulian787/spack,iulian787/spack
|
from spack import *
class Mrnet(Package):
"""The MRNet Multi-Cast Reduction Network."""
homepage = "http://paradyn.org/mrnet"
url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz"
version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
version('4.1.0', '5a248298b395b329e2371bf25366115c')
+ variant('krelloptions', default=False, description="Also build the MRNet LW threadsafe libraries")
parallel = False
depends_on("boost")
def install(self, spec, prefix):
+ # Build the MRNet LW thread safe libraries when the krelloptions variant is present
+ if '+krelloptions' in spec:
+ configure("--prefix=%s" %prefix, "--enable-shared", "--enable-ltwt-threadsafe")
+ else:
- configure("--prefix=%s" %prefix, "--enable-shared")
+ configure("--prefix=%s" %prefix, "--enable-shared")
make()
make("install")
|
Add krelloptions variant that is used to turn on a configuration option to build the thread safe lightweight libraries.
|
## Code Before:
from spack import *
class Mrnet(Package):
"""The MRNet Multi-Cast Reduction Network."""
homepage = "http://paradyn.org/mrnet"
url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz"
version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
version('4.1.0', '5a248298b395b329e2371bf25366115c')
parallel = False
depends_on("boost")
def install(self, spec, prefix):
configure("--prefix=%s" %prefix, "--enable-shared")
make()
make("install")
## Instruction:
Add krelloptions variant that is used to turn on a configuration option to build the thread safe lightweight libraries.
## Code After:
from spack import *
class Mrnet(Package):
"""The MRNet Multi-Cast Reduction Network."""
homepage = "http://paradyn.org/mrnet"
url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz"
version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
version('4.1.0', '5a248298b395b329e2371bf25366115c')
variant('krelloptions', default=False, description="Also build the MRNet LW threadsafe libraries")
parallel = False
depends_on("boost")
def install(self, spec, prefix):
# Build the MRNet LW thread safe libraries when the krelloptions variant is present
if '+krelloptions' in spec:
configure("--prefix=%s" %prefix, "--enable-shared", "--enable-ltwt-threadsafe")
else:
configure("--prefix=%s" %prefix, "--enable-shared")
make()
make("install")
|
from spack import *
class Mrnet(Package):
"""The MRNet Multi-Cast Reduction Network."""
homepage = "http://paradyn.org/mrnet"
url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz"
version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
version('4.1.0', '5a248298b395b329e2371bf25366115c')
+ variant('krelloptions', default=False, description="Also build the MRNet LW threadsafe libraries")
parallel = False
depends_on("boost")
def install(self, spec, prefix):
+ # Build the MRNet LW thread safe libraries when the krelloptions variant is present
+ if '+krelloptions' in spec:
+ configure("--prefix=%s" %prefix, "--enable-shared", "--enable-ltwt-threadsafe")
+ else:
- configure("--prefix=%s" %prefix, "--enable-shared")
+ configure("--prefix=%s" %prefix, "--enable-shared")
? +++
make()
make("install")
|
052228bb3ba3c8ec13452f5a8328ed77c30e565d
|
images/hub/canvasauthenticator/canvasauthenticator/__init__.py
|
images/hub/canvasauthenticator/canvasauthenticator/__init__.py
|
from traitlets import List, Unicode
from oauthenticator.generic import GenericOAuthenticator
from tornado import gen
canvas_site = 'https://ucberkeley.test.instructure.com/'
class CanvasAuthenticator(GenericOAuthenticator):
allowed_email_domains = List(
[],
config=True,
help="""
List of domains whose users are authorized to log in.
This relies on the primary email id set in canvas for the user
"""
)
canvas_url = Unicode(
'',
config=True,
help="""
URL to canvas installation to use for authentication.
Must have a trailing slash
"""
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not self.canvas_url:
raise ValueError('c.CanvasAuthenticator.canvas_url must be set')
# canvas_url must have a trailing slash
if self.canvas_url[-1] != '/':
raise ValueError('c.CanvasAuthenticator.canvas_url must have a trailing slash')
self.token_url = f'{self.canvas_url}login/oauth2/token'
self.userdata_url = f'{self.canvas_url}api/v1/users/self/profile'
self.extra_params = {
'client_id': self.client_id,
'client_secret': self.client_secret
}
def normalize_username(self,username):
username = username.lower()
# FIXME: allow
username = username.split('@')[0]
return username
|
from traitlets import List, Unicode
from oauthenticator.generic import GenericOAuthenticator
from tornado import gen
canvas_site = 'https://ucberkeley.test.instructure.com/'
class CanvasAuthenticator(GenericOAuthenticator):
allowed_email_domains = List(
[],
config=True,
help="""
List of domains whose users are authorized to log in.
This relies on the primary email id set in canvas for the user
"""
)
canvas_url = Unicode(
'',
config=True,
help="""
URL to canvas installation to use for authentication.
Must have a trailing slash
"""
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not self.canvas_url:
raise ValueError('c.CanvasAuthenticator.canvas_url must be set')
# canvas_url must have a trailing slash
if self.canvas_url[-1] != '/':
raise ValueError('c.CanvasAuthenticator.canvas_url must have a trailing slash')
self.token_url = f'{self.canvas_url}login/oauth2/token'
self.userdata_url = f'{self.canvas_url}api/v1/users/self/profile'
self.extra_params = {
'client_id': self.client_id,
'client_secret': self.client_secret
}
def normalize_username(self,username):
username = username.lower()
# To make life easier & match usernames with existing users who were
# created with google auth, we want to strip the domain name. If not,
# we use the full email as the official user name
if username.endswith('@berkeley.edu'):
return username.split('@')[0]
return username
|
Normalize usernames properly to prevent clashes from guest accounts
|
Normalize usernames properly to prevent clashes from guest accounts
Guest accounts can have non berkeley.edu emails, and might
get access to a berkeley.edu user's home directory. This
will prevent that.
|
Python
|
bsd-3-clause
|
ryanlovett/datahub,berkeley-dsep-infra/datahub,ryanlovett/datahub,ryanlovett/datahub,berkeley-dsep-infra/datahub,berkeley-dsep-infra/datahub
|
from traitlets import List, Unicode
from oauthenticator.generic import GenericOAuthenticator
from tornado import gen
canvas_site = 'https://ucberkeley.test.instructure.com/'
class CanvasAuthenticator(GenericOAuthenticator):
allowed_email_domains = List(
[],
config=True,
help="""
List of domains whose users are authorized to log in.
This relies on the primary email id set in canvas for the user
"""
)
canvas_url = Unicode(
'',
config=True,
help="""
URL to canvas installation to use for authentication.
Must have a trailing slash
"""
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not self.canvas_url:
raise ValueError('c.CanvasAuthenticator.canvas_url must be set')
# canvas_url must have a trailing slash
if self.canvas_url[-1] != '/':
raise ValueError('c.CanvasAuthenticator.canvas_url must have a trailing slash')
self.token_url = f'{self.canvas_url}login/oauth2/token'
self.userdata_url = f'{self.canvas_url}api/v1/users/self/profile'
self.extra_params = {
'client_id': self.client_id,
'client_secret': self.client_secret
}
def normalize_username(self,username):
username = username.lower()
- # FIXME: allow
+ # To make life easier & match usernames with existing users who were
+ # created with google auth, we want to strip the domain name. If not,
+ # we use the full email as the official user name
+ if username.endswith('@berkeley.edu'):
- username = username.split('@')[0]
+ return username.split('@')[0]
return username
+
|
Normalize usernames properly to prevent clashes from guest accounts
|
## Code Before:
from traitlets import List, Unicode
from oauthenticator.generic import GenericOAuthenticator
from tornado import gen
canvas_site = 'https://ucberkeley.test.instructure.com/'
class CanvasAuthenticator(GenericOAuthenticator):
allowed_email_domains = List(
[],
config=True,
help="""
List of domains whose users are authorized to log in.
This relies on the primary email id set in canvas for the user
"""
)
canvas_url = Unicode(
'',
config=True,
help="""
URL to canvas installation to use for authentication.
Must have a trailing slash
"""
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not self.canvas_url:
raise ValueError('c.CanvasAuthenticator.canvas_url must be set')
# canvas_url must have a trailing slash
if self.canvas_url[-1] != '/':
raise ValueError('c.CanvasAuthenticator.canvas_url must have a trailing slash')
self.token_url = f'{self.canvas_url}login/oauth2/token'
self.userdata_url = f'{self.canvas_url}api/v1/users/self/profile'
self.extra_params = {
'client_id': self.client_id,
'client_secret': self.client_secret
}
def normalize_username(self,username):
username = username.lower()
# FIXME: allow
username = username.split('@')[0]
return username
## Instruction:
Normalize usernames properly to prevent clashes from guest accounts
## Code After:
from traitlets import List, Unicode
from oauthenticator.generic import GenericOAuthenticator
from tornado import gen
canvas_site = 'https://ucberkeley.test.instructure.com/'
class CanvasAuthenticator(GenericOAuthenticator):
allowed_email_domains = List(
[],
config=True,
help="""
List of domains whose users are authorized to log in.
This relies on the primary email id set in canvas for the user
"""
)
canvas_url = Unicode(
'',
config=True,
help="""
URL to canvas installation to use for authentication.
Must have a trailing slash
"""
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not self.canvas_url:
raise ValueError('c.CanvasAuthenticator.canvas_url must be set')
# canvas_url must have a trailing slash
if self.canvas_url[-1] != '/':
raise ValueError('c.CanvasAuthenticator.canvas_url must have a trailing slash')
self.token_url = f'{self.canvas_url}login/oauth2/token'
self.userdata_url = f'{self.canvas_url}api/v1/users/self/profile'
self.extra_params = {
'client_id': self.client_id,
'client_secret': self.client_secret
}
def normalize_username(self,username):
username = username.lower()
# To make life easier & match usernames with existing users who were
# created with google auth, we want to strip the domain name. If not,
# we use the full email as the official user name
if username.endswith('@berkeley.edu'):
return username.split('@')[0]
return username
|
from traitlets import List, Unicode
from oauthenticator.generic import GenericOAuthenticator
from tornado import gen
canvas_site = 'https://ucberkeley.test.instructure.com/'
class CanvasAuthenticator(GenericOAuthenticator):
allowed_email_domains = List(
[],
config=True,
help="""
List of domains whose users are authorized to log in.
This relies on the primary email id set in canvas for the user
"""
)
canvas_url = Unicode(
'',
config=True,
help="""
URL to canvas installation to use for authentication.
Must have a trailing slash
"""
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not self.canvas_url:
raise ValueError('c.CanvasAuthenticator.canvas_url must be set')
# canvas_url must have a trailing slash
if self.canvas_url[-1] != '/':
raise ValueError('c.CanvasAuthenticator.canvas_url must have a trailing slash')
self.token_url = f'{self.canvas_url}login/oauth2/token'
self.userdata_url = f'{self.canvas_url}api/v1/users/self/profile'
self.extra_params = {
'client_id': self.client_id,
'client_secret': self.client_secret
}
def normalize_username(self,username):
username = username.lower()
- # FIXME: allow
+ # To make life easier & match usernames with existing users who were
+ # created with google auth, we want to strip the domain name. If not,
+ # we use the full email as the official user name
+ if username.endswith('@berkeley.edu'):
- username = username.split('@')[0]
? -- -----
+ return username.split('@')[0]
? +++++++
return username
|
b71a96f818c66b5578fb7c4475b67ecdcb16937a
|
recipes/recipe_modules/gclient/tests/sync_failure.py
|
recipes/recipe_modules/gclient/tests/sync_failure.py
|
from recipe_engine import post_process
DEPS = ['gclient']
def RunSteps(api):
src_cfg = api.gclient.make_config(CACHE_DIR='[ROOT]/git_cache')
api.gclient.sync(src_cfg)
def GenTests(api):
yield api.test(
'no-json',
api.override_step_data('gclient sync', retcode=1),
api.post_check(
lambda check, steps:
check(not steps['$result']['failure']['humanReason']
.startswith('Uncaught Exception'))),
api.post_process(post_process.DropExpectation)
)
|
from recipe_engine import post_process
DEPS = ['gclient']
def RunSteps(api):
src_cfg = api.gclient.make_config(CACHE_DIR='[ROOT]/git_cache')
api.gclient.sync(src_cfg)
def GenTests(api):
yield api.test(
'no-json',
api.override_step_data('gclient sync', retcode=1),
# Should not fail with uncaught exception
api.post_process(post_process.ResultReasonRE, r'^(?!Uncaught Exception)'),
api.post_process(post_process.DropExpectation)
)
|
Replace customized test failure assertion with ResultReasonRE from engine
|
Replace customized test failure assertion with ResultReasonRE from engine
This change is to facilitate the annotation protocol -> luciexe protocol
migration in the future. The failure response structure will be changed
after the migration. Therefore, we only need to change the
implementation detail of ResultReasonRE at that time.
R=iannucci
Change-Id: If5e0005dddcaf6ccdfbcb047e3855763cf4eadc5
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/2146066
Auto-Submit: Yiwei Zhang <[email protected]>
Reviewed-by: Robbie Iannucci <[email protected]>
Commit-Queue: Robbie Iannucci <[email protected]>
|
Python
|
bsd-3-clause
|
CoherentLabs/depot_tools,CoherentLabs/depot_tools
|
from recipe_engine import post_process
DEPS = ['gclient']
def RunSteps(api):
src_cfg = api.gclient.make_config(CACHE_DIR='[ROOT]/git_cache')
api.gclient.sync(src_cfg)
def GenTests(api):
yield api.test(
'no-json',
api.override_step_data('gclient sync', retcode=1),
+ # Should not fail with uncaught exception
+ api.post_process(post_process.ResultReasonRE, r'^(?!Uncaught Exception)'),
- api.post_check(
- lambda check, steps:
- check(not steps['$result']['failure']['humanReason']
- .startswith('Uncaught Exception'))),
api.post_process(post_process.DropExpectation)
)
|
Replace customized test failure assertion with ResultReasonRE from engine
|
## Code Before:
from recipe_engine import post_process
DEPS = ['gclient']
def RunSteps(api):
src_cfg = api.gclient.make_config(CACHE_DIR='[ROOT]/git_cache')
api.gclient.sync(src_cfg)
def GenTests(api):
yield api.test(
'no-json',
api.override_step_data('gclient sync', retcode=1),
api.post_check(
lambda check, steps:
check(not steps['$result']['failure']['humanReason']
.startswith('Uncaught Exception'))),
api.post_process(post_process.DropExpectation)
)
## Instruction:
Replace customized test failure assertion with ResultReasonRE from engine
## Code After:
from recipe_engine import post_process
DEPS = ['gclient']
def RunSteps(api):
src_cfg = api.gclient.make_config(CACHE_DIR='[ROOT]/git_cache')
api.gclient.sync(src_cfg)
def GenTests(api):
yield api.test(
'no-json',
api.override_step_data('gclient sync', retcode=1),
# Should not fail with uncaught exception
api.post_process(post_process.ResultReasonRE, r'^(?!Uncaught Exception)'),
api.post_process(post_process.DropExpectation)
)
|
from recipe_engine import post_process
DEPS = ['gclient']
def RunSteps(api):
src_cfg = api.gclient.make_config(CACHE_DIR='[ROOT]/git_cache')
api.gclient.sync(src_cfg)
def GenTests(api):
yield api.test(
'no-json',
api.override_step_data('gclient sync', retcode=1),
+ # Should not fail with uncaught exception
+ api.post_process(post_process.ResultReasonRE, r'^(?!Uncaught Exception)'),
- api.post_check(
- lambda check, steps:
- check(not steps['$result']['failure']['humanReason']
- .startswith('Uncaught Exception'))),
api.post_process(post_process.DropExpectation)
)
|
68fe680266f705bea2b33e614d7aac2ae13b46a2
|
url_shortener/forms.py
|
url_shortener/forms.py
|
from flask_wtf import Form
from wtforms import StringField, validators
from .validation import not_spam
class ShortenedUrlForm(Form):
url = StringField(
'Url to be shortened',
[
validators.DataRequired(),
validators.URL(message="A valid url is required"),
not_spam
]
)
|
from flask_wtf import Form
from wtforms import StringField, validators
from .validation import not_blacklisted_nor_spam
class ShortenedUrlForm(Form):
url = StringField(
'Url to be shortened',
[
validators.DataRequired(),
validators.URL(message="A valid url is required"),
not_blacklisted_nor_spam
]
)
|
Replace not_spam validator with not_blacklisted_nor_spam in form class
|
Replace not_spam validator with not_blacklisted_nor_spam in form class
|
Python
|
mit
|
piotr-rusin/url-shortener,piotr-rusin/url-shortener
|
from flask_wtf import Form
from wtforms import StringField, validators
- from .validation import not_spam
+ from .validation import not_blacklisted_nor_spam
class ShortenedUrlForm(Form):
url = StringField(
'Url to be shortened',
[
validators.DataRequired(),
validators.URL(message="A valid url is required"),
- not_spam
+ not_blacklisted_nor_spam
]
)
|
Replace not_spam validator with not_blacklisted_nor_spam in form class
|
## Code Before:
from flask_wtf import Form
from wtforms import StringField, validators
from .validation import not_spam
class ShortenedUrlForm(Form):
url = StringField(
'Url to be shortened',
[
validators.DataRequired(),
validators.URL(message="A valid url is required"),
not_spam
]
)
## Instruction:
Replace not_spam validator with not_blacklisted_nor_spam in form class
## Code After:
from flask_wtf import Form
from wtforms import StringField, validators
from .validation import not_blacklisted_nor_spam
class ShortenedUrlForm(Form):
url = StringField(
'Url to be shortened',
[
validators.DataRequired(),
validators.URL(message="A valid url is required"),
not_blacklisted_nor_spam
]
)
|
from flask_wtf import Form
from wtforms import StringField, validators
- from .validation import not_spam
+ from .validation import not_blacklisted_nor_spam
? ++++++++++++++++
class ShortenedUrlForm(Form):
url = StringField(
'Url to be shortened',
[
validators.DataRequired(),
validators.URL(message="A valid url is required"),
- not_spam
+ not_blacklisted_nor_spam
]
)
|
2267f31ba91ea649c54a51ab3e8f3babbe72f44e
|
openliveq/collection.py
|
openliveq/collection.py
|
from collections import defaultdict
class Collection(object):
DOC_FROM = ["question_body", "best_answer_body"]
def __init__(self):
'''
Compute the following statistics
df: document frequency
cf: collection frequency
dn: total number of documents
cn: total number of words
'''
self.df = defaultdict(int)
self.cf = defaultdict(int)
self.dn = 0
self.cn = 0
def add(self, wordsets):
'''
Add a question
'''
for label in self.DOC_FROM:
for w in set(wordsets[label].keys()):
self.df[w] += 1
self.cf[w] += wordsets[label][w]
self.cn += wordsets[label][w]
self.dn += 1
|
from collections import defaultdict
class Collection(object):
DOC_FROM = ["question_body", "best_answer_body"]
def __init__(self):
'''
Compute the following statistics
df: document frequency
cf: collection frequency
dn: total number of documents
cn: total number of words
'''
self.df = defaultdict(int)
self.cf = defaultdict(int)
self.dn = 0
self.cn = 0
def add(self, wordsets):
'''
Add a question
'''
for label in self.DOC_FROM:
for w in set(wordsets[label].keys()):
self.df[w] += 1
self.cf[w] += wordsets[label][w]
self.cn += wordsets[label][w]
self.dn += 1
@property
def avgdlen(self):
return float(self.cn) / self.dn
|
Add avddlen property to Collection
|
Add avddlen property to Collection
|
Python
|
mit
|
mpkato/openliveq
|
from collections import defaultdict
class Collection(object):
DOC_FROM = ["question_body", "best_answer_body"]
def __init__(self):
'''
Compute the following statistics
df: document frequency
cf: collection frequency
dn: total number of documents
cn: total number of words
'''
self.df = defaultdict(int)
self.cf = defaultdict(int)
self.dn = 0
self.cn = 0
def add(self, wordsets):
'''
Add a question
'''
for label in self.DOC_FROM:
for w in set(wordsets[label].keys()):
self.df[w] += 1
self.cf[w] += wordsets[label][w]
self.cn += wordsets[label][w]
self.dn += 1
+ @property
+ def avgdlen(self):
+ return float(self.cn) / self.dn
+
|
Add avddlen property to Collection
|
## Code Before:
from collections import defaultdict
class Collection(object):
DOC_FROM = ["question_body", "best_answer_body"]
def __init__(self):
'''
Compute the following statistics
df: document frequency
cf: collection frequency
dn: total number of documents
cn: total number of words
'''
self.df = defaultdict(int)
self.cf = defaultdict(int)
self.dn = 0
self.cn = 0
def add(self, wordsets):
'''
Add a question
'''
for label in self.DOC_FROM:
for w in set(wordsets[label].keys()):
self.df[w] += 1
self.cf[w] += wordsets[label][w]
self.cn += wordsets[label][w]
self.dn += 1
## Instruction:
Add avddlen property to Collection
## Code After:
from collections import defaultdict
class Collection(object):
DOC_FROM = ["question_body", "best_answer_body"]
def __init__(self):
'''
Compute the following statistics
df: document frequency
cf: collection frequency
dn: total number of documents
cn: total number of words
'''
self.df = defaultdict(int)
self.cf = defaultdict(int)
self.dn = 0
self.cn = 0
def add(self, wordsets):
'''
Add a question
'''
for label in self.DOC_FROM:
for w in set(wordsets[label].keys()):
self.df[w] += 1
self.cf[w] += wordsets[label][w]
self.cn += wordsets[label][w]
self.dn += 1
@property
def avgdlen(self):
return float(self.cn) / self.dn
|
from collections import defaultdict
class Collection(object):
DOC_FROM = ["question_body", "best_answer_body"]
def __init__(self):
'''
Compute the following statistics
df: document frequency
cf: collection frequency
dn: total number of documents
cn: total number of words
'''
self.df = defaultdict(int)
self.cf = defaultdict(int)
self.dn = 0
self.cn = 0
def add(self, wordsets):
'''
Add a question
'''
for label in self.DOC_FROM:
for w in set(wordsets[label].keys()):
self.df[w] += 1
self.cf[w] += wordsets[label][w]
self.cn += wordsets[label][w]
self.dn += 1
+
+ @property
+ def avgdlen(self):
+ return float(self.cn) / self.dn
|
d84f42d45bb16820fb0077c9f0f92ba88e24d5de
|
cabot/cabotapp/jenkins.py
|
cabot/cabotapp/jenkins.py
|
from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue'):
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
|
from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
if settings.JENKINS_USER:
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
else:
auth = None
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue'):
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
|
Fix Jenkins tests when no user is set
|
Fix Jenkins tests when no user is set
|
Python
|
mit
|
Affirm/cabot,Affirm/cabot,Affirm/cabot,Affirm/cabot
|
from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
+ if settings.JENKINS_USER:
- auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
+ auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
+ else:
+ auth = None
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue'):
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
|
Fix Jenkins tests when no user is set
|
## Code Before:
from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue'):
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
## Instruction:
Fix Jenkins tests when no user is set
## Code After:
from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
if settings.JENKINS_USER:
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
else:
auth = None
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue'):
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
|
from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
+ if settings.JENKINS_USER:
- auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
+ auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
? ++++
+ else:
+ auth = None
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue'):
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
|
5e2dbeab75501254da598c6c401cbbd8446f01a5
|
util/timeline_adjust.py
|
util/timeline_adjust.py
|
from __future__ import print_function
import argparse
import re
time_re = re.compile(r"^\s*#?\s*([0-9]+(?:\.[0-9]+)?)\s+\"")
first_num_re = re.compile(r"([0-9]+(?:\.[0-9]+)?)")
def adjust_lines(lines, adjust):
for line in lines:
match = re.match(time_re, line)
if match:
time = float(match.group(1)) + adjust
print(re.sub(first_num_re, str(time), line, 1), end='')
else:
print(line, end='')
def main():
parser = argparse.ArgumentParser(
description="A utility to uniformly adjust times in an act timeline file")
parser.add_argument('--file', required=True, type=argparse.FileType('r', 0),
help="The timeline file to adjust times in")
parser.add_argument('--adjust', required=True, type=float,
help="The amount of time to adjust each entry by")
args = parser.parse_args()
adjust_lines(args.file, args.adjust)
if __name__ == "__main__":
main()
|
from __future__ import print_function
import argparse
import re
time_re = re.compile(r"^\s*#?\s*([0-9]+(?:\.[0-9]+)?)\s+\"")
first_num_re = re.compile(r"([0-9]+(?:\.[0-9]+)?)")
def adjust_lines(lines, adjust):
for line in lines:
match = re.match(time_re, line)
if match:
time = float(match.group(1)) + adjust
print(re.sub(first_num_re, str(time), line, 1), end='')
else:
print(line, end='')
def main():
parser = argparse.ArgumentParser(
description="A utility to uniformly adjust times in an act timeline file")
parser.add_argument('--file', required=True, type=argparse.FileType('r', encoding="utf8"),
help="The timeline file to adjust times in")
parser.add_argument('--adjust', required=True, type=float,
help="The amount of time to adjust each entry by")
args = parser.parse_args()
adjust_lines(args.file, args.adjust)
if __name__ == "__main__":
main()
|
Fix timeline adjust not being able to load Windows files
|
Fix timeline adjust not being able to load Windows files
|
Python
|
apache-2.0
|
quisquous/cactbot,quisquous/cactbot,quisquous/cactbot,sqt/cactbot,sqt/cactbot,sqt/cactbot,sqt/cactbot,quisquous/cactbot,quisquous/cactbot,sqt/cactbot,quisquous/cactbot
|
from __future__ import print_function
import argparse
import re
time_re = re.compile(r"^\s*#?\s*([0-9]+(?:\.[0-9]+)?)\s+\"")
first_num_re = re.compile(r"([0-9]+(?:\.[0-9]+)?)")
def adjust_lines(lines, adjust):
for line in lines:
match = re.match(time_re, line)
if match:
time = float(match.group(1)) + adjust
print(re.sub(first_num_re, str(time), line, 1), end='')
else:
print(line, end='')
def main():
parser = argparse.ArgumentParser(
description="A utility to uniformly adjust times in an act timeline file")
- parser.add_argument('--file', required=True, type=argparse.FileType('r', 0),
+ parser.add_argument('--file', required=True, type=argparse.FileType('r', encoding="utf8"),
help="The timeline file to adjust times in")
parser.add_argument('--adjust', required=True, type=float,
help="The amount of time to adjust each entry by")
args = parser.parse_args()
adjust_lines(args.file, args.adjust)
if __name__ == "__main__":
main()
|
Fix timeline adjust not being able to load Windows files
|
## Code Before:
from __future__ import print_function
import argparse
import re
time_re = re.compile(r"^\s*#?\s*([0-9]+(?:\.[0-9]+)?)\s+\"")
first_num_re = re.compile(r"([0-9]+(?:\.[0-9]+)?)")
def adjust_lines(lines, adjust):
for line in lines:
match = re.match(time_re, line)
if match:
time = float(match.group(1)) + adjust
print(re.sub(first_num_re, str(time), line, 1), end='')
else:
print(line, end='')
def main():
parser = argparse.ArgumentParser(
description="A utility to uniformly adjust times in an act timeline file")
parser.add_argument('--file', required=True, type=argparse.FileType('r', 0),
help="The timeline file to adjust times in")
parser.add_argument('--adjust', required=True, type=float,
help="The amount of time to adjust each entry by")
args = parser.parse_args()
adjust_lines(args.file, args.adjust)
if __name__ == "__main__":
main()
## Instruction:
Fix timeline adjust not being able to load Windows files
## Code After:
from __future__ import print_function
import argparse
import re
time_re = re.compile(r"^\s*#?\s*([0-9]+(?:\.[0-9]+)?)\s+\"")
first_num_re = re.compile(r"([0-9]+(?:\.[0-9]+)?)")
def adjust_lines(lines, adjust):
for line in lines:
match = re.match(time_re, line)
if match:
time = float(match.group(1)) + adjust
print(re.sub(first_num_re, str(time), line, 1), end='')
else:
print(line, end='')
def main():
parser = argparse.ArgumentParser(
description="A utility to uniformly adjust times in an act timeline file")
parser.add_argument('--file', required=True, type=argparse.FileType('r', encoding="utf8"),
help="The timeline file to adjust times in")
parser.add_argument('--adjust', required=True, type=float,
help="The amount of time to adjust each entry by")
args = parser.parse_args()
adjust_lines(args.file, args.adjust)
if __name__ == "__main__":
main()
|
from __future__ import print_function
import argparse
import re
time_re = re.compile(r"^\s*#?\s*([0-9]+(?:\.[0-9]+)?)\s+\"")
first_num_re = re.compile(r"([0-9]+(?:\.[0-9]+)?)")
def adjust_lines(lines, adjust):
for line in lines:
match = re.match(time_re, line)
if match:
time = float(match.group(1)) + adjust
print(re.sub(first_num_re, str(time), line, 1), end='')
else:
print(line, end='')
def main():
parser = argparse.ArgumentParser(
description="A utility to uniformly adjust times in an act timeline file")
- parser.add_argument('--file', required=True, type=argparse.FileType('r', 0),
? ^
+ parser.add_argument('--file', required=True, type=argparse.FileType('r', encoding="utf8"),
? ^^^^^^^^^^^^^^^
help="The timeline file to adjust times in")
parser.add_argument('--adjust', required=True, type=float,
help="The amount of time to adjust each entry by")
args = parser.parse_args()
adjust_lines(args.file, args.adjust)
if __name__ == "__main__":
main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.