commit (stringlengths 40-40) | old_file (stringlengths 4-106) | new_file (stringlengths 4-106) | old_contents (stringlengths 10-2.94k) | new_contents (stringlengths 21-2.95k) | subject (stringlengths 16-444) | message (stringlengths 17-2.63k) | lang (stringclasses, 1 value) | license (stringclasses, 13 values) | repos (stringlengths 7-43k) | ndiff (stringlengths 52-3.31k) | instruction (stringlengths 16-444) | content (stringlengths 133-4.32k) | diff (stringlengths 49-3.61k)
---|---|---|---|---|---|---|---|---|---|---|---|---|---
338a6e8da75a5b950949638b1a810510419450e9
|
scripts/state_and_transition.py
|
scripts/state_and_transition.py
|
from auto_number import AutoNumber
class STATE(AutoNumber):
Start = ()
Following_waypoint = ()
Avoiding_obstacle = ()
Driving_toward_cone = ()
Driving_away_from_cone = ()
Success = ()
Failure = ()
End = ()
class TRANSITION(AutoNumber):
obstacle_seen = ()
near_cone = ()
obstacle_cleared = ()
touched_cone = ()
passed_cone = ()
segment_timeout = ()
touched_last_cone = ()
passed_last_cone = ()
course_timeout = ()
|
from auto_number import AutoNumber
class STATE(AutoNumber):
Start = ()
Following_waypoint = ()
Avoiding_obstacle = ()
Driving_toward_cone = ()
Driving_away_from_cone = ()
Success = ()
Failure = ()
End = ()
class TRANSITION(AutoNumber):
obstacle_seen = ()
near_cone = ()
obstacle_cleared = ()
touched_cone = ()
passed_cone = ()
segment_timeout = ()
touched_last_cone = ()
passed_last_cone = ()
course_timeout = ()
cleared_cone = ()
|
Add new state for driving away from cone
|
Add new state for driving away from cone
|
Python
|
apache-2.0
|
ProgrammingRobotsStudyGroup/robo_magellan,ProgrammingRobotsStudyGroup/robo_magellan,ProgrammingRobotsStudyGroup/robo_magellan,ProgrammingRobotsStudyGroup/robo_magellan
|
from auto_number import AutoNumber
class STATE(AutoNumber):
Start = ()
Following_waypoint = ()
Avoiding_obstacle = ()
Driving_toward_cone = ()
Driving_away_from_cone = ()
Success = ()
Failure = ()
End = ()
class TRANSITION(AutoNumber):
obstacle_seen = ()
near_cone = ()
obstacle_cleared = ()
touched_cone = ()
passed_cone = ()
segment_timeout = ()
touched_last_cone = ()
passed_last_cone = ()
course_timeout = ()
+ cleared_cone = ()
|
Add new state for driving away from cone
|
## Code Before:
from auto_number import AutoNumber
class STATE(AutoNumber):
Start = ()
Following_waypoint = ()
Avoiding_obstacle = ()
Driving_toward_cone = ()
Driving_away_from_cone = ()
Success = ()
Failure = ()
End = ()
class TRANSITION(AutoNumber):
obstacle_seen = ()
near_cone = ()
obstacle_cleared = ()
touched_cone = ()
passed_cone = ()
segment_timeout = ()
touched_last_cone = ()
passed_last_cone = ()
course_timeout = ()
## Instruction:
Add new state for driving away from cone
## Code After:
from auto_number import AutoNumber
class STATE(AutoNumber):
Start = ()
Following_waypoint = ()
Avoiding_obstacle = ()
Driving_toward_cone = ()
Driving_away_from_cone = ()
Success = ()
Failure = ()
End = ()
class TRANSITION(AutoNumber):
obstacle_seen = ()
near_cone = ()
obstacle_cleared = ()
touched_cone = ()
passed_cone = ()
segment_timeout = ()
touched_last_cone = ()
passed_last_cone = ()
course_timeout = ()
cleared_cone = ()
|
from auto_number import AutoNumber
class STATE(AutoNumber):
Start = ()
Following_waypoint = ()
Avoiding_obstacle = ()
Driving_toward_cone = ()
Driving_away_from_cone = ()
Success = ()
Failure = ()
End = ()
class TRANSITION(AutoNumber):
obstacle_seen = ()
near_cone = ()
obstacle_cleared = ()
touched_cone = ()
passed_cone = ()
segment_timeout = ()
touched_last_cone = ()
passed_last_cone = ()
course_timeout = ()
+ cleared_cone = ()
|
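
The `auto_number` helper imported above is not part of this row. A common recipe for it (adapted from the Python `enum` documentation) is sketched below; this is an assumption about what the import provides, not code taken from the repository.

```python
# A common AutoNumber recipe (adapted from the Python enum docs);
# assumed equivalent of the auto_number module imported above.
from enum import Enum

class AutoNumber(Enum):
    def __new__(cls):
        value = len(cls.__members__) + 1  # next ordinal, starting at 1
        obj = object.__new__(cls)
        obj._value_ = value
        return obj

class STATE(AutoNumber):
    Start = ()
    Following_waypoint = ()

print(STATE.Following_waypoint.value)  # 2
```
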
a71c6c03b02a15674fac0995d120f5c2180e8767
|
plugin/floo/sublime.py
|
plugin/floo/sublime.py
|
from collections import defaultdict
import time
TIMEOUTS = defaultdict(list)
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
then = time.time() + timeout
TIMEOUTS[then].append(lambda: func(*args, **kwargs))
def call_timeouts():
now = time.time()
to_remove = []
for t, timeouts in TIMEOUTS.items():
if now >= t:
for timeout in timeouts:
timeout()
to_remove.append(t)
for k in to_remove:
del TIMEOUTS[k]
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
|
from collections import defaultdict
import time
TIMEOUTS = defaultdict(list)
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
then = time.time() + (timeout / 1000.0)
TIMEOUTS[then].append(lambda: func(*args, **kwargs))
def call_timeouts():
now = time.time()
to_remove = []
for t, timeouts in TIMEOUTS.items():
if now >= t:
for timeout in timeouts:
timeout()
to_remove.append(t)
for k in to_remove:
del TIMEOUTS[k]
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
|
Fix off-by-1000 error
|
Fix off-by-1000 error
|
Python
|
apache-2.0
|
Floobits/floobits-neovim,Floobits/floobits-neovim-old,Floobits/floobits-vim
|
from collections import defaultdict
import time
TIMEOUTS = defaultdict(list)
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
- then = time.time() + timeout
+ then = time.time() + (timeout / 1000.0)
TIMEOUTS[then].append(lambda: func(*args, **kwargs))
def call_timeouts():
now = time.time()
to_remove = []
for t, timeouts in TIMEOUTS.items():
if now >= t:
for timeout in timeouts:
timeout()
to_remove.append(t)
for k in to_remove:
del TIMEOUTS[k]
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
|
Fix off-by-1000 error
|
## Code Before:
from collections import defaultdict
import time
TIMEOUTS = defaultdict(list)
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
then = time.time() + timeout
TIMEOUTS[then].append(lambda: func(*args, **kwargs))
def call_timeouts():
now = time.time()
to_remove = []
for t, timeouts in TIMEOUTS.items():
if now >= t:
for timeout in timeouts:
timeout()
to_remove.append(t)
for k in to_remove:
del TIMEOUTS[k]
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
## Instruction:
Fix off-by-1000 error
## Code After:
from collections import defaultdict
import time
TIMEOUTS = defaultdict(list)
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
then = time.time() + (timeout / 1000.0)
TIMEOUTS[then].append(lambda: func(*args, **kwargs))
def call_timeouts():
now = time.time()
to_remove = []
for t, timeouts in TIMEOUTS.items():
if now >= t:
for timeout in timeouts:
timeout()
to_remove.append(t)
for k in to_remove:
del TIMEOUTS[k]
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
|
from collections import defaultdict
import time
TIMEOUTS = defaultdict(list)
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
- then = time.time() + timeout
+ then = time.time() + (timeout / 1000.0)
? + ++++++++++
TIMEOUTS[then].append(lambda: func(*args, **kwargs))
def call_timeouts():
now = time.time()
to_remove = []
for t, timeouts in TIMEOUTS.items():
if now >= t:
for timeout in timeouts:
timeout()
to_remove.append(t)
for k in to_remove:
del TIMEOUTS[k]
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
|
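
The bug fixed here is a unit mismatch: Sublime Text's `set_timeout` convention passes milliseconds, while `time.time()` returns seconds, so without the division every timer landed roughly 1000x too far in the future. A standalone sanity check of the corrected shim (a sketch, not the plugin's own tests):

```python
import time
from collections import defaultdict

TIMEOUTS = defaultdict(list)

def set_timeout(func, timeout):
    # timeout arrives in milliseconds (Sublime convention);
    # time.time() is in seconds, hence the /1000.0 conversion.
    then = time.time() + (timeout / 1000.0)
    TIMEOUTS[then].append(func)

set_timeout(lambda: None, 500)
fire_at = next(iter(TIMEOUTS))
assert fire_at - time.time() < 1.0  # ~0.5 s away, not ~500 s
```
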
14c86e3c93bd5114b74c125fdb8213b22342c95c
|
tests/manual_cleanup.py
|
tests/manual_cleanup.py
|
from globus_cli.services.transfer import get_client as get_tc
from tests.framework.cli_testcase import default_test_config
try:
from mock import patch
except ImportError:
from unittest.mock import patch
def cleanup_bookmarks(tc):
for bm in tc.bookmark_list():
tc.delete_bookmark(bm['id'])
@patch("globus_cli.config.get_config_obj", new=default_test_config)
def main():
tc = get_tc()
cleanup_bookmarks(tc)
if __name__ == '__main__':
main()
|
import click
from globus_cli.services.transfer import get_client as get_tc
from tests.framework.cli_testcase import default_test_config
try:
from mock import patch
except ImportError:
from unittest.mock import patch
def cleanup_bookmarks(tc):
for bm in tc.bookmark_list():
tc.delete_bookmark(bm['id'])
def cleanup_tasks(tc):
tasks = tc.task_list(num_results=None, filter="status:ACTIVE,INACTIVE")
for t in tasks:
tc.cancel_task(t['task_id'])
@click.command("cleanup")
@click.option("--cancel-jobs", is_flag=True)
def main(cancel_jobs):
with patch("globus_cli.config.get_config_obj", new=default_test_config):
tc = get_tc()
cleanup_bookmarks(tc)
if cancel_jobs:
cleanup_tasks(tc)
if __name__ == '__main__':
main()
|
Add `--cancel-jobs` to manual cleanup script
|
Add `--cancel-jobs` to manual cleanup script
Add an option to this script to cancel all ACTIVE,INACTIVE tasks (i.e.
not SUCCEEDED,FAILED).
While this can disrupt a run of the tests pretty badly if you run it
while the tests are running, it's pretty much the only way to "fix it" if
the tests go off the rails because of a partial or failed job, etc.
|
Python
|
apache-2.0
|
globus/globus-cli,globus/globus-cli
|
+ import click
+
from globus_cli.services.transfer import get_client as get_tc
from tests.framework.cli_testcase import default_test_config
try:
from mock import patch
except ImportError:
from unittest.mock import patch
def cleanup_bookmarks(tc):
for bm in tc.bookmark_list():
tc.delete_bookmark(bm['id'])
+ def cleanup_tasks(tc):
+ tasks = tc.task_list(num_results=None, filter="status:ACTIVE,INACTIVE")
+ for t in tasks:
+ tc.cancel_task(t['task_id'])
+
+
+ @click.command("cleanup")
+ @click.option("--cancel-jobs", is_flag=True)
+ def main(cancel_jobs):
- @patch("globus_cli.config.get_config_obj", new=default_test_config)
+ with patch("globus_cli.config.get_config_obj", new=default_test_config):
- def main():
- tc = get_tc()
+ tc = get_tc()
- cleanup_bookmarks(tc)
+ cleanup_bookmarks(tc)
+
+ if cancel_jobs:
+ cleanup_tasks(tc)
if __name__ == '__main__':
main()
|
Add `--cancel-jobs` to manual cleanup script
|
## Code Before:
from globus_cli.services.transfer import get_client as get_tc
from tests.framework.cli_testcase import default_test_config
try:
from mock import patch
except ImportError:
from unittest.mock import patch
def cleanup_bookmarks(tc):
for bm in tc.bookmark_list():
tc.delete_bookmark(bm['id'])
@patch("globus_cli.config.get_config_obj", new=default_test_config)
def main():
tc = get_tc()
cleanup_bookmarks(tc)
if __name__ == '__main__':
main()
## Instruction:
Add `--cancel-jobs` to manual cleanup script
## Code After:
import click
from globus_cli.services.transfer import get_client as get_tc
from tests.framework.cli_testcase import default_test_config
try:
from mock import patch
except ImportError:
from unittest.mock import patch
def cleanup_bookmarks(tc):
for bm in tc.bookmark_list():
tc.delete_bookmark(bm['id'])
def cleanup_tasks(tc):
tasks = tc.task_list(num_results=None, filter="status:ACTIVE,INACTIVE")
for t in tasks:
tc.cancel_task(t['task_id'])
@click.command("cleanup")
@click.option("--cancel-jobs", is_flag=True)
def main(cancel_jobs):
with patch("globus_cli.config.get_config_obj", new=default_test_config):
tc = get_tc()
cleanup_bookmarks(tc)
if cancel_jobs:
cleanup_tasks(tc)
if __name__ == '__main__':
main()
|
+ import click
+
from globus_cli.services.transfer import get_client as get_tc
from tests.framework.cli_testcase import default_test_config
try:
from mock import patch
except ImportError:
from unittest.mock import patch
def cleanup_bookmarks(tc):
for bm in tc.bookmark_list():
tc.delete_bookmark(bm['id'])
+ def cleanup_tasks(tc):
+ tasks = tc.task_list(num_results=None, filter="status:ACTIVE,INACTIVE")
+ for t in tasks:
+ tc.cancel_task(t['task_id'])
+
+
+ @click.command("cleanup")
+ @click.option("--cancel-jobs", is_flag=True)
+ def main(cancel_jobs):
- @patch("globus_cli.config.get_config_obj", new=default_test_config)
? ^
+ with patch("globus_cli.config.get_config_obj", new=default_test_config):
? ^^^^^^^^^ +
- def main():
- tc = get_tc()
+ tc = get_tc()
? ++++
- cleanup_bookmarks(tc)
+ cleanup_bookmarks(tc)
? ++++
+
+ if cancel_jobs:
+ cleanup_tasks(tc)
if __name__ == '__main__':
main()
|
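
The rewrite swaps `mock.patch` from decorator form to context-manager form so that `main` can accept the click-provided `cancel_jobs` argument. The two forms are interchangeable in effect; a minimal illustration, patching a stdlib function rather than anything from globus-cli:

```python
import os
from unittest.mock import patch

def fake_getcwd():
    return "/tmp/fake"

# Decorator form: the patch is active for the whole function body.
@patch("os.getcwd", new=fake_getcwd)
def decorated():
    return os.getcwd()

# Context-manager form: the patch is scoped to the with-block.
def contextual():
    with patch("os.getcwd", new=fake_getcwd):
        return os.getcwd()

assert decorated() == contextual() == "/tmp/fake"
```
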
d7f3ea41bc3d252d786a339fc34337f01e1cc3eb
|
django_dbq/migrations/0001_initial.py
|
django_dbq/migrations/0001_initial.py
|
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import uuid
try:
from django.db.models import UUIDField
except ImportError:
from django_dbq.fields import UUIDField
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Job',
fields=[
('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)),
('created', models.DateTimeField(db_index=True, auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=100)),
('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])),
('next_task', models.CharField(max_length=100, blank=True)),
('workspace', jsonfield.fields.JSONField(null=True)),
('queue_name', models.CharField(db_index=True, max_length=20, default='default')),
],
options={
'ordering': ['-created'],
},
),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import uuid
from django.db.models import UUIDField
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Job',
fields=[
('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)),
('created', models.DateTimeField(db_index=True, auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=100)),
('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])),
('next_task', models.CharField(max_length=100, blank=True)),
('workspace', jsonfield.fields.JSONField(null=True)),
('queue_name', models.CharField(db_index=True, max_length=20, default='default')),
],
options={
'ordering': ['-created'],
},
),
]
|
Remove reference to old UUIDField in Django migration
|
Remove reference to old UUIDField in Django migration
|
Python
|
bsd-2-clause
|
dabapps/django-db-queue
|
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import uuid
- try:
- from django.db.models import UUIDField
+ from django.db.models import UUIDField
- except ImportError:
- from django_dbq.fields import UUIDField
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Job',
fields=[
('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)),
('created', models.DateTimeField(db_index=True, auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=100)),
('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])),
('next_task', models.CharField(max_length=100, blank=True)),
('workspace', jsonfield.fields.JSONField(null=True)),
('queue_name', models.CharField(db_index=True, max_length=20, default='default')),
],
options={
'ordering': ['-created'],
},
),
]
|
Remove reference to old UUIDField in Django migration
|
## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import uuid
try:
from django.db.models import UUIDField
except ImportError:
from django_dbq.fields import UUIDField
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Job',
fields=[
('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)),
('created', models.DateTimeField(db_index=True, auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=100)),
('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])),
('next_task', models.CharField(max_length=100, blank=True)),
('workspace', jsonfield.fields.JSONField(null=True)),
('queue_name', models.CharField(db_index=True, max_length=20, default='default')),
],
options={
'ordering': ['-created'],
},
),
]
## Instruction:
Remove reference to old UUIDField in Django migration
## Code After:
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import uuid
from django.db.models import UUIDField
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Job',
fields=[
('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)),
('created', models.DateTimeField(db_index=True, auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=100)),
('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])),
('next_task', models.CharField(max_length=100, blank=True)),
('workspace', jsonfield.fields.JSONField(null=True)),
('queue_name', models.CharField(db_index=True, max_length=20, default='default')),
],
options={
'ordering': ['-created'],
},
),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import uuid
- try:
- from django.db.models import UUIDField
? ----
+ from django.db.models import UUIDField
- except ImportError:
- from django_dbq.fields import UUIDField
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Job',
fields=[
('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)),
('created', models.DateTimeField(db_index=True, auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=100)),
('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])),
('next_task', models.CharField(max_length=100, blank=True)),
('workspace', jsonfield.fields.JSONField(null=True)),
('queue_name', models.CharField(db_index=True, max_length=20, default='default')),
],
options={
'ordering': ['-created'],
},
),
]
|
311b0d5a0baabbb9c1476a156dbae1b919478704
|
src/upgradegit/cli.py
|
src/upgradegit/cli.py
|
import click
import requirements
import os
import re
@click.command()
@click.option('--file', default='requirements.txt', help='File to upgrade')
@click.option('--branch', default='master', help='Branch to upgrade from')
def upgrade(file, branch):
lines = []
with open(file, 'r') as f:
for req in requirements.parse(f):
line = ''
if (req.uri):
reg = r'([0-9a-z]*)(?=(\s+refs\/heads\/'+branch+'))'
uri = req.uri.replace('git+ssh://', 'ssh://git@')
cmd = 'git ls-remote {} {} HEAD'.format(uri, branch)
result = os.popen(cmd).read()
result = result.strip()
results = re.findall(reg, result)
result = results[0][0]
line = re.sub(r'\@([0-9a-f]*)(?=(#|$))', '@'+result, req.line)
else:
name = req.name
spec_op = req.specs[0][0]
spec_ver = req.specs[0][1]
line = '{name}{spec_op}{spec_ver}'.format(
name=name, spec_op=spec_op, spec_ver=spec_ver)
lines.append(line)
with open(file, 'w') as f:
for line in lines:
f.write(line+'\n')
if __name__ == '__main__':
upgrade()
|
import click
import requirements
import os
import re
@click.command()
@click.option('--file', default='requirements.txt', help='File to upgrade')
@click.option('--branch', default='master', help='Branch to upgrade from')
def upgrade(file, branch):
lines = []
with open(file, 'r') as f:
for req in requirements.parse(f):
line = ''
if (req.uri):
reg = r'([0-9a-z]*)(?=(\s+refs\/heads\/'+branch+'))'
uri = req.uri.replace('git+ssh://', 'ssh://git@')
cmd = 'git ls-remote {} {} HEAD'.format(uri, branch)
result = os.popen(cmd).read()
result = result.strip()
results = re.findall(reg, result)
result = results[0][0]
line = re.sub(r'.git(?=(#|$))', '.git@'+result, req.line)
else:
name = req.name
spec_op = req.specs[0][0]
spec_ver = req.specs[0][1]
line = '{name}{spec_op}{spec_ver}'.format(
name=name, spec_op=spec_op, spec_ver=spec_ver)
lines.append(line)
with open(file, 'w') as f:
for line in lines:
f.write(line+'\n')
if __name__ == '__main__':
upgrade()
|
Allow for requirements without a hash
|
Allow for requirements without a hash
|
Python
|
mit
|
bevanmw/gitupgrade
|
import click
import requirements
import os
import re
@click.command()
@click.option('--file', default='requirements.txt', help='File to upgrade')
@click.option('--branch', default='master', help='Branch to upgrade from')
def upgrade(file, branch):
lines = []
with open(file, 'r') as f:
for req in requirements.parse(f):
line = ''
if (req.uri):
reg = r'([0-9a-z]*)(?=(\s+refs\/heads\/'+branch+'))'
uri = req.uri.replace('git+ssh://', 'ssh://git@')
cmd = 'git ls-remote {} {} HEAD'.format(uri, branch)
result = os.popen(cmd).read()
result = result.strip()
results = re.findall(reg, result)
result = results[0][0]
- line = re.sub(r'\@([0-9a-f]*)(?=(#|$))', '@'+result, req.line)
+ line = re.sub(r'.git(?=(#|$))', '.git@'+result, req.line)
else:
name = req.name
spec_op = req.specs[0][0]
spec_ver = req.specs[0][1]
line = '{name}{spec_op}{spec_ver}'.format(
name=name, spec_op=spec_op, spec_ver=spec_ver)
lines.append(line)
with open(file, 'w') as f:
for line in lines:
f.write(line+'\n')
if __name__ == '__main__':
upgrade()
|
Allow for requirements without a hash
|
## Code Before:
import click
import requirements
import os
import re
@click.command()
@click.option('--file', default='requirements.txt', help='File to upgrade')
@click.option('--branch', default='master', help='Branch to upgrade from')
def upgrade(file, branch):
lines = []
with open(file, 'r') as f:
for req in requirements.parse(f):
line = ''
if (req.uri):
reg = r'([0-9a-z]*)(?=(\s+refs\/heads\/'+branch+'))'
uri = req.uri.replace('git+ssh://', 'ssh://git@')
cmd = 'git ls-remote {} {} HEAD'.format(uri, branch)
result = os.popen(cmd).read()
result = result.strip()
results = re.findall(reg, result)
result = results[0][0]
line = re.sub(r'\@([0-9a-f]*)(?=(#|$))', '@'+result, req.line)
else:
name = req.name
spec_op = req.specs[0][0]
spec_ver = req.specs[0][1]
line = '{name}{spec_op}{spec_ver}'.format(
name=name, spec_op=spec_op, spec_ver=spec_ver)
lines.append(line)
with open(file, 'w') as f:
for line in lines:
f.write(line+'\n')
if __name__ == '__main__':
upgrade()
## Instruction:
Allow for requirements without a hash
## Code After:
import click
import requirements
import os
import re
@click.command()
@click.option('--file', default='requirements.txt', help='File to upgrade')
@click.option('--branch', default='master', help='Branch to upgrade from')
def upgrade(file, branch):
lines = []
with open(file, 'r') as f:
for req in requirements.parse(f):
line = ''
if (req.uri):
reg = r'([0-9a-z]*)(?=(\s+refs\/heads\/'+branch+'))'
uri = req.uri.replace('git+ssh://', 'ssh://git@')
cmd = 'git ls-remote {} {} HEAD'.format(uri, branch)
result = os.popen(cmd).read()
result = result.strip()
results = re.findall(reg, result)
result = results[0][0]
line = re.sub(r'.git(?=(#|$))', '.git@'+result, req.line)
else:
name = req.name
spec_op = req.specs[0][0]
spec_ver = req.specs[0][1]
line = '{name}{spec_op}{spec_ver}'.format(
name=name, spec_op=spec_op, spec_ver=spec_ver)
lines.append(line)
with open(file, 'w') as f:
for line in lines:
f.write(line+'\n')
if __name__ == '__main__':
upgrade()
|
import click
import requirements
import os
import re
@click.command()
@click.option('--file', default='requirements.txt', help='File to upgrade')
@click.option('--branch', default='master', help='Branch to upgrade from')
def upgrade(file, branch):
lines = []
with open(file, 'r') as f:
for req in requirements.parse(f):
line = ''
if (req.uri):
reg = r'([0-9a-z]*)(?=(\s+refs\/heads\/'+branch+'))'
uri = req.uri.replace('git+ssh://', 'ssh://git@')
cmd = 'git ls-remote {} {} HEAD'.format(uri, branch)
result = os.popen(cmd).read()
result = result.strip()
results = re.findall(reg, result)
result = results[0][0]
- line = re.sub(r'\@([0-9a-f]*)(?=(#|$))', '@'+result, req.line)
? ^^^^^^^^^^^^^
+ line = re.sub(r'.git(?=(#|$))', '.git@'+result, req.line)
? ^^^^ ++++
else:
name = req.name
spec_op = req.specs[0][0]
spec_ver = req.specs[0][1]
line = '{name}{spec_op}{spec_ver}'.format(
name=name, spec_op=spec_op, spec_ver=spec_ver)
lines.append(line)
with open(file, 'w') as f:
for line in lines:
f.write(line+'\n')
if __name__ == '__main__':
upgrade()
|
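
The new pattern targets `.git` only when it is immediately followed by `#` or end-of-line, so a requirement that was never pinned still gets the fresh hash appended after `.git`. A standalone check with a hypothetical SHA and URL:

```python
import re

sha = "0123abc"  # hypothetical `git ls-remote` result
line = "git+ssh://git@example.com/org/repo.git#egg=repo"  # no pinned hash
print(re.sub(r'.git(?=(#|$))', '.git@' + sha, line))
# -> git+ssh://git@example.com/org/repo.git@0123abc#egg=repo
```
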
a09689c570e70c80ad7cadd9702133b3851c63b9
|
providers/provider.py
|
providers/provider.py
|
import json
import requests
from requests.utils import get_unicode_from_response
from lxml import html as lxml_html
class BaseProvider(object):
# ==== HELPER METHODS ====
def parse_html(self, url, css_selector):
html = self._http_get(url)
document = lxml_html.document_fromstring(html)
results = document.cssselect(css_selector)
data = [result.text_content() for result in results]
return data
def traverse_json(self, data, path):
if not path:
return data
for item in path.split("."):
if item.isdigit():
item = int(item)
try:
data = data[item]
except (IndexError, KeyError):
return {}
return data
def parse_json(self, url, path=None):
data = self._http_get(url)
data = json.loads(data)
data = self.traverse_json(data, path)
return data
# ==== PRIVATE METHODS ====
def _http_get(self, url, timeout=60 * 60):
response = requests.get(url, timeout=10)
return get_unicode_from_response(response)
|
import json
import requests
from requests.utils import get_unicode_from_response
from lxml import html as lxml_html
class BaseProvider(object):
# ==== HELPER METHODS ====
def parse_html(self, url, css_selector, timeout=60):
html = self._http_get(url, timeout=timeout)
document = lxml_html.document_fromstring(html)
results = document.cssselect(css_selector)
data = [result.text_content() for result in results]
return data
def traverse_json(self, data, path):
if not path:
return data
for item in path.split("."):
if item.isdigit():
item = int(item)
try:
data = data[item]
except (IndexError, KeyError):
return {}
return data
def parse_json(self, url, path=None, timeout=60):
data = self._http_get(url, timeout=timeout)
data = json.loads(data)
data = self.traverse_json(data, path)
return data
# ==== PRIVATE METHODS ====
def _http_get(self, url, timeout=60):
response = requests.get(url, timeout=timeout)
return get_unicode_from_response(response)
|
Increase timeout to 60 sec and make available to external callers.
|
Increase timeout to 60 sec and make available to external callers.
|
Python
|
mit
|
EmilStenstrom/nephele
|
import json
import requests
from requests.utils import get_unicode_from_response
from lxml import html as lxml_html
class BaseProvider(object):
# ==== HELPER METHODS ====
- def parse_html(self, url, css_selector):
+ def parse_html(self, url, css_selector, timeout=60):
- html = self._http_get(url)
+ html = self._http_get(url, timeout=timeout)
document = lxml_html.document_fromstring(html)
results = document.cssselect(css_selector)
data = [result.text_content() for result in results]
return data
def traverse_json(self, data, path):
if not path:
return data
for item in path.split("."):
if item.isdigit():
item = int(item)
try:
data = data[item]
except (IndexError, KeyError):
return {}
return data
- def parse_json(self, url, path=None):
+ def parse_json(self, url, path=None, timeout=60):
- data = self._http_get(url)
+ data = self._http_get(url, timeout=timeout)
data = json.loads(data)
data = self.traverse_json(data, path)
return data
# ==== PRIVATE METHODS ====
- def _http_get(self, url, timeout=60 * 60):
+ def _http_get(self, url, timeout=60):
- response = requests.get(url, timeout=10)
+ response = requests.get(url, timeout=timeout)
return get_unicode_from_response(response)
|
Increase timeout to 60 sec and make available to external callers.
|
## Code Before:
import json
import requests
from requests.utils import get_unicode_from_response
from lxml import html as lxml_html
class BaseProvider(object):
# ==== HELPER METHODS ====
def parse_html(self, url, css_selector):
html = self._http_get(url)
document = lxml_html.document_fromstring(html)
results = document.cssselect(css_selector)
data = [result.text_content() for result in results]
return data
def traverse_json(self, data, path):
if not path:
return data
for item in path.split("."):
if item.isdigit():
item = int(item)
try:
data = data[item]
except (IndexError, KeyError):
return {}
return data
def parse_json(self, url, path=None):
data = self._http_get(url)
data = json.loads(data)
data = self.traverse_json(data, path)
return data
# ==== PRIVATE METHODS ====
def _http_get(self, url, timeout=60 * 60):
response = requests.get(url, timeout=10)
return get_unicode_from_response(response)
## Instruction:
Increase timeout to 60 sec and make available to external callers.
## Code After:
import json
import requests
from requests.utils import get_unicode_from_response
from lxml import html as lxml_html
class BaseProvider(object):
# ==== HELPER METHODS ====
def parse_html(self, url, css_selector, timeout=60):
html = self._http_get(url, timeout=timeout)
document = lxml_html.document_fromstring(html)
results = document.cssselect(css_selector)
data = [result.text_content() for result in results]
return data
def traverse_json(self, data, path):
if not path:
return data
for item in path.split("."):
if item.isdigit():
item = int(item)
try:
data = data[item]
except (IndexError, KeyError):
return {}
return data
def parse_json(self, url, path=None, timeout=60):
data = self._http_get(url, timeout=timeout)
data = json.loads(data)
data = self.traverse_json(data, path)
return data
# ==== PRIVATE METHODS ====
def _http_get(self, url, timeout=60):
response = requests.get(url, timeout=timeout)
return get_unicode_from_response(response)
|
import json
import requests
from requests.utils import get_unicode_from_response
from lxml import html as lxml_html
class BaseProvider(object):
# ==== HELPER METHODS ====
- def parse_html(self, url, css_selector):
+ def parse_html(self, url, css_selector, timeout=60):
? ++++++++++++
- html = self._http_get(url)
+ html = self._http_get(url, timeout=timeout)
? +++++++++++++++++
document = lxml_html.document_fromstring(html)
results = document.cssselect(css_selector)
data = [result.text_content() for result in results]
return data
def traverse_json(self, data, path):
if not path:
return data
for item in path.split("."):
if item.isdigit():
item = int(item)
try:
data = data[item]
except (IndexError, KeyError):
return {}
return data
- def parse_json(self, url, path=None):
+ def parse_json(self, url, path=None, timeout=60):
? ++++++++++++
- data = self._http_get(url)
+ data = self._http_get(url, timeout=timeout)
? +++++++++++++++++
data = json.loads(data)
data = self.traverse_json(data, path)
return data
# ==== PRIVATE METHODS ====
- def _http_get(self, url, timeout=60 * 60):
? -----
+ def _http_get(self, url, timeout=60):
- response = requests.get(url, timeout=10)
? ^^
+ response = requests.get(url, timeout=timeout)
? ^^^^^^^
return get_unicode_from_response(response)
|
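
One detail worth knowing when reading this change: in `requests`, `timeout` is measured in seconds and bounds the connect phase and each blocking read; it is not a cap on total download time. That is why the hard-coded `timeout=10` silently defeated the generous default in the old signature. A `(connect, read)` tuple is also accepted:

```python
import requests

# Bounds: 5 s to establish the connection, 60 s for each read.
# Neither value limits the total duration of the response.
resp = requests.get("https://example.com", timeout=(5, 60))
print(resp.status_code)
```
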
1cab84d3f3726df2a7cfe4e5ad8efee81051c73e
|
tests/test_patched_stream.py
|
tests/test_patched_stream.py
|
import nose
import StringIO
import cle
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
|
import nose
import StringIO
import os
import cle
tests_path = os.path.join(os.path.dirname(__file__), '..', '..', 'binaries', 'tests')
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
def test_malformed_sections():
ld = cle.Loader(os.path.join(tests_path, 'i386', 'oxfoo1m3'))
nose.tools.assert_equal(len(ld.main_object.segments), 1)
nose.tools.assert_equal(len(ld.main_object.sections), 0)
|
Add tests for loading binaries with malformed sections
|
Add tests for loading binaries with malformed sections
|
Python
|
bsd-2-clause
|
angr/cle
|
import nose
import StringIO
+ import os
import cle
+
+ tests_path = os.path.join(os.path.dirname(__file__), '..', '..', 'binaries', 'tests')
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
+ def test_malformed_sections():
+ ld = cle.Loader(os.path.join(tests_path, 'i386', 'oxfoo1m3'))
+ nose.tools.assert_equal(len(ld.main_object.segments), 1)
+ nose.tools.assert_equal(len(ld.main_object.sections), 0)
+
|
Add tests for loading binaries with malformed sections
|
## Code Before:
import nose
import StringIO
import cle
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
## Instruction:
Add tests for loading binaries with malformed sections
## Code After:
import nose
import StringIO
import os
import cle
tests_path = os.path.join(os.path.dirname(__file__), '..', '..', 'binaries', 'tests')
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
def test_malformed_sections():
ld = cle.Loader(os.path.join(tests_path, 'i386', 'oxfoo1m3'))
nose.tools.assert_equal(len(ld.main_object.segments), 1)
nose.tools.assert_equal(len(ld.main_object.sections), 0)
|
import nose
import StringIO
+ import os
import cle
+
+ tests_path = os.path.join(os.path.dirname(__file__), '..', '..', 'binaries', 'tests')
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
+
+ def test_malformed_sections():
+ ld = cle.Loader(os.path.join(tests_path, 'i386', 'oxfoo1m3'))
+ nose.tools.assert_equal(len(ld.main_object.segments), 1)
+ nose.tools.assert_equal(len(ld.main_object.sections), 0)
|
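
For readers skimming the assertions: a `PatchedStream` overlays `(offset, bytes)` pairs on the wrapped stream at read time. A toy read-through illustration of the idea (not cle's implementation, and it ignores the negative-offset case exercised by `stream4`):

```python
def patched_read(base, patches):
    # Overlay each (offset, data) pair on a copy of the base bytes,
    # preserving the original length.
    buf = bytearray(base)
    for off, data in patches:
        if off >= 0:
            buf[off:off + len(data)] = data
    return bytes(buf[:len(base)])

assert patched_read(b'0123456789abcdef', [(2, b'AA')]) == b'01AA456789abcdef'
```
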
44f603cd947f63101cf6b7eb8e49b5210cfa4f6f
|
wry/__init__.py
|
wry/__init__.py
|
import AMTDevice
import AMTBoot
import AMTPower
import AMTKVM
import AMTOptIn
import AMTRedirection
AMTDevice = AMTDevice.AMTDevice
AMTBoot = AMTBoot.AMTBoot
AMTPower = AMTPower.AMTPower
AMTKVM = AMTKVM.AMTKVM
AMTOptin = AMTOptIn.AMTOptIn
AMTRedirection = AMTRedirection.AMTRedirection
# For backwards compatibility
device = {
'AMTDevice': AMTDevice,
'AMTBoot': AMTBoot,
'AMTPower': AMTPower,
'AMTKVM': AMTKVM,
'AMTOptIn': AMTOptIn,
'AMTRedirection': AMTRedirection,
}
__all__ = [AMTDevice, AMTBoot, AMTPower, AMTKVM, AMTOptIn, AMTRedirection]
|
import version
import AMTDevice
import AMTBoot
import AMTPower
import AMTKVM
import AMTOptIn
import AMTRedirection
AMTDevice = AMTDevice.AMTDevice
AMTBoot = AMTBoot.AMTBoot
AMTPower = AMTPower.AMTPower
AMTKVM = AMTKVM.AMTKVM
AMTOptin = AMTOptIn.AMTOptIn
AMTRedirection = AMTRedirection.AMTRedirection
# For backwards compatibility
device = {
'AMTDevice': AMTDevice,
'AMTBoot': AMTBoot,
'AMTPower': AMTPower,
'AMTKVM': AMTKVM,
'AMTOptIn': AMTOptIn,
'AMTRedirection': AMTRedirection,
}
__all__ = [AMTDevice, AMTBoot, AMTPower, AMTKVM, AMTOptIn, AMTRedirection]
|
Add version. Note this will cause the file to be modified in your working copy. This change is gitignored
|
Add version. Note this will cause the file to be modified in your
working copy. This change is gitignored
|
Python
|
apache-2.0
|
ocadotechnology/wry
|
+ import version
import AMTDevice
import AMTBoot
import AMTPower
import AMTKVM
import AMTOptIn
import AMTRedirection
AMTDevice = AMTDevice.AMTDevice
AMTBoot = AMTBoot.AMTBoot
AMTPower = AMTPower.AMTPower
AMTKVM = AMTKVM.AMTKVM
AMTOptin = AMTOptIn.AMTOptIn
AMTRedirection = AMTRedirection.AMTRedirection
# For backwards compatibility
device = {
'AMTDevice': AMTDevice,
'AMTBoot': AMTBoot,
'AMTPower': AMTPower,
'AMTKVM': AMTKVM,
'AMTOptIn': AMTOptIn,
'AMTRedirection': AMTRedirection,
}
__all__ = [AMTDevice, AMTBoot, AMTPower, AMTKVM, AMTOptIn, AMTRedirection]
|
Add version. Note this will cause the file to be modified in your working copy. This change is gitignored
|
## Code Before:
import AMTDevice
import AMTBoot
import AMTPower
import AMTKVM
import AMTOptIn
import AMTRedirection
AMTDevice = AMTDevice.AMTDevice
AMTBoot = AMTBoot.AMTBoot
AMTPower = AMTPower.AMTPower
AMTKVM = AMTKVM.AMTKVM
AMTOptin = AMTOptIn.AMTOptIn
AMTRedirection = AMTRedirection.AMTRedirection
# For backwards compatibility
device = {
'AMTDevice': AMTDevice,
'AMTBoot': AMTBoot,
'AMTPower': AMTPower,
'AMTKVM': AMTKVM,
'AMTOptIn': AMTOptIn,
'AMTRedirection': AMTRedirection,
}
__all__ = [AMTDevice, AMTBoot, AMTPower, AMTKVM, AMTOptIn, AMTRedirection]
## Instruction:
Add version. Note this will cause the file to be modified in your working copy. This change is gitignored
## Code After:
import version
import AMTDevice
import AMTBoot
import AMTPower
import AMTKVM
import AMTOptIn
import AMTRedirection
AMTDevice = AMTDevice.AMTDevice
AMTBoot = AMTBoot.AMTBoot
AMTPower = AMTPower.AMTPower
AMTKVM = AMTKVM.AMTKVM
AMTOptin = AMTOptIn.AMTOptIn
AMTRedirection = AMTRedirection.AMTRedirection
# For backwards compatibility
device = {
'AMTDevice': AMTDevice,
'AMTBoot': AMTBoot,
'AMTPower': AMTPower,
'AMTKVM': AMTKVM,
'AMTOptIn': AMTOptIn,
'AMTRedirection': AMTRedirection,
}
__all__ = [AMTDevice, AMTBoot, AMTPower, AMTKVM, AMTOptIn, AMTRedirection]
|
+ import version
import AMTDevice
import AMTBoot
import AMTPower
import AMTKVM
import AMTOptIn
import AMTRedirection
AMTDevice = AMTDevice.AMTDevice
AMTBoot = AMTBoot.AMTBoot
AMTPower = AMTPower.AMTPower
AMTKVM = AMTKVM.AMTKVM
AMTOptin = AMTOptIn.AMTOptIn
AMTRedirection = AMTRedirection.AMTRedirection
# For backwards compatibility
device = {
'AMTDevice': AMTDevice,
'AMTBoot': AMTBoot,
'AMTPower': AMTPower,
'AMTKVM': AMTKVM,
'AMTOptIn': AMTOptIn,
'AMTRedirection': AMTRedirection,
}
__all__ = [AMTDevice, AMTBoot, AMTPower, AMTKVM, AMTOptIn, AMTRedirection]
|
fe4a0e0b86300f7da5f45a5541ee9c42c0a76ab7
|
docker_manager/dispatcher.py
|
docker_manager/dispatcher.py
|
import requests_unixsocket
import urllib
import json
import re
def dockerapi_dispatcher(app,request):
method = request.method
uri = re.match(r"^.+/dockerapi/(.+)", request.url).group(1)
session = requests_unixsocket.Session()
unix_socket = urllib.quote_plus( app.config['SOCKET'] )
return getattr(session,method.lower())('http+unix://{0}/{1}'.format(unix_socket,uri),json=request.json)
|
import requests_unixsocket
import urllib
import re
def dockerapi_dispatcher(app,request):
method = request.method
uri = re.match(r"^.+/dockerapi/(.+)", request.url).group(1)
session = requests_unixsocket.Session()
unix_socket = urllib.quote_plus( app.config['SOCKET'] )
return getattr(session,method.lower())('http+unix://{0}/{1}'.format(unix_socket,uri),json=request.json)
|
Remove unused json module import
|
Remove unused json module import
|
Python
|
apache-2.0
|
nathanIL/docker-manager,nathanIL/docker-manager,nathanIL/docker-manager
|
import requests_unixsocket
import urllib
- import json
import re
def dockerapi_dispatcher(app,request):
method = request.method
uri = re.match(r"^.+/dockerapi/(.+)", request.url).group(1)
session = requests_unixsocket.Session()
unix_socket = urllib.quote_plus( app.config['SOCKET'] )
return getattr(session,method.lower())('http+unix://{0}/{1}'.format(unix_socket,uri),json=request.json)
|
Remove unused json module import
|
## Code Before:
import requests_unixsocket
import urllib
import json
import re
def dockerapi_dispatcher(app,request):
method = request.method
uri = re.match(r"^.+/dockerapi/(.+)", request.url).group(1)
session = requests_unixsocket.Session()
unix_socket = urllib.quote_plus( app.config['SOCKET'] )
return getattr(session,method.lower())('http+unix://{0}/{1}'.format(unix_socket,uri),json=request.json)
## Instruction:
Remove unused json module import
## Code After:
import requests_unixsocket
import urllib
import re
def dockerapi_dispatcher(app,request):
method = request.method
uri = re.match(r"^.+/dockerapi/(.+)", request.url).group(1)
session = requests_unixsocket.Session()
unix_socket = urllib.quote_plus( app.config['SOCKET'] )
return getattr(session,method.lower())('http+unix://{0}/{1}'.format(unix_socket,uri),json=request.json)
|
import requests_unixsocket
import urllib
- import json
import re
def dockerapi_dispatcher(app,request):
method = request.method
uri = re.match(r"^.+/dockerapi/(.+)", request.url).group(1)
session = requests_unixsocket.Session()
unix_socket = urllib.quote_plus( app.config['SOCKET'] )
return getattr(session,method.lower())('http+unix://{0}/{1}'.format(unix_socket,uri),json=request.json)
|
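
The `http+unix://` scheme comes from `requests_unixsocket`: the socket path is percent-encoded into the host position of the URL, which is why `quote_plus` is applied to `app.config['SOCKET']`. Shown standalone (Python 2 to match the row; the socket path is hypothetical):

```python
import urllib  # Python 2: quote_plus lives here, as in the row above

sock = urllib.quote_plus('/var/run/docker.sock')
print('http+unix://{0}/{1}'.format(sock, 'containers/json'))
# -> http+unix://%2Fvar%2Frun%2Fdocker.sock/containers/json
```
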
23a8943d2e3688753371b08c490aaae2052eb356
|
ckanext/mapactionevent/logic/action/create.py
|
ckanext/mapactionevent/logic/action/create.py
|
import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
if len(existing_events) == 0:
event_code = 1
else:
event_code = int(existing_events[0]) + 1
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
|
import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
event_code = 1 #default value, if there are no existing numerically named events
for event in existing_events:
if event.isdigit():
event_code = int(event) + 1
break
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
|
Make auto-incrementing event names work with a mixture of numeric and non-numeric event names
|
Make auto-incrementing event names work with a mixture of numeric and non-numeric event names
|
Python
|
agpl-3.0
|
aptivate/ckanext-mapactionevent,aptivate/ckanext-mapactionevent,aptivate/ckanext-mapactionevent
|
import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
- if len(existing_events) == 0:
- event_code = 1
- else:
+
+ event_code = 1 #default value, if there are no existing numerically named events
+
+ for event in existing_events:
+ if event.isdigit():
- event_code = int(existing_events[0]) + 1
+ event_code = int(event) + 1
+ break
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
|
Make auto-incrementing event names work with a mixture of numeric and non-numeric event names
|
## Code Before:
import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
if len(existing_events) == 0:
event_code = 1
else:
event_code = int(existing_events[0]) + 1
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
## Instruction:
Make auto-incrementing event names work with a mixture of numeric and non-numeric event names
## Code After:
import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
event_code = 1 #default value, if there are no existing numerically named events
for event in existing_events:
if event.isdigit():
event_code = int(event) + 1
break
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
|
import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
+
+ event_code = 1 #default value, if there are no existing numerically named events
+
- if len(existing_events) == 0:
? - ^ ^ ------
+ for event in existing_events:
? ++ ^^ ^^^^^
+ if event.isdigit():
- event_code = 1
- else:
- event_code = int(existing_events[0]) + 1
? --------- ----
+ event_code = int(event) + 1
? ++++
+ break
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
|
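
The fixed loop in miniature: `group_list` returns names sorted descending, so the first all-digit name is the current maximum, and non-numeric names no longer reach the `int()` call. A standalone run with hypothetical event names:

```python
existing_events = ['ops-drill', '00042', '00007']  # sorted desc, mixed names

event_code = 1  # default when no numerically named events exist
for event in existing_events:
    if event.isdigit():
        event_code = int(event) + 1
        break

print(str(event_code).zfill(5))  # -> 00043
```
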
e99855e31c30d0b554d24b14d98ae8b76e1fc0a0
|
create_tables.py
|
create_tables.py
|
from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
db.manual_close()
if __name__ == '__main__':
create_tables()
|
from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
if __name__ == '__main__':
create_tables()
db.manual_close()
|
Fix: close connection only when called as a script.
|
Fix: close connection only when called as a script.
|
Python
|
cc0-1.0
|
sketchturnerr/WaifuSim-backend,sketchturnerr/WaifuSim-backend
|
from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
- db.manual_close()
if __name__ == '__main__':
create_tables()
+ db.manual_close()
|
Fix: close connection only when called as a script.
|
## Code Before:
from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
db.manual_close()
if __name__ == '__main__':
create_tables()
## Instruction:
Fix: close connection only when called as a script.
## Code After:
from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
if __name__ == '__main__':
create_tables()
db.manual_close()
|
from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
- db.manual_close()
if __name__ == '__main__':
create_tables()
+ db.manual_close()
|
52d38e360b14fcfad01f87ff1e9ca5db27004877
|
src/comms/admin.py
|
src/comms/admin.py
|
from django.contrib import admin
from src.comms.models import ChannelDB
class MsgAdmin(admin.ModelAdmin):
list_display = ('id', 'db_date_sent', 'db_sender', 'db_receivers',
'db_channels', 'db_message', 'db_lock_storage')
list_display_links = ("id",)
ordering = ["db_date_sent", 'db_sender', 'db_receivers', 'db_channels']
#readonly_fields = ['db_message', 'db_sender', 'db_receivers', 'db_channels']
search_fields = ['id', '^db_date_sent', '^db_message']
save_as = True
save_on_top = True
list_select_related = True
#admin.site.register(Msg, MsgAdmin)
class ChannelAdmin(admin.ModelAdmin):
list_display = ('id', 'db_key', 'db_lock_storage', "db_subscriptions")
list_display_links = ("id", 'db_key')
ordering = ["db_key"]
search_fields = ['id', 'db_key', 'db_aliases']
save_as = True
save_on_top = True
list_select_related = True
fieldsets = (
(None, {'fields': (('db_key',), 'db_lock_storage')}),
)
admin.site.register(ChannelDB, ChannelAdmin)
|
from django.contrib import admin
from src.comms.models import ChannelDB
class MsgAdmin(admin.ModelAdmin):
list_display = ('id', 'db_date_sent', 'db_sender', 'db_receivers',
'db_channels', 'db_message', 'db_lock_storage')
list_display_links = ("id",)
ordering = ["db_date_sent", 'db_sender', 'db_receivers', 'db_channels']
#readonly_fields = ['db_message', 'db_sender', 'db_receivers', 'db_channels']
search_fields = ['id', '^db_date_sent', '^db_message']
save_as = True
save_on_top = True
list_select_related = True
#admin.site.register(Msg, MsgAdmin)
class ChannelAdmin(admin.ModelAdmin):
list_display = ('id', 'db_key', 'db_lock_storage')
list_display_links = ("id", 'db_key')
ordering = ["db_key"]
search_fields = ['id', 'db_key', 'db_aliases']
save_as = True
save_on_top = True
list_select_related = True
fieldsets = (
(None, {'fields': (('db_key',), 'db_lock_storage')}),
)
admin.site.register(ChannelDB, ChannelAdmin)
|
Remove unsupported M2M field in ChannelAdmin handler. Removes traceback when DEBUG=True.
|
Remove unsupported M2M field in ChannelAdmin handler. Removes traceback when DEBUG=True.
|
Python
|
bsd-3-clause
|
ypwalter/evennia,TheTypoMaster/evennia,TheTypoMaster/evennia,mrkulk/text-world,mrkulk/text-world,titeuf87/evennia,ergodicbreak/evennia,mrkulk/text-world,feend78/evennia,shollen/evennia,jamesbeebop/evennia,shollen/evennia,feend78/evennia,ergodicbreak/evennia,feend78/evennia,titeuf87/evennia,mrkulk/text-world,jamesbeebop/evennia,emergebtc/evennia,ypwalter/evennia,titeuf87/evennia,TheTypoMaster/evennia,emergebtc/evennia,titeuf87/evennia,ypwalter/evennia,feend78/evennia,emergebtc/evennia,jamesbeebop/evennia,ergodicbreak/evennia
|
from django.contrib import admin
from src.comms.models import ChannelDB
class MsgAdmin(admin.ModelAdmin):
list_display = ('id', 'db_date_sent', 'db_sender', 'db_receivers',
'db_channels', 'db_message', 'db_lock_storage')
list_display_links = ("id",)
ordering = ["db_date_sent", 'db_sender', 'db_receivers', 'db_channels']
#readonly_fields = ['db_message', 'db_sender', 'db_receivers', 'db_channels']
search_fields = ['id', '^db_date_sent', '^db_message']
save_as = True
save_on_top = True
list_select_related = True
#admin.site.register(Msg, MsgAdmin)
class ChannelAdmin(admin.ModelAdmin):
- list_display = ('id', 'db_key', 'db_lock_storage', "db_subscriptions")
+ list_display = ('id', 'db_key', 'db_lock_storage')
list_display_links = ("id", 'db_key')
ordering = ["db_key"]
search_fields = ['id', 'db_key', 'db_aliases']
save_as = True
save_on_top = True
list_select_related = True
fieldsets = (
(None, {'fields': (('db_key',), 'db_lock_storage')}),
)
admin.site.register(ChannelDB, ChannelAdmin)
|
Remove unsupported M2M field in ChannelAdmin handler. Removes traceback when DEBUG=True.
|
## Code Before:
from django.contrib import admin
from src.comms.models import ChannelDB
class MsgAdmin(admin.ModelAdmin):
list_display = ('id', 'db_date_sent', 'db_sender', 'db_receivers',
'db_channels', 'db_message', 'db_lock_storage')
list_display_links = ("id",)
ordering = ["db_date_sent", 'db_sender', 'db_receivers', 'db_channels']
#readonly_fields = ['db_message', 'db_sender', 'db_receivers', 'db_channels']
search_fields = ['id', '^db_date_sent', '^db_message']
save_as = True
save_on_top = True
list_select_related = True
#admin.site.register(Msg, MsgAdmin)
class ChannelAdmin(admin.ModelAdmin):
list_display = ('id', 'db_key', 'db_lock_storage', "db_subscriptions")
list_display_links = ("id", 'db_key')
ordering = ["db_key"]
search_fields = ['id', 'db_key', 'db_aliases']
save_as = True
save_on_top = True
list_select_related = True
fieldsets = (
(None, {'fields': (('db_key',), 'db_lock_storage')}),
)
admin.site.register(ChannelDB, ChannelAdmin)
## Instruction:
Remove unsupported M2M field in ChannelAdmin handler. Removes traceback when DEBUG=True.
## Code After:
from django.contrib import admin
from src.comms.models import ChannelDB
class MsgAdmin(admin.ModelAdmin):
list_display = ('id', 'db_date_sent', 'db_sender', 'db_receivers',
'db_channels', 'db_message', 'db_lock_storage')
list_display_links = ("id",)
ordering = ["db_date_sent", 'db_sender', 'db_receivers', 'db_channels']
#readonly_fields = ['db_message', 'db_sender', 'db_receivers', 'db_channels']
search_fields = ['id', '^db_date_sent', '^db_message']
save_as = True
save_on_top = True
list_select_related = True
#admin.site.register(Msg, MsgAdmin)
class ChannelAdmin(admin.ModelAdmin):
list_display = ('id', 'db_key', 'db_lock_storage')
list_display_links = ("id", 'db_key')
ordering = ["db_key"]
search_fields = ['id', 'db_key', 'db_aliases']
save_as = True
save_on_top = True
list_select_related = True
fieldsets = (
(None, {'fields': (('db_key',), 'db_lock_storage')}),
)
admin.site.register(ChannelDB, ChannelAdmin)
|
from django.contrib import admin
from src.comms.models import ChannelDB
class MsgAdmin(admin.ModelAdmin):
list_display = ('id', 'db_date_sent', 'db_sender', 'db_receivers',
'db_channels', 'db_message', 'db_lock_storage')
list_display_links = ("id",)
ordering = ["db_date_sent", 'db_sender', 'db_receivers', 'db_channels']
#readonly_fields = ['db_message', 'db_sender', 'db_receivers', 'db_channels']
search_fields = ['id', '^db_date_sent', '^db_message']
save_as = True
save_on_top = True
list_select_related = True
#admin.site.register(Msg, MsgAdmin)
class ChannelAdmin(admin.ModelAdmin):
- list_display = ('id', 'db_key', 'db_lock_storage', "db_subscriptions")
? --------------------
+ list_display = ('id', 'db_key', 'db_lock_storage')
list_display_links = ("id", 'db_key')
ordering = ["db_key"]
search_fields = ['id', 'db_key', 'db_aliases']
save_as = True
save_on_top = True
list_select_related = True
fieldsets = (
(None, {'fields': (('db_key',), 'db_lock_storage')}),
)
admin.site.register(ChannelDB, ChannelAdmin)
|
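A note on the change above: Django's ModelAdmin.list_display rejects ManyToManyField entries, which is why the "db_subscriptions" entry produced a traceback once DEBUG=True surfaced the validation error. If the relation should still show up in the changelist, the usual workaround is a callable. The sketch below illustrates that pattern; the Channel model and its subscriptions field are placeholders, not Evennia's actual schema.

from django.contrib import admin
from myapp.models import Channel  # hypothetical app and model

class ChannelAdmin(admin.ModelAdmin):
    # An M2M field cannot appear in list_display, but a method that
    # renders the relation as text can.
    list_display = ('id', 'key', 'subscriber_names')

    def subscriber_names(self, obj):
        # Keep the query cheap: only render the first few subscribers.
        return ", ".join(str(s) for s in obj.subscriptions.all()[:5])
    subscriber_names.short_description = 'Subscriptions'

admin.site.register(Channel, ChannelAdmin)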
75225c176135b6d17c8f10ea67dabb4b0fc02505
|
nodeconductor/iaas/migrations/0009_add_min_ram_and_disk_to_image.py
|
nodeconductor/iaas/migrations/0009_add_min_ram_and_disk_to_image.py
|
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
class Migration(migrations.Migration):
dependencies = [
('iaas', '0008_add_instance_restarting_state'),
]
operations = [
migrations.AddField(
model_name='image',
name='min_disk',
field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'),
preserve_default=True,
),
migrations.AddField(
model_name='image',
name='min_ram',
field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'),
preserve_default=True,
),
migrations.AlterField(
model_name='instance',
name='state',
field=django_fsm.FSMIntegerField(default=1, help_text='WARNING! Should not be changed manually unless you really know what you are doing.', max_length=1, choices=[(1, 'Provisioning Scheduled'), (2, 'Provisioning'), (3, 'Online'), (4, 'Offline'), (5, 'Starting Scheduled'), (6, 'Starting'), (7, 'Stopping Scheduled'), (8, 'Stopping'), (9, 'Erred'), (10, 'Deletion Scheduled'), (11, 'Deleting'), (13, 'Resizing Scheduled'), (14, 'Resizing'), (15, 'Restarting Scheduled'), (16, 'Restarting')]),
preserve_default=True,
),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
class Migration(migrations.Migration):
dependencies = [
('iaas', '0008_add_instance_restarting_state'),
]
operations = [
migrations.AddField(
model_name='image',
name='min_disk',
field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'),
preserve_default=True,
),
migrations.AddField(
model_name='image',
name='min_ram',
field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'),
preserve_default=True,
),
]
|
Remove field duplication from migrations(nc-301)
|
Remove field duplication from migrations(nc-301)
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
class Migration(migrations.Migration):
dependencies = [
('iaas', '0008_add_instance_restarting_state'),
]
operations = [
migrations.AddField(
model_name='image',
name='min_disk',
field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'),
preserve_default=True,
),
migrations.AddField(
model_name='image',
name='min_ram',
field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'),
preserve_default=True,
),
- migrations.AlterField(
- model_name='instance',
- name='state',
- field=django_fsm.FSMIntegerField(default=1, help_text='WARNING! Should not be changed manually unless you really know what you are doing.', max_length=1, choices=[(1, 'Provisioning Scheduled'), (2, 'Provisioning'), (3, 'Online'), (4, 'Offline'), (5, 'Starting Scheduled'), (6, 'Starting'), (7, 'Stopping Scheduled'), (8, 'Stopping'), (9, 'Erred'), (10, 'Deletion Scheduled'), (11, 'Deleting'), (13, 'Resizing Scheduled'), (14, 'Resizing'), (15, 'Restarting Scheduled'), (16, 'Restarting')]),
- preserve_default=True,
- ),
]
|
Remove field duplication from migrations(nc-301)
|
## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
class Migration(migrations.Migration):
dependencies = [
('iaas', '0008_add_instance_restarting_state'),
]
operations = [
migrations.AddField(
model_name='image',
name='min_disk',
field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'),
preserve_default=True,
),
migrations.AddField(
model_name='image',
name='min_ram',
field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'),
preserve_default=True,
),
migrations.AlterField(
model_name='instance',
name='state',
field=django_fsm.FSMIntegerField(default=1, help_text='WARNING! Should not be changed manually unless you really know what you are doing.', max_length=1, choices=[(1, 'Provisioning Scheduled'), (2, 'Provisioning'), (3, 'Online'), (4, 'Offline'), (5, 'Starting Scheduled'), (6, 'Starting'), (7, 'Stopping Scheduled'), (8, 'Stopping'), (9, 'Erred'), (10, 'Deletion Scheduled'), (11, 'Deleting'), (13, 'Resizing Scheduled'), (14, 'Resizing'), (15, 'Restarting Scheduled'), (16, 'Restarting')]),
preserve_default=True,
),
]
## Instruction:
Remove field duplication from migrations(nc-301)
## Code After:
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
class Migration(migrations.Migration):
dependencies = [
('iaas', '0008_add_instance_restarting_state'),
]
operations = [
migrations.AddField(
model_name='image',
name='min_disk',
field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'),
preserve_default=True,
),
migrations.AddField(
model_name='image',
name='min_ram',
field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'),
preserve_default=True,
),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
class Migration(migrations.Migration):
dependencies = [
('iaas', '0008_add_instance_restarting_state'),
]
operations = [
migrations.AddField(
model_name='image',
name='min_disk',
field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'),
preserve_default=True,
),
migrations.AddField(
model_name='image',
name='min_ram',
field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'),
preserve_default=True,
),
- migrations.AlterField(
- model_name='instance',
- name='state',
- field=django_fsm.FSMIntegerField(default=1, help_text='WARNING! Should not be changed manually unless you really know what you are doing.', max_length=1, choices=[(1, 'Provisioning Scheduled'), (2, 'Provisioning'), (3, 'Online'), (4, 'Offline'), (5, 'Starting Scheduled'), (6, 'Starting'), (7, 'Stopping Scheduled'), (8, 'Stopping'), (9, 'Erred'), (10, 'Deletion Scheduled'), (11, 'Deleting'), (13, 'Resizing Scheduled'), (14, 'Resizing'), (15, 'Restarting Scheduled'), (16, 'Restarting')]),
- preserve_default=True,
- ),
]
|
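The dropped AlterField was a no-op: its definition matched the model state already recorded by earlier migrations, so it only added work to every migrate run. After hand-pruning a migration like this, it is worth confirming that models and migration state still agree. A hedged sketch of a CI-style guard follows, assuming a standard manage.py layout; --check and --dry-run are standard makemigrations flags from Django 1.10 onward.

import subprocess
import sys

# Exits non-zero when model changes are missing migrations, i.e. when a
# pruned operation was actually still needed.
result = subprocess.run(
    [sys.executable, 'manage.py', 'makemigrations', '--check', '--dry-run'])
if result.returncode != 0:
    sys.exit('model state and migration state disagree')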
749aa35a85b6482cfba9dec7d37473a787d73c32
|
integration-test/1106-merge-ocean-earth.py
|
integration-test/1106-merge-ocean-earth.py
|
assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2)
# There should be a single (merged) earth feature in this tile
assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2)
|
assert_less_than_n_features(5, 11, 11, 'water', {'kind': 'ocean'}, 2)
assert_less_than_n_features(5, 8, 11, 'earth', {'kind': 'earth'}, 2)
# OpenStreetMap
assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2)
assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2)
|
Add lowzoom tests for polygon merging
|
Add lowzoom tests for polygon merging
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
+ assert_less_than_n_features(5, 11, 11, 'water', {'kind': 'ocean'}, 2)
+ assert_less_than_n_features(5, 8, 11, 'earth', {'kind': 'earth'}, 2)
+
+ # OpenStreetMap
assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2)
- # There should be a single (merged) earth feature in this tile
assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2)
|
Add lowzoom tests for polygon merging
|
## Code Before:
assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2)
# There should be a single (merged) earth feature in this tile
assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2)
## Instruction:
Add lowzoom tests for polygon merging
## Code After:
assert_less_than_n_features(5, 11, 11, 'water', {'kind': 'ocean'}, 2)
assert_less_than_n_features(5, 8, 11, 'earth', {'kind': 'earth'}, 2)
# OpenStreetMap
assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2)
assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2)
|
+ assert_less_than_n_features(5, 11, 11, 'water', {'kind': 'ocean'}, 2)
+ assert_less_than_n_features(5, 8, 11, 'earth', {'kind': 'earth'}, 2)
+
+ # OpenStreetMap
assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2)
- # There should be a single (merged) earth feature in this tile
assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2)
|
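assert_less_than_n_features comes from the repository's integration-test harness and its body is not part of this record. For orientation only, a helper consistent with these call sites might look like the sketch below; the fetch_tile callable and the layer-to-GeoJSON mapping are assumptions, not the project's real plumbing.

def assert_less_than_n_features(z, x, y, layer, properties, n, fetch_tile):
    # fetch_tile is assumed to return {layer_name: geojson_feature_collection}
    # for the tile at zoom z, column x, row y.
    features = fetch_tile(z, x, y).get(layer, {}).get('features', [])
    matches = [
        f for f in features
        if all(f.get('properties', {}).get(k) == v
               for k, v in properties.items())
    ]
    assert len(matches) < n, (
        'expected fewer than %d %r features in %d/%d/%d, found %d'
        % (n, properties, z, x, y, len(matches)))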
acd92d6a9e8c710657a4bcf1c46076f9d06f3d46
|
test_results/plot_all.py
|
test_results/plot_all.py
|
import glob
import csv
import numpy as np
import matplotlib.pyplot as plt
for file in glob.glob("*.csv"):
data = np.genfromtxt(file, delimiter = ',', names = True)
plt.figure(figsize=(10,20))
plt.suptitle(file)
num_plots = len(data.dtype.names)
count = 1
for col_name in data.dtype.names:
plt.subplot(num_plots, 1, count)
plt.plot(data[col_name], label=col_name)
plt.legend()
count += 1
ymin, ymax = plt.ylim()
if ymin < 0 < ymax:
plt.axhline(0, hold=True, color = 'grey') # plot line through zero
mng = plt.get_current_fig_manager()
if plt.get_backend() == 'TkAgg':
mng.window.state('zoomed')
elif plt.get_backend() == 'wxAgg':
mng.frame.Maximize(True)
elif plt.get_backend() == 'QT4Agg':
mng.window.showMaximized()
plt.savefig(file.rstrip('.csv') + '.pdf')
plt.show()
|
import glob
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.backends.backend_pdf
pdf = matplotlib.backends.backend_pdf.PdfPages("test-results-plots.pdf")
for file in glob.glob("*.csv"):
data = np.genfromtxt(file, delimiter = ',', names = True)
plt.figure(figsize=(10,20))
plt.suptitle(file)
num_plots = len(data.dtype.names)
count = 1
for col_name in data.dtype.names:
plt.subplot(num_plots, 1, count)
plt.plot(data[col_name], label=col_name)
plt.legend()
count += 1
ymin, ymax = plt.ylim()
if ymin < 0 < ymax:
plt.axhline(0, hold=True, color = 'grey') # plot line through zero
pdf.savefig()
mng = plt.get_current_fig_manager()
if plt.get_backend() == 'TkAgg':
mng.window.state('zoomed')
elif plt.get_backend() == 'wxAgg':
mng.frame.Maximize(True)
elif plt.get_backend() == 'QT4Agg':
mng.window.showMaximized()
plt.show()
plt.close()
pdf.close()
|
Save all simulation plots to one PDF instead of multiple
|
Save all simulation plots to one PDF instead of multiple
|
Python
|
agpl-3.0
|
BrewPi/firmware,glibersat/firmware,glibersat/firmware,BrewPi/firmware,glibersat/firmware,etk29321/firmware,etk29321/firmware,etk29321/firmware,etk29321/firmware,glibersat/firmware,glibersat/firmware,BrewPi/firmware,BrewPi/firmware,BrewPi/firmware,BrewPi/firmware,BrewPi/firmware,etk29321/firmware,BrewPi/firmware,etk29321/firmware,glibersat/firmware,glibersat/firmware
|
import glob
- import csv
import numpy as np
import matplotlib.pyplot as plt
+ import matplotlib.backends.backend_pdf
+ pdf = matplotlib.backends.backend_pdf.PdfPages("test-results-plots.pdf")
for file in glob.glob("*.csv"):
data = np.genfromtxt(file, delimiter = ',', names = True)
plt.figure(figsize=(10,20))
plt.suptitle(file)
num_plots = len(data.dtype.names)
count = 1
for col_name in data.dtype.names:
plt.subplot(num_plots, 1, count)
plt.plot(data[col_name], label=col_name)
plt.legend()
count += 1
ymin, ymax = plt.ylim()
if ymin < 0 < ymax:
plt.axhline(0, hold=True, color = 'grey') # plot line through zero
+ pdf.savefig()
+
mng = plt.get_current_fig_manager()
if plt.get_backend() == 'TkAgg':
mng.window.state('zoomed')
elif plt.get_backend() == 'wxAgg':
mng.frame.Maximize(True)
elif plt.get_backend() == 'QT4Agg':
mng.window.showMaximized()
- plt.savefig(file.rstrip('.csv') + '.pdf')
+
plt.show()
+ plt.close()
+
+ pdf.close()
+
|
Save all simulation plots to one PDF instead of multiple
|
## Code Before:
import glob
import csv
import numpy as np
import matplotlib.pyplot as plt
for file in glob.glob("*.csv"):
data = np.genfromtxt(file, delimiter = ',', names = True)
plt.figure(figsize=(10,20))
plt.suptitle(file)
num_plots = len(data.dtype.names)
count = 1
for col_name in data.dtype.names:
plt.subplot(num_plots, 1, count)
plt.plot(data[col_name], label=col_name)
plt.legend()
count += 1
ymin, ymax = plt.ylim()
if ymin < 0 < ymax:
plt.axhline(0, hold=True, color = 'grey') # plot line through zero
mng = plt.get_current_fig_manager()
if plt.get_backend() == 'TkAgg':
mng.window.state('zoomed')
elif plt.get_backend() == 'wxAgg':
mng.frame.Maximize(True)
elif plt.get_backend() == 'QT4Agg':
mng.window.showMaximized()
plt.savefig(file.rstrip('.csv') + '.pdf')
plt.show()
## Instruction:
Save all simulation plots to one PDF instead of multiple
## Code After:
import glob
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.backends.backend_pdf
pdf = matplotlib.backends.backend_pdf.PdfPages("test-results-plots.pdf")
for file in glob.glob("*.csv"):
data = np.genfromtxt(file, delimiter = ',', names = True)
plt.figure(figsize=(10,20))
plt.suptitle(file)
num_plots = len(data.dtype.names)
count = 1
for col_name in data.dtype.names:
plt.subplot(num_plots, 1, count)
plt.plot(data[col_name], label=col_name)
plt.legend()
count += 1
ymin, ymax = plt.ylim()
if ymin < 0 < ymax:
plt.axhline(0, hold=True, color = 'grey') # plot line through zero
pdf.savefig()
mng = plt.get_current_fig_manager()
if plt.get_backend() == 'TkAgg':
mng.window.state('zoomed')
elif plt.get_backend() == 'wxAgg':
mng.frame.Maximize(True)
elif plt.get_backend() == 'QT4Agg':
mng.window.showMaximized()
plt.show()
plt.close()
pdf.close()
|
import glob
- import csv
import numpy as np
import matplotlib.pyplot as plt
+ import matplotlib.backends.backend_pdf
+ pdf = matplotlib.backends.backend_pdf.PdfPages("test-results-plots.pdf")
for file in glob.glob("*.csv"):
data = np.genfromtxt(file, delimiter = ',', names = True)
plt.figure(figsize=(10,20))
plt.suptitle(file)
num_plots = len(data.dtype.names)
count = 1
for col_name in data.dtype.names:
plt.subplot(num_plots, 1, count)
plt.plot(data[col_name], label=col_name)
plt.legend()
count += 1
ymin, ymax = plt.ylim()
if ymin < 0 < ymax:
plt.axhline(0, hold=True, color = 'grey') # plot line through zero
+ pdf.savefig()
+
mng = plt.get_current_fig_manager()
if plt.get_backend() == 'TkAgg':
mng.window.state('zoomed')
elif plt.get_backend() == 'wxAgg':
mng.frame.Maximize(True)
elif plt.get_backend() == 'QT4Agg':
mng.window.showMaximized()
- plt.savefig(file.rstrip('.csv') + '.pdf')
+
plt.show()
+ plt.close()
+
+ pdf.close()
+
|
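PdfPages is the standard matplotlib mechanism for collecting many figures in a single file, and it also works as a context manager, which removes the need for the explicit pdf.close() above. A self-contained sketch with synthetic data standing in for the CSV files:

import numpy as np
import matplotlib
matplotlib.use('Agg')  # render off-screen so this runs headless
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages

with PdfPages('demo-plots.pdf') as pdf:
    x = np.linspace(0, 2 * np.pi, 200)
    for i in range(1, 4):
        fig = plt.figure()
        plt.plot(x, np.sin(i * x))
        plt.title('page %d' % i)
        pdf.savefig(fig)  # appends the figure as a new PDF page
        plt.close(fig)
# The PDF is finalized when the with-block exits.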
00b798c309d8807a562efb31751e82e5149ac7c8
|
molo/core/api/tests/test_importers.py
|
molo/core/api/tests/test_importers.py
|
import json
from django.test import TestCase
from molo.core.tests.base import MoloTestCaseMixin
from molo.core.api import importers
from molo.core.api.tests import constants
class ArticleImportTestCase(MoloTestCaseMixin, TestCase):
def setUp(self):
self.mk_main()
def test_importer_initializtion(self):
content = json.dumps(constants.AVAILABLE_ARTICLES)
importer = importers.ArticlePageImporter(content=content)
|
import json
from django.test import TestCase
from molo.core.tests.base import MoloTestCaseMixin
from molo.core.api import importers
from molo.core.api.tests import constants
class ArticleImportTestCase(MoloTestCaseMixin, TestCase):
def setUp(self):
self.mk_main()
def test_importer_initializtion(self):
content = json.dumps(constants.AVAILABLE_ARTICLES)
importer = importers.ArticlePageImporter(content=content)
self.assertEqual(importer.articles(), content["items"])
|
Write test for importer initialisation
|
Write test for importer initialisation
|
Python
|
bsd-2-clause
|
praekelt/molo,praekelt/molo,praekelt/molo,praekelt/molo
|
import json
from django.test import TestCase
from molo.core.tests.base import MoloTestCaseMixin
from molo.core.api import importers
from molo.core.api.tests import constants
class ArticleImportTestCase(MoloTestCaseMixin, TestCase):
def setUp(self):
self.mk_main()
def test_importer_initializtion(self):
content = json.dumps(constants.AVAILABLE_ARTICLES)
importer = importers.ArticlePageImporter(content=content)
+
+ self.assertEqual(importer.articles(), content["items"])
|
Write test for importer initialisation
|
## Code Before:
import json
from django.test import TestCase
from molo.core.tests.base import MoloTestCaseMixin
from molo.core.api import importers
from molo.core.api.tests import constants
class ArticleImportTestCase(MoloTestCaseMixin, TestCase):
def setUp(self):
self.mk_main()
def test_importer_initializtion(self):
content = json.dumps(constants.AVAILABLE_ARTICLES)
importer = importers.ArticlePageImporter(content=content)
## Instruction:
Write test for importer initialisation
## Code After:
import json
from django.test import TestCase
from molo.core.tests.base import MoloTestCaseMixin
from molo.core.api import importers
from molo.core.api.tests import constants
class ArticleImportTestCase(MoloTestCaseMixin, TestCase):
def setUp(self):
self.mk_main()
def test_importer_initializtion(self):
content = json.dumps(constants.AVAILABLE_ARTICLES)
importer = importers.ArticlePageImporter(content=content)
self.assertEqual(importer.articles(), content["items"])
|
import json
from django.test import TestCase
from molo.core.tests.base import MoloTestCaseMixin
from molo.core.api import importers
from molo.core.api.tests import constants
class ArticleImportTestCase(MoloTestCaseMixin, TestCase):
def setUp(self):
self.mk_main()
def test_importer_initializtion(self):
content = json.dumps(constants.AVAILABLE_ARTICLES)
importer = importers.ArticlePageImporter(content=content)
+
+ self.assertEqual(importer.articles(), content["items"])
|
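One subtlety in the finished test: json.dumps returns a str, so content["items"] indexes a string and raises TypeError rather than looking up a key. If the intent is to compare against the parsed payload, the usual round-trip looks like the sketch below; the AVAILABLE_ARTICLES shape here is an assumption, since the constants module is not shown.

import json

AVAILABLE_ARTICLES = {'items': [{'id': 1, 'title': 'example'}]}  # assumed shape

serialized = json.dumps(AVAILABLE_ARTICLES)  # str, what an importer API consumes
parsed = json.loads(serialized)              # back to a dict

# Comparing against the parsed dict avoids "string indices must be
# integers", which serialized['items'] would raise.
assert parsed['items'] == AVAILABLE_ARTICLES['items']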
30be8d71fee8f7429d6b4d48a8168133062e3315
|
text_test/regex_utils_test.py
|
text_test/regex_utils_test.py
|
import unittest
from text import regex_utils
class RegexUtilsTest(unittest.TestCase):
def test_check_line(self):
pass
def test_parse_line(self):
pass
if __name__ == '__main__':
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
import unittest
from text import regex_utils
class RegexUtilsTest(unittest.TestCase):
def test_check_line(self):
self.assertTrue(regex_utils.check_line('.*(\d+.\d+.\d+.\d+)', 'MyIP is 192.168.199.4'))
self.assertTrue(regex_utils.check_line('Test (Data|Case) For (py-text|py-task)', 'Test Data For py-text'))
self.assertFalse(regex_utils.check_line('.*(\d+.\d+.\d+.{100,255})', 'MyIP is 192.168.199.4'))
self.assertFalse(regex_utils.check_line(None, 'Test Word'))
self.assertFalse(regex_utils.check_line('.*', None))
def test_parse_line(self):
result = regex_utils.parse_line('name=(\S+), type=(\S+), ip=(\S+)', 'name=ASA5505, type=Firewall, ip=192.168.199.4')
self.assertEqual(len(result), 3)
self.assertEqual(result[0], 'ASA5505')
self.assertEqual(result[1], 'Firewall')
self.assertEqual(result[2], '192.168.199.4')
result = regex_utils.parse_line('Test Data', None)
self.assertEqual(result, None)
result = regex_utils.parse_line(None, 'Test Data')
self.assertEqual(result, 'Test Data')
if __name__ == '__main__':
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
Update regex_utils unit test case
|
Update regex_utils unit test case
|
Python
|
apache-2.0
|
PinaeOS/py-text,interhui/py-text
|
import unittest
from text import regex_utils
class RegexUtilsTest(unittest.TestCase):
def test_check_line(self):
- pass
+ self.assertTrue(regex_utils.check_line('.*(\d+.\d+.\d+.\d+)', 'MyIP is 192.168.199.4'))
+ self.assertTrue(regex_utils.check_line('Test (Data|Case) For (py-text|py-task)', 'Test Data For py-text'))
+ self.assertFalse(regex_utils.check_line('.*(\d+.\d+.\d+.{100,255})', 'MyIP is 192.168.199.4'))
+ self.assertFalse(regex_utils.check_line(None, 'Test Word'))
+ self.assertFalse(regex_utils.check_line('.*', None))
def test_parse_line(self):
+ result = regex_utils.parse_line('name=(\S+), type=(\S+), ip=(\S+)', 'name=ASA5505, type=Firewall, ip=192.168.199.4')
+ self.assertEqual(len(result), 3)
+ self.assertEqual(result[0], 'ASA5505')
+ self.assertEqual(result[1], 'Firewall')
+ self.assertEqual(result[2], '192.168.199.4')
- pass
+
+ result = regex_utils.parse_line('Test Data', None)
+ self.assertEqual(result, None)
+
+ result = regex_utils.parse_line(None, 'Test Data')
+ self.assertEqual(result, 'Test Data')
+
if __name__ == '__main__':
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
Update regex_utils unit test case
|
## Code Before:
import unittest
from text import regex_utils
class RegexUtilsTest(unittest.TestCase):
def test_check_line(self):
pass
def test_parse_line(self):
pass
if __name__ == '__main__':
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()
## Instruction:
Update regex_utils unit test case
## Code After:
import unittest
from text import regex_utils
class RegexUtilsTest(unittest.TestCase):
def test_check_line(self):
self.assertTrue(regex_utils.check_line('.*(\d+.\d+.\d+.\d+)', 'MyIP is 192.168.199.4'))
self.assertTrue(regex_utils.check_line('Test (Data|Case) For (py-text|py-task)', 'Test Data For py-text'))
self.assertFalse(regex_utils.check_line('.*(\d+.\d+.\d+.{100,255})', 'MyIP is 192.168.199.4'))
self.assertFalse(regex_utils.check_line(None, 'Test Word'))
self.assertFalse(regex_utils.check_line('.*', None))
def test_parse_line(self):
result = regex_utils.parse_line('name=(\S+), type=(\S+), ip=(\S+)', 'name=ASA5505, type=Firewall, ip=192.168.199.4')
self.assertEqual(len(result), 3)
self.assertEqual(result[0], 'ASA5505')
self.assertEqual(result[1], 'Firewall')
self.assertEqual(result[2], '192.168.199.4')
result = regex_utils.parse_line('Test Data', None)
self.assertEqual(result, None)
result = regex_utils.parse_line(None, 'Test Data')
self.assertEqual(result, 'Test Data')
if __name__ == '__main__':
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
import unittest
from text import regex_utils
class RegexUtilsTest(unittest.TestCase):
def test_check_line(self):
- pass
+ self.assertTrue(regex_utils.check_line('.*(\d+.\d+.\d+.\d+)', 'MyIP is 192.168.199.4'))
+ self.assertTrue(regex_utils.check_line('Test (Data|Case) For (py-text|py-task)', 'Test Data For py-text'))
+ self.assertFalse(regex_utils.check_line('.*(\d+.\d+.\d+.{100,255})', 'MyIP is 192.168.199.4'))
+ self.assertFalse(regex_utils.check_line(None, 'Test Word'))
+ self.assertFalse(regex_utils.check_line('.*', None))
def test_parse_line(self):
+ result = regex_utils.parse_line('name=(\S+), type=(\S+), ip=(\S+)', 'name=ASA5505, type=Firewall, ip=192.168.199.4')
+ self.assertEqual(len(result), 3)
+ self.assertEqual(result[0], 'ASA5505')
+ self.assertEqual(result[1], 'Firewall')
+ self.assertEqual(result[2], '192.168.199.4')
- pass
? ----
+
+ result = regex_utils.parse_line('Test Data', None)
+ self.assertEqual(result, None)
+
+ result = regex_utils.parse_line(None, 'Test Data')
+ self.assertEqual(result, 'Test Data')
+
if __name__ == '__main__':
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
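The regex_utils module itself is not part of this record, only its tests. Implementations consistent with every assertion above might look like the following sketch; the None-handling branches are inferred from the tests, not copied from the project.

import re

def check_line(pattern, line):
    # The tests expect False, not an exception, when either argument is None.
    if pattern is None or line is None:
        return False
    return re.search(pattern, line) is not None

def parse_line(pattern, line):
    # Inferred contract: None line -> None, None pattern -> line unchanged,
    # otherwise return the captured groups (or None on no match).
    if line is None:
        return None
    if pattern is None:
        return line
    match = re.search(pattern, line)
    return list(match.groups()) if match else None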
e2ee9045c59e3f03c5342ee41d23e4adece43535
|
weather/admin.py
|
weather/admin.py
|
from django.contrib.admin import ModelAdmin, register
from django.contrib.gis.admin import GeoModelAdmin
from weather.models import WeatherStation, Location
@register(Location)
class LocationAdmin(GeoModelAdmin):
pass
@register(WeatherStation)
class WeatherStationAdmin(ModelAdmin):
list_display = (
'name', 'abbreviation', 'ip_address', 'last_reading',
'battery_voltage', 'connect_every', 'active')
|
from django.contrib.admin import ModelAdmin, register
from django.contrib.gis.admin import GeoModelAdmin
from weather.models import WeatherStation, Location
@register(Location)
class LocationAdmin(GeoModelAdmin):
openlayers_url = '//static.dpaw.wa.gov.au/static/libs/openlayers/2.13.1/OpenLayers.js'
@register(WeatherStation)
class WeatherStationAdmin(ModelAdmin):
list_display = (
'name', 'abbreviation', 'ip_address', 'last_reading',
'battery_voltage', 'connect_every', 'active')
|
Set the OpenLayers.js URL to the DPaW CDN.
|
Set the OpenLayers.js URL to the DPaW CDN.
|
Python
|
bsd-3-clause
|
parksandwildlife/resource_tracking,parksandwildlife/resource_tracking,ropable/resource_tracking,ropable/resource_tracking,ropable/resource_tracking,parksandwildlife/resource_tracking
|
from django.contrib.admin import ModelAdmin, register
from django.contrib.gis.admin import GeoModelAdmin
from weather.models import WeatherStation, Location
@register(Location)
class LocationAdmin(GeoModelAdmin):
- pass
+ openlayers_url = '//static.dpaw.wa.gov.au/static/libs/openlayers/2.13.1/OpenLayers.js'
@register(WeatherStation)
class WeatherStationAdmin(ModelAdmin):
list_display = (
'name', 'abbreviation', 'ip_address', 'last_reading',
'battery_voltage', 'connect_every', 'active')
|
Set the OpenLayers.js URL to the DPaW CDN.
|
## Code Before:
from django.contrib.admin import ModelAdmin, register
from django.contrib.gis.admin import GeoModelAdmin
from weather.models import WeatherStation, Location
@register(Location)
class LocationAdmin(GeoModelAdmin):
pass
@register(WeatherStation)
class WeatherStationAdmin(ModelAdmin):
list_display = (
'name', 'abbreviation', 'ip_address', 'last_reading',
'battery_voltage', 'connect_every', 'active')
## Instruction:
Set the OpenLayers.js URL to the DPaW CDN.
## Code After:
from django.contrib.admin import ModelAdmin, register
from django.contrib.gis.admin import GeoModelAdmin
from weather.models import WeatherStation, Location
@register(Location)
class LocationAdmin(GeoModelAdmin):
openlayers_url = '//static.dpaw.wa.gov.au/static/libs/openlayers/2.13.1/OpenLayers.js'
@register(WeatherStation)
class WeatherStationAdmin(ModelAdmin):
list_display = (
'name', 'abbreviation', 'ip_address', 'last_reading',
'battery_voltage', 'connect_every', 'active')
|
from django.contrib.admin import ModelAdmin, register
from django.contrib.gis.admin import GeoModelAdmin
from weather.models import WeatherStation, Location
@register(Location)
class LocationAdmin(GeoModelAdmin):
- pass
+ openlayers_url = '//static.dpaw.wa.gov.au/static/libs/openlayers/2.13.1/OpenLayers.js'
@register(WeatherStation)
class WeatherStationAdmin(ModelAdmin):
list_display = (
'name', 'abbreviation', 'ip_address', 'last_reading',
'battery_voltage', 'connect_every', 'active')
|
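openlayers_url is a documented GeoModelAdmin attribute, so pinning the map widget's OpenLayers build to a CDN is a one-line override, as the diff shows. A short sketch with a couple of sibling options for context; the model name and CDN host below are placeholders.

from django.contrib.gis.admin import GeoModelAdmin

class PinnedMapAdmin(GeoModelAdmin):
    # Serve the OpenLayers 2.x build the widget expects from a host
    # you control instead of openlayers.org.
    openlayers_url = '//cdn.example.com/openlayers/2.13.1/OpenLayers.js'
    default_zoom = 10   # other documented GeoModelAdmin knobs
    map_width = 800
    map_height = 500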
f4777e994a29a8dbc704950411156cca4ff59ac3
|
oscar/core/compat.py
|
oscar/core/compat.py
|
from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
|
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
try:
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured("AUTH_USER_MODEL must be of the form 'app_label.model_name'")
|
Use better exception for AUTH_USER_MODEL
|
Use better exception for AUTH_USER_MODEL
If AUTH_USER_MODEL is improperly configured as 'project.customer.User',
the error is:
ValueError: too many values to unpack
Use rather standard Django's error:
ImproperlyConfigured: AUTH_USER_MODEL must be of the form
'app_label.model_name'
|
Python
|
bsd-3-clause
|
faratro/django-oscar,rocopartners/django-oscar,thechampanurag/django-oscar,spartonia/django-oscar,vovanbo/django-oscar,saadatqadri/django-oscar,sasha0/django-oscar,pdonadeo/django-oscar,monikasulik/django-oscar,bschuon/django-oscar,mexeniz/django-oscar,jinnykoo/christmas,QLGu/django-oscar,manevant/django-oscar,jinnykoo/christmas,anentropic/django-oscar,anentropic/django-oscar,michaelkuty/django-oscar,john-parton/django-oscar,django-oscar/django-oscar,faratro/django-oscar,john-parton/django-oscar,marcoantoniooliveira/labweb,kapari/django-oscar,saadatqadri/django-oscar,elliotthill/django-oscar,ahmetdaglarbas/e-commerce,kapari/django-oscar,elliotthill/django-oscar,lijoantony/django-oscar,pasqualguerrero/django-oscar,kapt/django-oscar,bnprk/django-oscar,solarissmoke/django-oscar,binarydud/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj,QLGu/django-oscar,michaelkuty/django-oscar,manevant/django-oscar,jlmadurga/django-oscar,pdonadeo/django-oscar,eddiep1101/django-oscar,monikasulik/django-oscar,faratro/django-oscar,binarydud/django-oscar,marcoantoniooliveira/labweb,saadatqadri/django-oscar,jinnykoo/christmas,eddiep1101/django-oscar,ka7eh/django-oscar,ka7eh/django-oscar,jlmadurga/django-oscar,pasqualguerrero/django-oscar,ahmetdaglarbas/e-commerce,manevant/django-oscar,MatthewWilkes/django-oscar,ahmetdaglarbas/e-commerce,ahmetdaglarbas/e-commerce,Bogh/django-oscar,Bogh/django-oscar,vovanbo/django-oscar,marcoantoniooliveira/labweb,solarissmoke/django-oscar,django-oscar/django-oscar,jinnykoo/wuyisj.com,sasha0/django-oscar,rocopartners/django-oscar,WadeYuChen/django-oscar,thechampanurag/django-oscar,thechampanurag/django-oscar,machtfit/django-oscar,john-parton/django-oscar,amirrpp/django-oscar,sasha0/django-oscar,pdonadeo/django-oscar,bschuon/django-oscar,vovanbo/django-oscar,ka7eh/django-oscar,spartonia/django-oscar,monikasulik/django-oscar,amirrpp/django-oscar,nickpack/django-oscar,josesanch/django-oscar,bschuon/django-oscar,vovanbo/django-oscar,MatthewWilkes/django-oscar,taedori81/django-oscar,marcoantoniooliveira/labweb,Idematica/django-oscar,manevant/django-oscar,ademuk/django-oscar,elliotthill/django-oscar,dongguangming/django-oscar,taedori81/django-oscar,adamend/django-oscar,QLGu/django-oscar,itbabu/django-oscar,spartonia/django-oscar,bnprk/django-oscar,pasqualguerrero/django-oscar,jmt4/django-oscar,binarydud/django-oscar,anentropic/django-oscar,pdonadeo/django-oscar,bschuon/django-oscar,okfish/django-oscar,sonofatailor/django-oscar,nfletton/django-oscar,thechampanurag/django-oscar,sonofatailor/django-oscar,ademuk/django-oscar,jinnykoo/wuyisj,monikasulik/django-oscar,kapari/django-oscar,ka7eh/django-oscar,michaelkuty/django-oscar,MatthewWilkes/django-oscar,WillisXChen/django-oscar,lijoantony/django-oscar,Bogh/django-oscar,mexeniz/django-oscar,amirrpp/django-oscar,jmt4/django-oscar,Bogh/django-oscar,mexeniz/django-oscar,jmt4/django-oscar,jinnykoo/wuyisj.com,jinnykoo/wuyisj,WadeYuChen/django-oscar,eddiep1101/django-oscar,kapari/django-oscar,spartonia/django-oscar,solarissmoke/django-oscar,michaelkuty/django-oscar,nfletton/django-oscar,sonofatailor/django-oscar,makielab/django-oscar,dongguangming/django-oscar,DrOctogon/unwash_ecom,sasha0/django-oscar,adamend/django-oscar,WadeYuChen/django-oscar,jlmadurga/django-oscar,Idematica/django-oscar,makielab/django-oscar,solarissmoke/django-oscar,okfish/django-oscar,kapt/django-oscar,eddiep1101/django-oscar,WillisXChen/django-oscar,josesanch/django-oscar,Jannes123/django-oscar,itbabu/django-oscar,michaelkuty/django-oscar,ademuk/django-oscar,machtfit/django-oscar,binarydud/django-oscar,nfletton/django-oscar,jinnykoo/wuyisj.com,jmt4/django-oscar,adamend/django-oscar,DrOctogon/unwash_ecom,django-oscar/django-oscar,WillisXChen/django-oscar,DrOctogon/unwash_ecom
|
from django.conf import settings
from django.contrib.auth.models import User
+ from django.core.exceptions import ImproperlyConfigured
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
+ try:
- AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
+ AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
+ except ValueError:
+ raise ImproperlyConfigured("AUTH_USER_MODEL must be of the form 'app_label.model_name'")
|
Use a better exception for AUTH_USER_MODEL
|
## Code Before:
from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
## Instruction:
Use a better exception for AUTH_USER_MODEL
## Code After:
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
try:
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured("AUTH_USER_MODEL must be of the form 'app_label.model_name'")
|
from django.conf import settings
from django.contrib.auth.models import User
+ from django.core.exceptions import ImproperlyConfigured
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
+ try:
- AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
+ AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
? ++++
+ except ValueError:
+ raise ImproperlyConfigured("AUTH_USER_MODEL must be of the form 'app_label.model_name'")
|
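The failure mode being fixed is that 'project.customer.User'.split('.') yields three parts, so the tuple unpack dies with an opaque ValueError. Wrapping the unpack in try/except is one option; validating the label explicitly reads about the same but can report the bad value too. A sketch, not Oscar's code:

from django.core.exceptions import ImproperlyConfigured

def split_user_model(label):
    # Valid labels have exactly one dot: 'app_label.ModelName'.
    parts = label.split('.')
    if len(parts) != 2:
        raise ImproperlyConfigured(
            "AUTH_USER_MODEL must be of the form 'app_label.model_name',"
            " got %r" % label)
    return parts[0], parts[1]

# split_user_model('auth.User')             -> ('auth', 'User')
# split_user_model('project.customer.User') -> ImproperlyConfigured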
bb82e3a8519009311ede80f877844565c49384b4
|
examples/qidle/qidle.py
|
examples/qidle/qidle.py
|
from qidle import main
import logging
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
main()
|
import sys
from qidle import main
import logging
if __name__ == '__main__':
filename = None
if sys.platform == 'win32':
filename = 'qidle.log'
logging.basicConfig(level=logging.INFO, filename=filename)
main()
|
Add a log file on Windows
|
Add a log file on Windows
|
Python
|
mit
|
pyQode/pyqode.python,pyQode/pyqode.python,zwadar/pyqode.python,mmolero/pyqode.python
|
+ import sys
from qidle import main
import logging
if __name__ == '__main__':
+ filename = None
+ if sys.platform == 'win32':
+ filename = 'qidle.log'
- logging.basicConfig(level=logging.INFO)
+ logging.basicConfig(level=logging.INFO, filename=filename)
main()
|
Add a log file on Windows
|
## Code Before:
from qidle import main
import logging
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
main()
## Instruction:
Add a log file on Windows
## Code After:
import sys
from qidle import main
import logging
if __name__ == '__main__':
filename = None
if sys.platform == 'win32':
filename = 'qidle.log'
logging.basicConfig(level=logging.INFO, filename=filename)
main()
|
+ import sys
from qidle import main
import logging
if __name__ == '__main__':
+ filename = None
+ if sys.platform == 'win32':
+ filename = 'qidle.log'
- logging.basicConfig(level=logging.INFO)
+ logging.basicConfig(level=logging.INFO, filename=filename)
? +++++++++++++++++++
main()
|
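Passing filename=None keeps basicConfig logging to stderr, so the single conditional covers both platforms. A slightly more defensive variant writes the Windows log under the per-user app-data directory, which stays writable even when the install directory is not; the path choice here is an assumption, not qidle's actual behavior.

import logging
import os
import sys

filename = None
if sys.platform == 'win32':
    # %LOCALAPPDATA% is writable per user; Program Files often is not.
    log_dir = os.path.join(os.environ.get('LOCALAPPDATA', '.'), 'qidle')
    os.makedirs(log_dir, exist_ok=True)
    filename = os.path.join(log_dir, 'qidle.log')

logging.basicConfig(level=logging.INFO, filename=filename)
logging.info('logging configured, file: %s', filename)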
8883f1a45595219ae843b3400df1f56ab07aa4fe
|
corehq/apps/userreports/document_stores.py
|
corehq/apps/userreports/document_stores.py
|
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
|
from corehq.apps.locations.models import SQLLocation
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
from pillowtop.dao.exceptions import DocumentNotFoundError
from pillowtop.dao.interface import ReadOnlyDocumentStore
class ReadonlyLocationDocumentStore(ReadOnlyDocumentStore):
def __init__(self, domain):
self.domain = domain
self.queryset = SQLLocation.objects.filter(domain=domain)
def get_document(self, doc_id):
try:
return self.queryset.get(location_id=doc_id).to_json()
except SQLLocation.DoesNotExist as e:
raise DocumentNotFoundError(e)
def iter_document_ids(self, last_id=None):
return iter(self.queryset.location_ids())
def iter_documents(self, ids):
for location in self.queryset.filter(location_id__in=ids):
yield location.to_json()
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
elif doc_type == 'Location':
return ReadonlyLocationDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
|
Add document store for locations
|
Add document store for locations
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
+ from corehq.apps.locations.models import SQLLocation
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
+ from pillowtop.dao.exceptions import DocumentNotFoundError
+ from pillowtop.dao.interface import ReadOnlyDocumentStore
+
+
+ class ReadonlyLocationDocumentStore(ReadOnlyDocumentStore):
+
+ def __init__(self, domain):
+ self.domain = domain
+ self.queryset = SQLLocation.objects.filter(domain=domain)
+
+ def get_document(self, doc_id):
+ try:
+ return self.queryset.get(location_id=doc_id).to_json()
+ except SQLLocation.DoesNotExist as e:
+ raise DocumentNotFoundError(e)
+
+ def iter_document_ids(self, last_id=None):
+ return iter(self.queryset.location_ids())
+
+ def iter_documents(self, ids):
+ for location in self.queryset.filter(location_id__in=ids):
+ yield location.to_json()
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
+ elif doc_type == 'Location':
+ return ReadonlyLocationDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
|
Add document store for locations
|
## Code Before:
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
## Instruction:
Add document store for locations
## Code After:
from corehq.apps.locations.models import SQLLocation
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
from pillowtop.dao.exceptions import DocumentNotFoundError
from pillowtop.dao.interface import ReadOnlyDocumentStore
class ReadonlyLocationDocumentStore(ReadOnlyDocumentStore):
def __init__(self, domain):
self.domain = domain
self.queryset = SQLLocation.objects.filter(domain=domain)
def get_document(self, doc_id):
try:
return self.queryset.get(location_id=doc_id).to_json()
except SQLLocation.DoesNotExist as e:
raise DocumentNotFoundError(e)
def iter_document_ids(self, last_id=None):
return iter(self.queryset.location_ids())
def iter_documents(self, ids):
for location in self.queryset.filter(location_id__in=ids):
yield location.to_json()
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
elif doc_type == 'Location':
return ReadonlyLocationDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
|
+ from corehq.apps.locations.models import SQLLocation
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
+ from pillowtop.dao.exceptions import DocumentNotFoundError
+ from pillowtop.dao.interface import ReadOnlyDocumentStore
+
+
+ class ReadonlyLocationDocumentStore(ReadOnlyDocumentStore):
+
+ def __init__(self, domain):
+ self.domain = domain
+ self.queryset = SQLLocation.objects.filter(domain=domain)
+
+ def get_document(self, doc_id):
+ try:
+ return self.queryset.get(location_id=doc_id).to_json()
+ except SQLLocation.DoesNotExist as e:
+ raise DocumentNotFoundError(e)
+
+ def iter_document_ids(self, last_id=None):
+ return iter(self.queryset.location_ids())
+
+ def iter_documents(self, ids):
+ for location in self.queryset.filter(location_id__in=ids):
+ yield location.to_json()
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
+ elif doc_type == 'Location':
+ return ReadonlyLocationDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
|
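The new location store implements the same three-method contract as the form and case stores: get_document, iter_document_ids, and iter_documents. A framework-free sketch of that contract with an in-memory dict standing in for SQLLocation; the class names here are illustrative, not pillowtop's real base classes.

class DocumentNotFoundError(Exception):
    pass

class InMemoryDocumentStore(object):
    """Same shape as the read-only stores above, backed by a dict."""

    def __init__(self, docs):
        self._docs = docs  # doc_id -> JSON-serializable dict

    def get_document(self, doc_id):
        try:
            return self._docs[doc_id]
        except KeyError as e:
            raise DocumentNotFoundError(e)

    def iter_document_ids(self, last_id=None):
        return iter(self._docs)

    def iter_documents(self, ids):
        for doc_id in ids:
            if doc_id in self._docs:
                yield self._docs[doc_id]

store = InMemoryDocumentStore({'loc1': {'name': 'Clinic A'}})
assert store.get_document('loc1')['name'] == 'Clinic A'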
f5d56b0c54af414f02721a1a02a0eaf80dbba898
|
client/python/unrealcv/util.py
|
client/python/unrealcv/util.py
|
import numpy as np
import PIL
from io import BytesIO
# StringIO module is removed in python3, use io module
def read_png(res):
import PIL.Image
img = PIL.Image.open(BytesIO(res))
return np.asarray(img)
def read_npy(res):
# res is a binary buffer
return np.load(BytesIO(res))
|
import numpy as np
import PIL.Image
from io import BytesIO
# StringIO module is removed in python3, use io module
def read_png(res):
img = None
try:
PIL_img = PIL.Image.open(BytesIO(res))
img = np.asarray(PIL_img)
except:
print('Read png can not parse response %s' % str(res[:20]))
return img
def read_npy(res):
# res is a binary buffer
arr = None
try:
arr = np.load(BytesIO(res))
except:
print('Read npy can not parse response %s' % str(res[:20]))
return arr
|
Handle exceptions in read_png and read_npy.
|
Handle exceptions in read_png and read_npy.
|
Python
|
mit
|
unrealcv/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv
|
import numpy as np
- import PIL
+ import PIL.Image
from io import BytesIO
# StringIO module is removed in python3, use io module
def read_png(res):
- import PIL.Image
+ img = None
+ try:
- img = PIL.Image.open(BytesIO(res))
+ PIL_img = PIL.Image.open(BytesIO(res))
- return np.asarray(img)
+ img = np.asarray(PIL_img)
+ except:
+ print('Read png can not parse response %s' % str(res[:20]))
+ return img
def read_npy(res):
# res is a binary buffer
+ arr = None
+ try:
- return np.load(BytesIO(res))
+ arr = np.load(BytesIO(res))
+ except:
+ print('Read npy can not parse response %s' % str(res[:20]))
+ return arr
|
Handle exceptions in read_png and read_npy.
|
## Code Before:
import numpy as np
import PIL
from io import BytesIO
# StringIO module is removed in python3, use io module
def read_png(res):
import PIL.Image
img = PIL.Image.open(BytesIO(res))
return np.asarray(img)
def read_npy(res):
# res is a binary buffer
return np.load(BytesIO(res))
## Instruction:
Handle exceptions in read_png and read_npy.
## Code After:
import numpy as np
import PIL.Image
from io import BytesIO
# StringIO module is removed in python3, use io module
def read_png(res):
img = None
try:
PIL_img = PIL.Image.open(BytesIO(res))
img = np.asarray(PIL_img)
except:
print('Read png can not parse response %s' % str(res[:20]))
return img
def read_npy(res):
# res is a binary buffer
arr = None
try:
arr = np.load(BytesIO(res))
except:
print('Read npy can not parse response %s' % str(res[:20]))
return arr
|
import numpy as np
- import PIL
+ import PIL.Image
? ++++++
from io import BytesIO
# StringIO module is removed in python3, use io module
def read_png(res):
- import PIL.Image
+ img = None
+ try:
- img = PIL.Image.open(BytesIO(res))
+ PIL_img = PIL.Image.open(BytesIO(res))
? ++++++++
- return np.asarray(img)
+ img = np.asarray(PIL_img)
+ except:
+ print('Read png can not parse response %s' % str(res[:20]))
+ return img
def read_npy(res):
# res is a binary buffer
+ arr = None
+ try:
- return np.load(BytesIO(res))
? --- ^
+ arr = np.load(BytesIO(res))
? +++++ ^^
+ except:
+ print('Read npy can not parse response %s' % str(res[:20]))
+ return arr
|
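The bare except clauses above do the job but also swallow KeyboardInterrupt and genuine programming errors. A narrower variant that still returns None on bad payloads is sketched below; the exception tuple reflects what PIL and NumPy raise for unparseable buffers as far as I know, so treat it as an assumption to verify.

from io import BytesIO

import numpy as np
import PIL.Image

def read_png(res):
    try:
        return np.asarray(PIL.Image.open(BytesIO(res)))
    except (OSError, ValueError) as err:
        # PIL raises OSError (UnidentifiedImageError on newer Pillow)
        # when the bytes are not a recognizable image.
        print('read_png could not parse response %r: %s' % (res[:20], err))
        return None

def read_npy(res):
    try:
        return np.load(BytesIO(res))
    except (OSError, ValueError) as err:
        # np.load raises ValueError/OSError on malformed .npy buffers.
        print('read_npy could not parse response %r: %s' % (res[:20], err))
        return None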
5496f501ff7da677ee76c442b6a5b544d595ce1d
|
epages/__init__.py
|
epages/__init__.py
|
from epages.client import *
from epages.product_service import *
from epages.shop_service import *
from epages.shop import *
|
from epages.client import *
from epages.shop_service import *
from epages.shop import *
|
Remove product_service from epages package
|
Remove product_service from epages package
|
Python
|
mit
|
ooz/epages-rest-python,ooz/epages-rest-python
|
from epages.client import *
- from epages.product_service import *
from epages.shop_service import *
from epages.shop import *
|
Remove product_service from epages package
|
## Code Before:
from epages.client import *
from epages.product_service import *
from epages.shop_service import *
from epages.shop import *
## Instruction:
Remove product_service from epages package
## Code After:
from epages.client import *
from epages.shop_service import *
from epages.shop import *
|
from epages.client import *
- from epages.product_service import *
from epages.shop_service import *
from epages.shop import *
|
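Because the package __init__ re-exports submodules with star imports, removing a submodule import like this changes the package's public surface. Defining __all__ in each submodule keeps that surface explicit, so star imports only pull in what is intended. A tiny illustrative sketch; the names are hypothetical, not the epages package's contents.

# Inside a submodule such as shop_service.py:
__all__ = ['ShopService']  # `from module import *` exposes only this

class ShopService(object):
    """Public: listed in __all__."""

class _HttpHelper(object):
    """Private: underscore-prefixed and absent from __all__."""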
c638dbf619030c8d207e3bfd2e711da7c6c5cdf4
|
passman.py
|
passman.py
|
from splash import showSplash
from functions import quit, getServiceFromUser, getPasswordFromUser, writeToFile
def main():
while True:
service = getServiceFromUser()
pw = getPasswordFromUser()
writeToFile(service, pw)
# run the program
showSplash()
main()
|
import hashlib
from splash import showSplash
from functions import quit, getServiceFromUser, getPasswordFromUser, \
getUserInput, handleLogin, welcomeMessage, showMenu
from database import addUser, getAllServices, checkIfServiceExists, \
addService, removeService, updateService, getServiceByName
def main():
welcomeMessage()
handleLogin()
while True:
showMenu()
# run the program
#showSplash()
main()
|
Clean up main a bit
|
Clean up main a bit
|
Python
|
mit
|
regexpressyourself/passman
|
+
+ import hashlib
from splash import showSplash
- from functions import quit, getServiceFromUser, getPasswordFromUser, writeToFile
+ from functions import quit, getServiceFromUser, getPasswordFromUser, \
+ getUserInput, handleLogin, welcomeMessage, showMenu
+
+ from database import addUser, getAllServices, checkIfServiceExists, \
+ addService, removeService, updateService, getServiceByName
def main():
+ welcomeMessage()
+ handleLogin()
+
+
while True:
+ showMenu()
- service = getServiceFromUser()
- pw = getPasswordFromUser()
- writeToFile(service, pw)
# run the program
- showSplash()
+ #showSplash()
main()
|
Clean up main a bit
|
## Code Before:
from splash import showSplash
from functions import quit, getServiceFromUser, getPasswordFromUser, writeToFile
def main():
while True:
service = getServiceFromUser()
pw = getPasswordFromUser()
writeToFile(service, pw)
# run the program
showSplash()
main()
## Instruction:
Clean up main a bit
## Code After:
import hashlib
from splash import showSplash
from functions import quit, getServiceFromUser, getPasswordFromUser, \
getUserInput, handleLogin, welcomeMessage, showMenu
from database import addUser, getAllServices, checkIfServiceExists, \
addService, removeService, updateService, getServiceByName
def main():
welcomeMessage()
handleLogin()
while True:
showMenu()
# run the program
#showSplash()
main()
|
+
+ import hashlib
from splash import showSplash
- from functions import quit, getServiceFromUser, getPasswordFromUser, writeToFile
? ^^^^^^^^^^^
+ from functions import quit, getServiceFromUser, getPasswordFromUser, \
? ^
+ getUserInput, handleLogin, welcomeMessage, showMenu
+
+ from database import addUser, getAllServices, checkIfServiceExists, \
+ addService, removeService, updateService, getServiceByName
def main():
+ welcomeMessage()
+ handleLogin()
+
+
while True:
+ showMenu()
- service = getServiceFromUser()
- pw = getPasswordFromUser()
- writeToFile(service, pw)
# run the program
- showSplash()
+ #showSplash()
? +
main()
|
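showMenu and handleLogin are not shown in the record; the dispatch-table pattern below is one plausible shape for the loop that main now delegates to. Every name in the sketch is illustrative, not passman's code.

def add_service():
    print('adding a service...')

def list_services():
    print('listing services...')

MENU = {
    '1': ('Add a service', add_service),
    '2': ('List services', list_services),
    'q': ('Quit', None),
}

def show_menu():
    for key, (label, _action) in sorted(MENU.items()):
        print(' %s) %s' % (key, label))
    choice = input('> ').strip()
    if choice == 'q':
        raise SystemExit(0)
    _label, action = MENU.get(choice, (None, None))
    if action is None:
        print('unknown choice %r' % choice)
    else:
        action()

if __name__ == '__main__':
    while True:
        show_menu()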
b4fa43b85a162fa9bef3cb67c2dd523f25707b4d
|
mo/cli.py
|
mo/cli.py
|
from argparse import ArgumentParser
import yaml
from .runner import Runner
def parse_variables(args):
variables = {}
if args is not None:
for variable in args:
tokens = variable.split('=')
name = tokens[0]
value = '='.join(tokens[1:])
variables[name] = value
return variables
def main():
parser = ArgumentParser()
parser.add_argument('-f', '--file', default='mo.yaml')
parser.add_argument('-v', '--var', dest='variables', nargs='*')
parser.add_argument('tasks', metavar='task', nargs='+')
args = parser.parse_args()
with open(args.file) as file:
configuration = yaml.load(file.read())
variables = parse_variables(args.variables)
runner = Runner(configuration, variables)
for task in args.tasks:
runner.run_task(task)
|
from argparse import ArgumentParser
import yaml
from .runner import Runner
def parse_variables(args):
variables = {}
if args is not None:
for variable in args:
tokens = variable.split('=')
name = tokens[0]
value = '='.join(tokens[1:])
variables[name] = value
return variables
def main():
parser = ArgumentParser()
parser.add_argument('-f', '--file', default='mo.yaml')
parser.add_argument('-v', '--var', dest='variables', nargs='*')
parser.add_argument('tasks', metavar='task', nargs='*')
args = parser.parse_args()
with open(args.file) as file:
configuration = yaml.load(file.read())
variables = parse_variables(args.variables)
runner = Runner(configuration, variables)
if args.tasks is None:
for task in args.tasks:
runner.run_task(task)
else:
print()
for task in runner.tasks.values():
print('', task.name, '-', task.description)
|
Add a way of listing commands
|
Add a way of listing commands
|
Python
|
mit
|
thomasleese/mo
|
from argparse import ArgumentParser
import yaml
from .runner import Runner
def parse_variables(args):
variables = {}
if args is not None:
for variable in args:
tokens = variable.split('=')
name = tokens[0]
value = '='.join(tokens[1:])
variables[name] = value
return variables
def main():
parser = ArgumentParser()
parser.add_argument('-f', '--file', default='mo.yaml')
parser.add_argument('-v', '--var', dest='variables', nargs='*')
- parser.add_argument('tasks', metavar='task', nargs='+')
+ parser.add_argument('tasks', metavar='task', nargs='*')
args = parser.parse_args()
with open(args.file) as file:
configuration = yaml.load(file.read())
variables = parse_variables(args.variables)
runner = Runner(configuration, variables)
+ if args.tasks is None:
- for task in args.tasks:
+ for task in args.tasks:
- runner.run_task(task)
+ runner.run_task(task)
+ else:
+ print()
+ for task in runner.tasks.values():
+ print('', task.name, '-', task.description)
|
Add a way of listing commands
|
## Code Before:
from argparse import ArgumentParser
import yaml
from .runner import Runner
def parse_variables(args):
variables = {}
if args is not None:
for variable in args:
tokens = variable.split('=')
name = tokens[0]
value = '='.join(tokens[1:])
variables[name] = value
return variables
def main():
parser = ArgumentParser()
parser.add_argument('-f', '--file', default='mo.yaml')
parser.add_argument('-v', '--var', dest='variables', nargs='*')
parser.add_argument('tasks', metavar='task', nargs='+')
args = parser.parse_args()
with open(args.file) as file:
configuration = yaml.load(file.read())
variables = parse_variables(args.variables)
runner = Runner(configuration, variables)
for task in args.tasks:
runner.run_task(task)
## Instruction:
Add a way of listing commands
## Code After:
from argparse import ArgumentParser
import yaml
from .runner import Runner
def parse_variables(args):
variables = {}
if args is not None:
for variable in args:
tokens = variable.split('=')
name = tokens[0]
value = '='.join(tokens[1:])
variables[name] = value
return variables
def main():
parser = ArgumentParser()
parser.add_argument('-f', '--file', default='mo.yaml')
parser.add_argument('-v', '--var', dest='variables', nargs='*')
parser.add_argument('tasks', metavar='task', nargs='*')
args = parser.parse_args()
with open(args.file) as file:
configuration = yaml.load(file.read())
variables = parse_variables(args.variables)
runner = Runner(configuration, variables)
if args.tasks is None:
for task in args.tasks:
runner.run_task(task)
else:
print()
for task in runner.tasks.values():
print('', task.name, '-', task.description)
|
from argparse import ArgumentParser
import yaml
from .runner import Runner
def parse_variables(args):
variables = {}
if args is not None:
for variable in args:
tokens = variable.split('=')
name = tokens[0]
value = '='.join(tokens[1:])
variables[name] = value
return variables
def main():
parser = ArgumentParser()
parser.add_argument('-f', '--file', default='mo.yaml')
parser.add_argument('-v', '--var', dest='variables', nargs='*')
- parser.add_argument('tasks', metavar='task', nargs='+')
? ^
+ parser.add_argument('tasks', metavar='task', nargs='*')
? ^
args = parser.parse_args()
with open(args.file) as file:
configuration = yaml.load(file.read())
variables = parse_variables(args.variables)
runner = Runner(configuration, variables)
+ if args.tasks is None:
- for task in args.tasks:
+ for task in args.tasks:
? ++++
- runner.run_task(task)
+ runner.run_task(task)
? ++++
+ else:
+ print()
+ for task in runner.tasks.values():
+ print('', task.name, '-', task.description)
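One caveat in the new dispatch: a positional argument with nargs='*' and no explicit default yields an empty list, never None, so the check `if args.tasks is None:` is always false and every invocation falls through to the listing branch. A corrected sketch of the intended behaviour (runner.tasks is assumed to map names to objects with .name and .description, as the listing loop implies):

if args.tasks:                      # one or more task names were supplied
    for task in args.tasks:
        runner.run_task(task)
else:                               # nothing supplied: list available tasks
    print()
    for task in runner.tasks.values():
        print('', task.name, '-', task.description)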
|
8e61726b178c5175347008b9b77032fd223b6114
|
elections_r_us/security.py
|
elections_r_us/security.py
|
from models import User
from passlib.apps import custom_app_context as pwd_context
def create_user(session, username, password):
"""Add a new user to the database.
session is expected to be a dbsession, username and password are
expected to be (unencrypted) unicode strings."""
session.add(User(
username=username,
password=pwd_context.encrypt(password)
))
def check_login(session, username, password):
"""Return whether username and password match in the database.
If username is not present, returns False."""
try:
query = session.query(User).filter(User.username == username).first()
return pwd_context.verify(password, query.password)
except AttributeError:
return False
|
from .models import User
from passlib.apps import custom_app_context as pwd_context
def create_user(session, username, password):
"""Add a new user to the database.
session is expected to be a dbsession, username and password are
expected to be (unencrypted) unicode strings."""
session.add(User(
username=username,
password=pwd_context.encrypt(password)
))
def check_login(session, username, password):
"""Return whether username and password match in the database.
If username is not present, returns False."""
query = session.query(User).filter(User.username == username).first()
try:
return pwd_context.verify(password, query.password)
except AttributeError:
return False
|
Move query assignment out of try block
|
Move query assignment out of try block
make import relative
|
Python
|
mit
|
Elections-R-Us/Elections-R-Us,Elections-R-Us/Elections-R-Us,Elections-R-Us/Elections-R-Us,Elections-R-Us/Elections-R-Us
|
- from models import User
+ from .models import User
from passlib.apps import custom_app_context as pwd_context
def create_user(session, username, password):
"""Add a new user to the database.
session is expected to be a dbsession, username and password are
expected to be (unencrypted) unicode strings."""
session.add(User(
username=username,
password=pwd_context.encrypt(password)
))
def check_login(session, username, password):
"""Return whether username and password match in the database.
If username is not present, returns False."""
+ query = session.query(User).filter(User.username == username).first()
try:
- query = session.query(User).filter(User.username == username).first()
return pwd_context.verify(password, query.password)
except AttributeError:
return False
|
Move query assignment out of try block
|
## Code Before:
from models import User
from passlib.apps import custom_app_context as pwd_context
def create_user(session, username, password):
"""Add a new user to the database.
session is expected to be a dbsession, username and password are
expected to be (unencrypted) unicode strings."""
session.add(User(
username=username,
password=pwd_context.encrypt(password)
))
def check_login(session, username, password):
"""Return whether username and password match in the database.
If username is not present, returns False."""
try:
query = session.query(User).filter(User.username == username).first()
return pwd_context.verify(password, query.password)
except AttributeError:
return False
## Instruction:
Move query assignment out of try block
## Code After:
from .models import User
from passlib.apps import custom_app_context as pwd_context
def create_user(session, username, password):
"""Add a new user to the database.
session is expected to be a dbsession, username and password are
expected to be (unencrypted) unicode strings."""
session.add(User(
username=username,
password=pwd_context.encrypt(password)
))
def check_login(session, username, password):
"""Return whether username and password match in the database.
If username is not present, returns False."""
query = session.query(User).filter(User.username == username).first()
try:
return pwd_context.verify(password, query.password)
except AttributeError:
return False
|
- from models import User
+ from .models import User
? +
from passlib.apps import custom_app_context as pwd_context
def create_user(session, username, password):
"""Add a new user to the database.
session is expected to be a dbsession, username and password are
expected to be (unencrypted) unicode strings."""
session.add(User(
username=username,
password=pwd_context.encrypt(password)
))
def check_login(session, username, password):
"""Return whether username and password match in the database.
If username is not present, returns False."""
+ query = session.query(User).filter(User.username == username).first()
try:
- query = session.query(User).filter(User.username == username).first()
return pwd_context.verify(password, query.password)
except AttributeError:
return False
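Hoisting the query out of the try block narrows the guarded code to the one attribute access that can legitimately fail, so an unexpected database error is no longer swallowed as a failed login. An equivalent shape that drops the exception handling entirely, sketched with User and pwd_context standing in for the real model and passlib context:

def check_login(session, username, password):
    user = session.query(User).filter(User.username == username).first()
    if user is None:  # unknown username: .first() returned no row
        return False
    return pwd_context.verify(password, user.password)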
|
d58fa915665c3a2c99588bb19bfaf14e6728371f
|
channels/__init__.py
|
channels/__init__.py
|
import django
__version__ = "2.4.0"
if django.VERSION < (3, 2):
default_app_config = "channels.apps.ChannelsConfig"
DEFAULT_CHANNEL_LAYER = "default"
|
__version__ = "2.4.0"
try:
import django
if django.VERSION < (3, 2):
default_app_config = "channels.apps.ChannelsConfig"
except ModuleNotFoundError:
pass
DEFAULT_CHANNEL_LAYER = "default"
|
Fix RTD build for missing Django dependency.
|
Fix RTD build for missing Django dependency.
|
Python
|
bsd-3-clause
|
andrewgodwin/channels,django/channels,andrewgodwin/django-channels
|
- import django
-
__version__ = "2.4.0"
+ try:
+ import django
+
- if django.VERSION < (3, 2):
+ if django.VERSION < (3, 2):
- default_app_config = "channels.apps.ChannelsConfig"
+ default_app_config = "channels.apps.ChannelsConfig"
+ except ModuleNotFoundError:
+ pass
+
DEFAULT_CHANNEL_LAYER = "default"
|
Fix RTD build for missing Django dependency.
|
## Code Before:
import django
__version__ = "2.4.0"
if django.VERSION < (3, 2):
default_app_config = "channels.apps.ChannelsConfig"
DEFAULT_CHANNEL_LAYER = "default"
## Instruction:
Fix RTD build for missing Django dependency.
## Code After:
__version__ = "2.4.0"
try:
import django
if django.VERSION < (3, 2):
default_app_config = "channels.apps.ChannelsConfig"
except ModuleNotFoundError:
pass
DEFAULT_CHANNEL_LAYER = "default"
|
- import django
-
__version__ = "2.4.0"
+ try:
+ import django
+
- if django.VERSION < (3, 2):
+ if django.VERSION < (3, 2):
? ++++
- default_app_config = "channels.apps.ChannelsConfig"
+ default_app_config = "channels.apps.ChannelsConfig"
? ++++
+ except ModuleNotFoundError:
+ pass
+
DEFAULT_CHANNEL_LAYER = "default"
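The guard keeps the module importable on documentation builds where Django is absent while leaving the version check intact for real installs. An equivalent shape for any optional dependency, sketched below (ModuleNotFoundError requires Python 3.6+; older interpreters would catch ImportError):

try:
    import django  # optional at docs-build time
except ModuleNotFoundError:
    django = None

if django is not None and django.VERSION < (3, 2):
    default_app_config = "channels.apps.ChannelsConfig"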
|
d650cbe26ce0fcc4c5146466d2827b930c153b0f
|
PlatformPhysicsOperation.py
|
PlatformPhysicsOperation.py
|
from UM.Operations.Operation import Operation
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.TranslateOperation import TranslateOperation
## A specialised operation designed specifically to modify the previous operation.
class PlatformPhysicsOperation(Operation):
def __init__(self, node, translation):
super().__init__()
self._node = node
self._translation = translation
def undo(self):
pass
def redo(self):
pass
def mergeWith(self, other):
if type(other) is AddSceneNodeOperation:
other._node.translate(self._translation)
return other
elif type(other) is TranslateOperation:
other._translation += self._translation
return other
else:
return False
|
from UM.Operations.Operation import Operation
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.TranslateOperation import TranslateOperation
from UM.Operations.GroupedOperation import GroupedOperation
## A specialised operation designed specifically to modify the previous operation.
class PlatformPhysicsOperation(Operation):
def __init__(self, node, translation):
super().__init__()
self._node = node
self._transform = node.getLocalTransformation()
self._position = node.getPosition() + translation
self._always_merge = True
def undo(self):
self._node.setLocalTransformation(self._transform)
def redo(self):
self._node.setPosition(self._position)
def mergeWith(self, other):
group = GroupedOperation()
group.addOperation(self)
group.addOperation(other)
return group
def __repr__(self):
return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
|
Use GroupedOperation for merging PlatformPhysicsOperation
|
Use GroupedOperation for merging PlatformPhysicsOperation
|
Python
|
agpl-3.0
|
senttech/Cura,quillford/Cura,fxtentacle/Cura,hmflash/Cura,totalretribution/Cura,ynotstartups/Wanhao,bq/Ultimaker-Cura,lo0ol/Ultimaker-Cura,lo0ol/Ultimaker-Cura,DeskboxBrazil/Cura,derekhe/Cura,fxtentacle/Cura,fieldOfView/Cura,quillford/Cura,fieldOfView/Cura,derekhe/Cura,senttech/Cura,totalretribution/Cura,DeskboxBrazil/Cura,Curahelper/Cura,hmflash/Cura,bq/Ultimaker-Cura,ad1217/Cura,ynotstartups/Wanhao,ad1217/Cura,markwal/Cura,Curahelper/Cura,markwal/Cura
|
from UM.Operations.Operation import Operation
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.TranslateOperation import TranslateOperation
+ from UM.Operations.GroupedOperation import GroupedOperation
## A specialised operation designed specifically to modify the previous operation.
class PlatformPhysicsOperation(Operation):
def __init__(self, node, translation):
super().__init__()
self._node = node
- self._translation = translation
+ self._transform = node.getLocalTransformation()
+ self._position = node.getPosition() + translation
+ self._always_merge = True
def undo(self):
- pass
+ self._node.setLocalTransformation(self._transform)
def redo(self):
- pass
+ self._node.setPosition(self._position)
def mergeWith(self, other):
+ group = GroupedOperation()
- if type(other) is AddSceneNodeOperation:
- other._node.translate(self._translation)
- return other
- elif type(other) is TranslateOperation:
- other._translation += self._translation
- return other
- else:
- return False
+ group.addOperation(self)
+ group.addOperation(other)
+ return group
+
+ def __repr__(self):
+ return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
+
|
Use GroupedOperation for merging PlatformPhysicsOperation
|
## Code Before:
from UM.Operations.Operation import Operation
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.TranslateOperation import TranslateOperation
## A specialised operation designed specifically to modify the previous operation.
class PlatformPhysicsOperation(Operation):
def __init__(self, node, translation):
super().__init__()
self._node = node
self._translation = translation
def undo(self):
pass
def redo(self):
pass
def mergeWith(self, other):
if type(other) is AddSceneNodeOperation:
other._node.translate(self._translation)
return other
elif type(other) is TranslateOperation:
other._translation += self._translation
return other
else:
return False
## Instruction:
Use GroupedOperation for merging PlatformPhysicsOperation
## Code After:
from UM.Operations.Operation import Operation
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.TranslateOperation import TranslateOperation
from UM.Operations.GroupedOperation import GroupedOperation
## A specialised operation designed specifically to modify the previous operation.
class PlatformPhysicsOperation(Operation):
def __init__(self, node, translation):
super().__init__()
self._node = node
self._transform = node.getLocalTransformation()
self._position = node.getPosition() + translation
self._always_merge = True
def undo(self):
self._node.setLocalTransformation(self._transform)
def redo(self):
self._node.setPosition(self._position)
def mergeWith(self, other):
group = GroupedOperation()
group.addOperation(self)
group.addOperation(other)
return group
def __repr__(self):
return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
|
from UM.Operations.Operation import Operation
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.TranslateOperation import TranslateOperation
+ from UM.Operations.GroupedOperation import GroupedOperation
## A specialised operation designed specifically to modify the previous operation.
class PlatformPhysicsOperation(Operation):
def __init__(self, node, translation):
super().__init__()
self._node = node
- self._translation = translation
+ self._transform = node.getLocalTransformation()
+ self._position = node.getPosition() + translation
+ self._always_merge = True
def undo(self):
- pass
+ self._node.setLocalTransformation(self._transform)
def redo(self):
- pass
+ self._node.setPosition(self._position)
def mergeWith(self, other):
+ group = GroupedOperation()
- if type(other) is AddSceneNodeOperation:
- other._node.translate(self._translation)
- return other
- elif type(other) is TranslateOperation:
- other._translation += self._translation
- return other
- else:
- return False
+ group.addOperation(self)
+ group.addOperation(other)
+
+ return group
+
+ def __repr__(self):
+ return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
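mergeWith no longer inspects the other operation's type; it always composes the pair into a GroupedOperation, which replays its children in order on redo and unwinds them in reverse on undo. A minimal stand-in for that class, as a sketch of the contract rather than Uranium's real implementation:

class GroupedOperation:
    def __init__(self):
        self._children = []

    def addOperation(self, op):
        self._children.append(op)

    def redo(self):
        for op in self._children:            # replay in insertion order
            op.redo()

    def undo(self):
        for op in reversed(self._children):  # unwind newest-first
            op.undo()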
|
fd5e21705d8f7757cf345c8c98af260203c44517
|
malcolm/modules/__init__.py
|
malcolm/modules/__init__.py
|
class Importer(object):
def __init__(self):
self.update_dict = {}
self.ignore = ["docs"]
def import_subpackages(self, path):
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
if f not in self.ignore and os.path.isdir(os.path.join(dirname, f)):
self.try_import_path(path + [f])
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
log.info("Importing %s failed", name, exc_info=True)
# Try the import of subpackages too
self.import_subpackages(path)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
|
class Importer(object):
def __init__(self):
self.update_dict = {}
self.dirnames = [
"vmetas", "infos", "controllers", "parts", "includes", "blocks"]
def import_subpackages(self, path, filter=()):
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
if not filter or f in filter:
if os.path.isdir(os.path.join(dirname, f)):
self.try_import_path(path + [f])
# Try the import of subpackages too
self.import_subpackages(path + [f], self.dirnames)
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
log.warning("Importing %s failed", name, exc_info=True)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
|
Improve import logic for clearer error messages
|
Improve import logic for clearer error messages
|
Python
|
apache-2.0
|
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
|
class Importer(object):
def __init__(self):
self.update_dict = {}
- self.ignore = ["docs"]
+ self.dirnames = [
+ "vmetas", "infos", "controllers", "parts", "includes", "blocks"]
- def import_subpackages(self, path):
+ def import_subpackages(self, path, filter=()):
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
+ if not filter or f in filter:
- if f not in self.ignore and os.path.isdir(os.path.join(dirname, f)):
+ if os.path.isdir(os.path.join(dirname, f)):
- self.try_import_path(path + [f])
+ self.try_import_path(path + [f])
+ # Try the import of subpackages too
+ self.import_subpackages(path + [f], self.dirnames)
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
- log.info("Importing %s failed", name, exc_info=True)
+ log.warning("Importing %s failed", name, exc_info=True)
- # Try the import of subpackages too
- self.import_subpackages(path)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
|
Improve import logic for clearer error messages
|
## Code Before:
class Importer(object):
def __init__(self):
self.update_dict = {}
self.ignore = ["docs"]
def import_subpackages(self, path):
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
if f not in self.ignore and os.path.isdir(os.path.join(dirname, f)):
self.try_import_path(path + [f])
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
log.info("Importing %s failed", name, exc_info=True)
# Try the import of subpackages too
self.import_subpackages(path)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
## Instruction:
Improve import logic for clearer error messages
## Code After:
class Importer(object):
def __init__(self):
self.update_dict = {}
self.dirnames = [
"vmetas", "infos", "controllers", "parts", "includes", "blocks"]
def import_subpackages(self, path, filter=()):
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
if not filter or f in filter:
if os.path.isdir(os.path.join(dirname, f)):
self.try_import_path(path + [f])
# Try the import of subpackages too
self.import_subpackages(path + [f], self.dirnames)
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
log.warning("Importing %s failed", name, exc_info=True)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
|
class Importer(object):
def __init__(self):
self.update_dict = {}
- self.ignore = ["docs"]
+ self.dirnames = [
+ "vmetas", "infos", "controllers", "parts", "includes", "blocks"]
- def import_subpackages(self, path):
+ def import_subpackages(self, path, filter=()):
? +++++++++++
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
+ if not filter or f in filter:
- if f not in self.ignore and os.path.isdir(os.path.join(dirname, f)):
? -------------------------
+ if os.path.isdir(os.path.join(dirname, f)):
? ++++
- self.try_import_path(path + [f])
+ self.try_import_path(path + [f])
? ++++
+ # Try the import of subpackages too
+ self.import_subpackages(path + [f], self.dirnames)
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
- log.info("Importing %s failed", name, exc_info=True)
? ^^
+ log.warning("Importing %s failed", name, exc_info=True)
? ++++ ^
- # Try the import of subpackages too
- self.import_subpackages(path)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
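The rewrite turns the old docs blacklist into a whitelist of known subpackage kinds and recurses explicitly, so each failure is logged at the exact package that raised rather than after retrying its children. The walk, reduced to a standalone sketch (the real importer applies the name filter only below the top level, which the recursive call mirrors here):

import importlib
import logging
import os

log = logging.getLogger(__name__)
DIRNAMES = ('vmetas', 'infos', 'controllers', 'parts', 'includes', 'blocks')

def import_tree(base_pkg, base_dir, filter_names=()):
    modules = {}
    for f in os.listdir(base_dir):
        child = os.path.join(base_dir, f)
        if not os.path.isdir(child):
            continue
        if filter_names and f not in filter_names:
            continue
        dotted = '%s.%s' % (base_pkg, f)
        try:
            modules[dotted] = importlib.import_module(dotted)
        except ImportError:
            log.warning('Importing %s failed', dotted, exc_info=True)
        modules.update(import_tree(dotted, child, DIRNAMES))
    return modules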
|
72d33ea47458cace13dac920ce2a82e55f83caba
|
statsmodels/stats/tests/test_outliers_influence.py
|
statsmodels/stats/tests/test_outliers_influence.py
|
from numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime, get_rdataset
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
|
from numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
|
Add pandas dataframe capability in variance_inflation_factor
|
ENH: Add pandas dataframe capability in variance_inflation_factor
|
Python
|
bsd-3-clause
|
bashtage/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,bashtage/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,bashtage/statsmodels,bashtage/statsmodels,bashtage/statsmodels,statsmodels/statsmodels
|
from numpy.testing import assert_almost_equal
- from statsmodels.datasets import statecrime, get_rdataset
+ from statsmodels.datasets import statecrime
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
|
Add pandas dataframe capability in variance_inflation_factor
|
## Code Before:
from numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime, get_rdataset
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
## Instruction:
Add pandas dataframe capability in variance_inflation_factor
## Code After:
from numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
|
from numpy.testing import assert_almost_equal
- from statsmodels.datasets import statecrime, get_rdataset
? --------------
+ from statsmodels.datasets import statecrime
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
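The test reaches the DataFrame path by converting through np.asarray before calling variance_inflation_factor. The statistic regresses column j on all remaining columns and reports VIF_j = 1 / (1 - R_j^2); a self-contained NumPy sketch (centered R-squared is used here, which matches statsmodels' OLS when an intercept sits among the other columns):

import numpy as np

def vif(exog, j):
    """Variance inflation factor of column j of a 2-D array."""
    y = exog[:, j]
    X = np.delete(exog, j, axis=1)
    beta, *_ = np.linalg.lstsq(X, y, rcond=None)
    resid = y - X @ beta
    tss = ((y - y.mean()) ** 2).sum()  # centered total sum of squares
    r_squared = 1.0 - (resid @ resid) / tss
    return 1.0 / (1.0 - r_squared)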
|
0c0f56dba4b9f08f4cb443f2668cdee51fe80c32
|
chapter02/fahrenheitToCelsius.py
|
chapter02/fahrenheitToCelsius.py
|
F = input("Gimme Fahrenheit: ")
print (F-32) * 5 / 9
print (F-32) / 1.8000
|
fahrenheit = input("Gimme Fahrenheit: ")
print (fahrenheit-32) * 5 / 9
print (fahrenheit-32) / 1.8000
|
Change variable name to fahrenheit
|
Change variable name to fahrenheit
|
Python
|
apache-2.0
|
MindCookin/python-exercises
|
- F = input("Gimme Fahrenheit: ")
+ fahrenheit = input("Gimme Fahrenheit: ")
- print (F-32) * 5 / 9
+ print (fahrenheit-32) * 5 / 9
- print (F-32) / 1.8000
+ print (fahrenheit-32) / 1.8000
|
Change variable name to fahrenheit
|
## Code Before:
F = input("Gimme Fahrenheit: ")
print (F-32) * 5 / 9
print (F-32) / 1.8000
## Instruction:
Change variable name to fahrenheit
## Code After:
fahrenheit = input("Gimme Fahrenheit: ")
print (fahrenheit-32) * 5 / 9
print (fahrenheit-32) / 1.8000
|
- F = input("Gimme Fahrenheit: ")
? ^
+ fahrenheit = input("Gimme Fahrenheit: ")
? ^^^^^^^^^^
- print (F-32) * 5 / 9
? ^
+ print (fahrenheit-32) * 5 / 9
? ^^^^^^^^^^
- print (F-32) / 1.8000
? ^
+ print (fahrenheit-32) / 1.8000
? ^^^^^^^^^^
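Both prints perform the same conversion, since 5/9 equals 1/1.8; for example (212 - 32) * 5 / 9 gives 100.0 degrees Celsius. The print-statement syntax implies Python 2, where input() evaluates the typed text and integer input makes (fahrenheit-32)*5/9 truncate (100 degrees Fahrenheit becomes 37 rather than 37.78). A safer Python 2 spelling would be:

fahrenheit = float(raw_input("Gimme Fahrenheit: "))  # read text, don't eval it
print (fahrenheit - 32) * 5.0 / 9.0                  # float literals force real division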
|
3ecc978421e1bcceb30635e875333e52272e07a3
|
tests/providers/test_ovh.py
|
tests/providers/test_ovh.py
|
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
cmd_options['domain'] = self.domain
return cmd_options
|
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
cmd_options['auth_entrypoint'] = 'ovh-eu'
cmd_options['domain'] = self.domain
return cmd_options
|
Select ovh-eu entrypoint for test integration
|
Select ovh-eu entrypoint for test integration
|
Python
|
mit
|
tnwhitwell/lexicon,AnalogJ/lexicon,AnalogJ/lexicon,tnwhitwell/lexicon
|
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
+ cmd_options['auth_entrypoint'] = 'ovh-eu'
cmd_options['domain'] = self.domain
return cmd_options
|
Select ovh-eu entrypoint for test integration
|
## Code Before:
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
cmd_options['domain'] = self.domain
return cmd_options
## Instruction:
Select ovh-eu entrypoint for test integration
## Code After:
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
cmd_options['auth_entrypoint'] = 'ovh-eu'
cmd_options['domain'] = self.domain
return cmd_options
|
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
+ cmd_options['auth_entrypoint'] = 'ovh-eu'
cmd_options['domain'] = self.domain
return cmd_options
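The commit pins OVH's EU regional endpoint on top of whatever credentials env_auth_options pulls from the environment. A hedged sketch of how such a helper could assemble options from environment variables (the LEXICON_OVH_* naming scheme below is illustrative, not necessarily Lexicon's actual one):

import os

def env_auth_options(provider):
    prefix = 'LEXICON_%s_' % provider.upper()
    return {key[len(prefix):].lower(): value
            for key, value in os.environ.items()
            if key.startswith(prefix)}

options = env_auth_options('ovh')      # e.g. would pick up LEXICON_OVH_AUTH_KEY
options['auth_entrypoint'] = 'ovh-eu'  # pin the EU endpoint, as the test does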
|
3d7e02e58353fdc3290440344efd5591d233f449
|
bot/__init__.py
|
bot/__init__.py
|
import os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
|
import os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
from .webapp import goldstarsapp
|
Make the flask app easier to import
|
Make the flask app easier to import
|
Python
|
mit
|
barentsen/AstroGoldStars,barentsen/AstroGoldStars
|
import os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
+ from .webapp import goldstarsapp
+
|
Make the flask app easier to import
|
## Code Before:
import os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
## Instruction:
Make the flask app easier to import
## Code After:
import os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
from .webapp import goldstarsapp
|
import os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
+
+ from .webapp import goldstarsapp
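Re-exporting at package level shortens downstream imports; after this commit callers can write the first form instead of reaching into the submodule:

from bot import goldstarsapp           # now possible
from bot.webapp import goldstarsapp    # still works, no longer required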
|
d9e65fbf111f8584189a57059516afafb1e4d04c
|
test/projection_test.py
|
test/projection_test.py
|
from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
import pytest
def test_l1_no_negative():
patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0]))
with tf.Session() as sess:
print(sess.run(patient))
strat = sess.run(patient)
x_strat = [0.2, 0.8, 0.0]
assert len(strat) == len(x_strat)
for i in range(len(strat)):
assert strat[i] == pytest.approx(x_strat[i])
def test_l1_with_negative():
patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0]))
with tf.Session() as sess:
print(sess.run(patient))
strat = sess.run(patient)
x_strat = [0.2, 0.8, 0.0]
assert len(strat) == len(x_strat)
for i in range(len(strat)):
assert strat[i] == pytest.approx(x_strat[i])
|
from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
class ProjectionTest(tf.test.TestCase):
def test_l1_no_negative(self):
with self.test_session():
self.assertAllClose(
l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0])).eval(),
[0.2, 0.8, 0.0]
)
def test_l1_with_negative(self):
with self.test_session():
self.assertAllClose(
l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0])).eval(),
[0.2, 0.8, 0.0]
)
def test_l1_multiple_rows(self):
patient = l1_projection_to_simplex(
tf.transpose(
tf.constant(
[
[2.0, 8.0, -5.0],
[9.5, 0.4, 0.1]
]
)
)
)
with self.test_session():
self.assertAllClose(
tf.transpose(patient).eval(),
[
[0.2, 0.8, 0.0],
[0.95, 0.04, 0.01]
]
)
if __name__ == '__main__':
tf.test.main()
|
Use TensorFlow's test utilities and add a test for the L1 projection to simplex.
|
Use TensorFlow's test utilities and add a test for the L1 projection to simplex.
|
Python
|
mit
|
AmiiThinks/amii-tf-nn
|
from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
- import pytest
+ class ProjectionTest(tf.test.TestCase):
- def test_l1_no_negative():
+ def test_l1_no_negative(self):
+ with self.test_session():
+ self.assertAllClose(
- patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0]))
+ l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0])).eval(),
- with tf.Session() as sess:
- print(sess.run(patient))
- strat = sess.run(patient)
- x_strat = [0.2, 0.8, 0.0]
+ [0.2, 0.8, 0.0]
- assert len(strat) == len(x_strat)
- for i in range(len(strat)):
- assert strat[i] == pytest.approx(x_strat[i])
+ )
+
+ def test_l1_with_negative(self):
+ with self.test_session():
+ self.assertAllClose(
+ l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0])).eval(),
+ [0.2, 0.8, 0.0]
+ )
+
+ def test_l1_multiple_rows(self):
+ patient = l1_projection_to_simplex(
+ tf.transpose(
+ tf.constant(
+ [
+ [2.0, 8.0, -5.0],
+ [9.5, 0.4, 0.1]
+ ]
+ )
+ )
+ )
+ with self.test_session():
+ self.assertAllClose(
+ tf.transpose(patient).eval(),
+ [
+ [0.2, 0.8, 0.0],
+ [0.95, 0.04, 0.01]
+ ]
+ )
+ if __name__ == '__main__':
+ tf.test.main()
- def test_l1_with_negative():
- patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0]))
- with tf.Session() as sess:
- print(sess.run(patient))
- strat = sess.run(patient)
- x_strat = [0.2, 0.8, 0.0]
- assert len(strat) == len(x_strat)
- for i in range(len(strat)):
- assert strat[i] == pytest.approx(x_strat[i])
|
Use TensorFlow's test utilities and add a test for the L1 projection to simplex.
|
## Code Before:
from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
import pytest
def test_l1_no_negative():
patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0]))
with tf.Session() as sess:
print(sess.run(patient))
strat = sess.run(patient)
x_strat = [0.2, 0.8, 0.0]
assert len(strat) == len(x_strat)
for i in range(len(strat)):
assert strat[i] == pytest.approx(x_strat[i])
def test_l1_with_negative():
patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0]))
with tf.Session() as sess:
print(sess.run(patient))
strat = sess.run(patient)
x_strat = [0.2, 0.8, 0.0]
assert len(strat) == len(x_strat)
for i in range(len(strat)):
assert strat[i] == pytest.approx(x_strat[i])
## Instruction:
Use TensorFlow's test utilities and add a test for the L1 projection to simplex.
## Code After:
from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
class ProjectionTest(tf.test.TestCase):
def test_l1_no_negative(self):
with self.test_session():
self.assertAllClose(
l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0])).eval(),
[0.2, 0.8, 0.0]
)
def test_l1_with_negative(self):
with self.test_session():
self.assertAllClose(
l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0])).eval(),
[0.2, 0.8, 0.0]
)
def test_l1_multiple_rows(self):
patient = l1_projection_to_simplex(
tf.transpose(
tf.constant(
[
[2.0, 8.0, -5.0],
[9.5, 0.4, 0.1]
]
)
)
)
with self.test_session():
self.assertAllClose(
tf.transpose(patient).eval(),
[
[0.2, 0.8, 0.0],
[0.95, 0.04, 0.01]
]
)
if __name__ == '__main__':
tf.test.main()
|
from amii_tf_nn.projection import l1_projection_to_simplex
import tensorflow as tf
- import pytest
+ class ProjectionTest(tf.test.TestCase):
- def test_l1_no_negative():
+ def test_l1_no_negative(self):
? ++++ ++++
+ with self.test_session():
+ self.assertAllClose(
- patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0]))
? ------- ^
+ l1_projection_to_simplex(tf.constant([2.0, 8.0, 0.0])).eval(),
? ^^^^^^^^^^ ++++++++
- with tf.Session() as sess:
- print(sess.run(patient))
- strat = sess.run(patient)
- x_strat = [0.2, 0.8, 0.0]
? ------- ^
+ [0.2, 0.8, 0.0]
? ^^^^^^
- assert len(strat) == len(x_strat)
- for i in range(len(strat)):
- assert strat[i] == pytest.approx(x_strat[i])
+ )
+
+ def test_l1_with_negative(self):
+ with self.test_session():
+ self.assertAllClose(
+ l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0])).eval(),
+ [0.2, 0.8, 0.0]
+ )
+
+ def test_l1_multiple_rows(self):
+ patient = l1_projection_to_simplex(
+ tf.transpose(
+ tf.constant(
+ [
+ [2.0, 8.0, -5.0],
+ [9.5, 0.4, 0.1]
+ ]
+ )
+ )
+ )
+ with self.test_session():
+ self.assertAllClose(
+ tf.transpose(patient).eval(),
+ [
+ [0.2, 0.8, 0.0],
+ [0.95, 0.04, 0.01]
+ ]
+ )
+ if __name__ == '__main__':
+ tf.test.main()
- def test_l1_with_negative():
- patient = l1_projection_to_simplex(tf.constant([2.0, 8.0, -5.0]))
- with tf.Session() as sess:
- print(sess.run(patient))
- strat = sess.run(patient)
- x_strat = [0.2, 0.8, 0.0]
- assert len(strat) == len(x_strat)
- for i in range(len(strat)):
- assert strat[i] == pytest.approx(x_strat[i])
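The three cases pin down l1_projection_to_simplex's behaviour: negative entries are clipped to zero and each column is rescaled to sum to one. A NumPy sketch consistent with all of them (the real function is TensorFlow; this assumes at least one positive entry per column, otherwise the division hits zero):

import numpy as np

def l1_projection_to_simplex(v):
    v = np.maximum(np.asarray(v, dtype=float), 0.0)  # clip negatives to zero
    return v / v.sum(axis=0)                         # normalise each column

print(l1_projection_to_simplex([2.0, 8.0, -5.0]))    # [0.2 0.8 0. ]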
|
2f2ae3308256d2233e0363cb46ee88067da54b4b
|
modules/roles.py
|
modules/roles.py
|
import discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
if len(msg) != 1:
try:
await client.add_roles(message.author,message.server.roles[role[0]])
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
client.send_message(message.channel, msg)
else:
pass
|
import discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
role_to_assign = message.server.roles[role[0]]
if len(msg) != 1:
try:
if role_to_assign in message.author.roles:
await client.remove_roles(message.author,role_to_assign)
msg = ":ok_hand: Removed you from " + role_to_assign + " ."
else:
await client.add_roles(message.author,role_to_assign)
msg = ":ok_hand: Added you to " + role_to_assign + " ."
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
await client.send_message(message.channel, msg)
else:
pass
|
Add role removal and logic cleanup
|
Add role removal and logic cleanup
|
Python
|
mit
|
suclearnub/scubot
|
import discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
+ role_to_assign = message.server.roles[role[0]]
if len(msg) != 1:
try:
+ if role_to_assign in message.author.roles:
+ await client.remove_roles(message.author,role_to_assign)
+ msg = ":ok_hand: Removed you from " + role_to_assign + " ."
+ else:
- await client.add_roles(message.author,message.server.roles[role[0]])
+ await client.add_roles(message.author,role_to_assign)
+ msg = ":ok_hand: Added you to " + role_to_assign + " ."
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
- client.send_message(message.channel, msg)
+ await client.send_message(message.channel, msg)
else:
pass
|
Add role removal and logic cleanup
|
## Code Before:
import discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
if len(msg) != 1:
try:
await client.add_roles(message.author,message.server.roles[role[0]])
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
client.send_message(message.channel, msg)
else:
pass
## Instruction:
Add role removal and logic cleanup
## Code After:
import discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
role_to_assign = message.server.roles[role[0]]
if len(msg) != 1:
try:
if role_to_assign in message.author.roles:
await client.remove_roles(message.author,role_to_assign)
msg = ":ok_hand: Removed you from " + role_to_assign + " ."
else:
await client.add_roles(message.author,role_to_assign)
msg = ":ok_hand: Added you to " + role_to_assign + " ."
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
await client.send_message(message.channel, msg)
else:
pass
|
import discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
+ role_to_assign = message.server.roles[role[0]]
if len(msg) != 1:
try:
+ if role_to_assign in message.author.roles:
+ await client.remove_roles(message.author,role_to_assign)
+ msg = ":ok_hand: Removed you from " + role_to_assign + " ."
+ else:
- await client.add_roles(message.author,message.server.roles[role[0]])
? --------------- ^^^^^^^^^
+ await client.add_roles(message.author,role_to_assign)
? ++++ +++++ ^^^^
+ msg = ":ok_hand: Added you to " + role_to_assign + " ."
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
- client.send_message(message.channel, msg)
? ^^^
+ await client.send_message(message.channel, msg)
? ^^^^^
else:
pass
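One wrinkle in the new version: the confirmation strings concatenate role_to_assign, a discord Role object, onto a str, which raises TypeError at runtime; the role's .name attribute is what is wanted there. The toggle itself, isolated with that fix (this keeps the pre-1.0 discord.py client methods the module already uses):

async def toggle_role(client, member, role):
    if role in member.roles:
        await client.remove_roles(member, role)
        return ":ok_hand: Removed you from %s." % role.name
    await client.add_roles(member, role)
    return ":ok_hand: Added you to %s." % role.name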
|
7d88c98fcf6984b07a8b085f8272868b1c23b29e
|
app/status/views.py
|
app/status/views.py
|
from flask import jsonify, current_app
from . import status
from . import utils
from ..main.services.search_service import status_for_all_indexes
@status.route('/_status')
def status():
db_status = status_for_all_indexes()
if db_status['status_code'] == 200:
return jsonify(
status="ok",
version=utils.get_version_label(),
db_status=db_status
)
current_app.logger.exception("Error connecting to elasticsearch")
return jsonify(
status="error",
version=utils.get_version_label(),
message="Error connecting to elasticsearch",
db_status=db_status
), 500
|
from flask import jsonify, current_app
from . import status
from . import utils
from ..main.services.search_service import status_for_all_indexes
@status.route('/_status')
def status():
db_status = status_for_all_indexes()
if db_status['status_code'] == 200:
return jsonify(
status="ok",
version=utils.get_version_label(),
db_status=db_status
)
current_app.logger.exception("Error connecting to elasticsearch")
return jsonify(
status="error",
version=utils.get_version_label(),
message="Error connecting to elasticsearch",
db_status={
'status_code': 500,
'message': db_status['message'][0]
}
), 500
|
Return correct message if elasticsearch fails to connect.
|
Return correct message if elasticsearch fails to connect.
|
Python
|
mit
|
alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api,RichardKnop/digitalmarketplace-search-api,RichardKnop/digitalmarketplace-search-api,RichardKnop/digitalmarketplace-search-api,RichardKnop/digitalmarketplace-search-api
|
from flask import jsonify, current_app
from . import status
from . import utils
from ..main.services.search_service import status_for_all_indexes
@status.route('/_status')
def status():
db_status = status_for_all_indexes()
if db_status['status_code'] == 200:
return jsonify(
status="ok",
version=utils.get_version_label(),
db_status=db_status
)
current_app.logger.exception("Error connecting to elasticsearch")
return jsonify(
status="error",
version=utils.get_version_label(),
message="Error connecting to elasticsearch",
- db_status=db_status
+ db_status={
+ 'status_code': 500,
+ 'message': db_status['message'][0]
+ }
), 500
|
Return correct message if elasticsearch fails to connect.
|
## Code Before:
from flask import jsonify, current_app
from . import status
from . import utils
from ..main.services.search_service import status_for_all_indexes
@status.route('/_status')
def status():
db_status = status_for_all_indexes()
if db_status['status_code'] == 200:
return jsonify(
status="ok",
version=utils.get_version_label(),
db_status=db_status
)
current_app.logger.exception("Error connecting to elasticsearch")
return jsonify(
status="error",
version=utils.get_version_label(),
message="Error connecting to elasticsearch",
db_status=db_status
), 500
## Instruction:
Return correct message if elasticsearch fails to connect.
## Code After:
from flask import jsonify, current_app
from . import status
from . import utils
from ..main.services.search_service import status_for_all_indexes
@status.route('/_status')
def status():
db_status = status_for_all_indexes()
if db_status['status_code'] == 200:
return jsonify(
status="ok",
version=utils.get_version_label(),
db_status=db_status
)
current_app.logger.exception("Error connecting to elasticsearch")
return jsonify(
status="error",
version=utils.get_version_label(),
message="Error connecting to elasticsearch",
db_status={
'status_code': 500,
'message': db_status['message'][0]
}
), 500
|
from flask import jsonify, current_app
from . import status
from . import utils
from ..main.services.search_service import status_for_all_indexes
@status.route('/_status')
def status():
db_status = status_for_all_indexes()
if db_status['status_code'] == 200:
return jsonify(
status="ok",
version=utils.get_version_label(),
db_status=db_status
)
current_app.logger.exception("Error connecting to elasticsearch")
return jsonify(
status="error",
version=utils.get_version_label(),
message="Error connecting to elasticsearch",
- db_status=db_status
? ^^^^^^^^^
+ db_status={
? ^
+ 'status_code': 500,
+ 'message': db_status['message'][0]
+ }
), 500
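The reshaped payload exposes a fixed status code plus the first entry of the message list, which implies status_for_all_indexes reports failures as {'status_code': ..., 'message': [...]}. A quick check of the failure branch with Flask's test client, sketched with pytest's monkeypatch (the app fixture and the patch target path are assumptions about the test suite):

def test_status_error(app, monkeypatch):
    monkeypatch.setattr(
        'app.status.views.status_for_all_indexes',
        lambda: {'status_code': 500, 'message': ['connection refused']})
    response = app.test_client().get('/_status')
    assert response.status_code == 500
    assert response.get_json()['db_status'] == {
        'status_code': 500, 'message': 'connection refused'}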
|
15a9d8b9e361462532ed286abce4ee445b9ec74a
|
analytics/rejections.py
|
analytics/rejections.py
|
from urllib.parse import urlparse
BAD_PATHS = [
'/admin/',
'/dbadmin/',
]
def should_be_rejected(log_line):
if urlparse(log_line.url).path in BAD_PATHS:
return True
return False
|
from urllib.parse import urlparse
BAD_PATHS = [
'/admin/',
'/dbadmin/',
'/myadmin/',
'/mysqladmin/',
'/mysql-admin/',
'/mysqlmanager/',
'/sqlmanager/',
'/sqlweb/',
'/webdb/',
'/websql/',
]
def should_be_rejected(log_line):
if urlparse(log_line.url).path in BAD_PATHS:
return True
return False
|
Add more to the list of bad paths
|
Add more to the list of bad paths
|
Python
|
mit
|
alexwlchan/alexwlchan.net,alexwlchan/alexwlchan.net,alexwlchan/alexwlchan.net,alexwlchan/alexwlchan.net,alexwlchan/alexwlchan.net
|
from urllib.parse import urlparse
BAD_PATHS = [
'/admin/',
'/dbadmin/',
+ '/myadmin/',
+ '/mysqladmin/',
+ '/mysql-admin/',
+ '/mysqlmanager/',
+ '/sqlmanager/',
+ '/sqlweb/',
+ '/webdb/',
+ '/websql/',
]
def should_be_rejected(log_line):
if urlparse(log_line.url).path in BAD_PATHS:
return True
return False
|
Add more to the list of bad paths
|
## Code Before:
from urllib.parse import urlparse
BAD_PATHS = [
'/admin/',
'/dbadmin/',
]
def should_be_rejected(log_line):
if urlparse(log_line.url).path in BAD_PATHS:
return True
return False
## Instruction:
Add more to the list of bad paths
## Code After:
from urllib.parse import urlparse
BAD_PATHS = [
'/admin/',
'/dbadmin/',
'/myadmin/',
'/mysqladmin/',
'/mysql-admin/',
'/mysqlmanager/',
'/sqlmanager/',
'/sqlweb/',
'/webdb/',
'/websql/',
]
def should_be_rejected(log_line):
if urlparse(log_line.url).path in BAD_PATHS:
return True
return False
|
from urllib.parse import urlparse
BAD_PATHS = [
'/admin/',
'/dbadmin/',
+ '/myadmin/',
+ '/mysqladmin/',
+ '/mysql-admin/',
+ '/mysqlmanager/',
+ '/sqlmanager/',
+ '/sqlweb/',
+ '/webdb/',
+ '/websql/',
]
def should_be_rejected(log_line):
if urlparse(log_line.url).path in BAD_PATHS:
return True
return False
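Matching is on the exact parsed path, so query strings and hosts are ignored but sub-paths slip through; '/admin/login' is not rejected. A quick check (log lines only need a .url attribute here):

from collections import namedtuple

LogLine = namedtuple('LogLine', 'url')

print(should_be_rejected(LogLine('http://x.net/mysqladmin/?q=1')))  # True
print(should_be_rejected(LogLine('http://x.net/admin/login')))      # False: exact match only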
|
e2cba02550dfbe8628daf024a2a35c0dffb234e9
|
python/cli/request.py
|
python/cli/request.py
|
import requests
import os
aport = os.environ.get('MYAPORT')
if aport is None:
aport = "80"
aport = "23456"
url1 = 'http://localhost:' + aport + '/'
url2 = 'http://localhost:' + aport + '/action/improvesimulateinvest'
url3 = 'http://localhost:' + aport + '/action/autosimulateinvest'
url4 = 'http://localhost:' + aport + '/action/improveautosimulateinvest'
#headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
#headers={'Content-type':'application/json', 'Accept':'application/json'}
headers={'Content-Type' : 'application/json;charset=utf-8'}
def request1(param, webpath):
return requests.post(url1 + webpath, json=param, headers=headers)
def request2(market, data):
return requests.post(url2 + '/market/' + str(market), json=data, headers=headers)
def request3(market, data):
return requests.post(url3 + '/market/' + str(market), json=data, headers=headers)
def request4(market, data):
return requests.post(url4 + '/market/' + str(market), json=data, headers=headers)
def request0(data):
return requests.post(url, data='', headers=headers)
#return requests.post(url, data=json.dumps(data), headers=headers)
|
import requests
import os
aport = os.environ.get('MYAPORT')
if aport is None:
aport = "80"
aport = "23456"
ahost = os.environ.get('MYAHOST')
if ahost is None:
ahost = "localhost"
url1 = 'http://' + ahost + ':' + aport + '/'
#headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
#headers={'Content-type':'application/json', 'Accept':'application/json'}
headers={'Content-Type' : 'application/json;charset=utf-8'}
def request1(param, webpath):
return requests.post(url1 + webpath, json=param, headers=headers)
def request0(data):
return requests.post(url, data='', headers=headers)
#return requests.post(url, data=json.dumps(data), headers=headers)
|
Handle different environments, for automation (I4).
|
Handle different environments, for automation (I4).
|
Python
|
agpl-3.0
|
rroart/aether,rroart/aether,rroart/aether,rroart/aether,rroart/aether
|
import requests
import os
aport = os.environ.get('MYAPORT')
if aport is None:
aport = "80"
+ aport = "23456"
- aport = "23456"
-
+ ahost = os.environ.get('MYAHOST')
+ if ahost is None:
+ ahost = "localhost"
+
- url1 = 'http://localhost:' + aport + '/'
+ url1 = 'http://' + ahost + ':' + aport + '/'
- url2 = 'http://localhost:' + aport + '/action/improvesimulateinvest'
- url3 = 'http://localhost:' + aport + '/action/autosimulateinvest'
- url4 = 'http://localhost:' + aport + '/action/improveautosimulateinvest'
#headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
#headers={'Content-type':'application/json', 'Accept':'application/json'}
headers={'Content-Type' : 'application/json;charset=utf-8'}
def request1(param, webpath):
return requests.post(url1 + webpath, json=param, headers=headers)
- def request2(market, data):
- return requests.post(url2 + '/market/' + str(market), json=data, headers=headers)
-
- def request3(market, data):
- return requests.post(url3 + '/market/' + str(market), json=data, headers=headers)
-
- def request4(market, data):
- return requests.post(url4 + '/market/' + str(market), json=data, headers=headers)
-
def request0(data):
return requests.post(url, data='', headers=headers)
#return requests.post(url, data=json.dumps(data), headers=headers)
|
Handle different environments, for automation (I4).
|
## Code Before:
import requests
import os
aport = os.environ.get('MYAPORT')
if aport is None:
aport = "80"
aport = "23456"
url1 = 'http://localhost:' + aport + '/'
url2 = 'http://localhost:' + aport + '/action/improvesimulateinvest'
url3 = 'http://localhost:' + aport + '/action/autosimulateinvest'
url4 = 'http://localhost:' + aport + '/action/improveautosimulateinvest'
#headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
#headers={'Content-type':'application/json', 'Accept':'application/json'}
headers={'Content-Type' : 'application/json;charset=utf-8'}
def request1(param, webpath):
return requests.post(url1 + webpath, json=param, headers=headers)
def request2(market, data):
return requests.post(url2 + '/market/' + str(market), json=data, headers=headers)
def request3(market, data):
return requests.post(url3 + '/market/' + str(market), json=data, headers=headers)
def request4(market, data):
return requests.post(url4 + '/market/' + str(market), json=data, headers=headers)
def request0(data):
return requests.post(url, data='', headers=headers)
#return requests.post(url, data=json.dumps(data), headers=headers)
## Instruction:
Handle different environments, for automation (I4).
## Code After:
import requests
import os
aport = os.environ.get('MYAPORT')
if aport is None:
aport = "80"
aport = "23456"
ahost = os.environ.get('MYAHOST')
if ahost is None:
ahost = "localhost"
url1 = 'http://' + ahost + ':' + aport + '/'
#headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
#headers={'Content-type':'application/json', 'Accept':'application/json'}
headers={'Content-Type' : 'application/json;charset=utf-8'}
def request1(param, webpath):
return requests.post(url1 + webpath, json=param, headers=headers)
def request0(data):
return requests.post(url, data='', headers=headers)
#return requests.post(url, data=json.dumps(data), headers=headers)
|
import requests
import os
aport = os.environ.get('MYAPORT')
if aport is None:
aport = "80"
+ aport = "23456"
- aport = "23456"
-
+ ahost = os.environ.get('MYAHOST')
+ if ahost is None:
+ ahost = "localhost"
+
- url1 = 'http://localhost:' + aport + '/'
? ^^^ -
+ url1 = 'http://' + ahost + ':' + aport + '/'
? ^^^^ ++++
- url2 = 'http://localhost:' + aport + '/action/improvesimulateinvest'
- url3 = 'http://localhost:' + aport + '/action/autosimulateinvest'
- url4 = 'http://localhost:' + aport + '/action/improveautosimulateinvest'
#headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
#headers={'Content-type':'application/json', 'Accept':'application/json'}
headers={'Content-Type' : 'application/json;charset=utf-8'}
def request1(param, webpath):
return requests.post(url1 + webpath, json=param, headers=headers)
- def request2(market, data):
- return requests.post(url2 + '/market/' + str(market), json=data, headers=headers)
-
- def request3(market, data):
- return requests.post(url3 + '/market/' + str(market), json=data, headers=headers)
-
- def request4(market, data):
- return requests.post(url4 + '/market/' + str(market), json=data, headers=headers)
-
def request0(data):
return requests.post(url, data='', headers=headers)
#return requests.post(url, data=json.dumps(data), headers=headers)
|
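Two things worth noting when using the change above. Both values are resolved at import time, so the environment must be set before the module is imported; and as committed, the unconditional aport = "23456" line still overrides whatever MYAPORT holds, so only the host is effectively configurable. A usage sketch under those constraints (module path and hostname are assumptions):

import os

os.environ['MYAHOST'] = 'ci-runner.internal'  # hypothetical host

from cli import request  # assumed import path

# url1 is now 'http://ci-runner.internal:23456/'; the port stays pinned.
resp = request.request1({'market': 1}, 'action/autosimulateinvest')
print(resp.status_code)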
e096343aaaa916232633543d57431b7f3022215a
|
awscfncli/__main__.py
|
awscfncli/__main__.py
|
__author__ = 'kotaimen'
__date__ = '28-Feb-2018'
"""Main cli entry point, called when awscfncli is run as a package,
imported in setuptools intergration.
cli package stucture:
Click main entry:
cli/main.py
Command groups:
cli/group_named/__init__.py
Subcommands:
cli/group_name/command_name.py
All commands are imported in cli/__init__.py to get registered into click.
"""
from .cli import cfn_cli
def main():
cfn_cli()
if __name__ == '__main__':
main()
|
__author__ = 'kotaimen'
__date__ = '28-Feb-2018'
"""Main cli entry point, called when awscfncli is run as a package,
imported in setuptools intergration.
cli package stucture:
Click main entry:
cli/main.py
Command groups:
cli/group_named/__init__.py
Subcommands:
cli/group_name/command_name.py
All commands are imported in cli/__init__.py to get registered into click.
"""
from .cli import cfn_cli
def main():
cfn_cli(
auto_envvar_prefix='CFN'
)
if __name__ == '__main__':
main()
|
Add click automatic environment variable prefix.
|
Add click automatic environment variable prefix.
|
Python
|
mit
|
Kotaimen/awscfncli,Kotaimen/awscfncli
|
__author__ = 'kotaimen'
__date__ = '28-Feb-2018'
"""Main cli entry point, called when awscfncli is run as a package,
imported in setuptools intergration.
cli package stucture:
Click main entry:
cli/main.py
Command groups:
cli/group_named/__init__.py
Subcommands:
cli/group_name/command_name.py
All commands are imported in cli/__init__.py to get registered into click.
"""
from .cli import cfn_cli
def main():
- cfn_cli()
+ cfn_cli(
+ auto_envvar_prefix='CFN'
+ )
if __name__ == '__main__':
main()
|
Add click automatic environment variable prefix.
|
## Code Before:
__author__ = 'kotaimen'
__date__ = '28-Feb-2018'
"""Main cli entry point, called when awscfncli is run as a package,
imported in setuptools intergration.
cli package stucture:
Click main entry:
cli/main.py
Command groups:
cli/group_named/__init__.py
Subcommands:
cli/group_name/command_name.py
All commands are imported in cli/__init__.py to get registered into click.
"""
from .cli import cfn_cli
def main():
cfn_cli()
if __name__ == '__main__':
main()
## Instruction:
Add click automatic environment variable prefix.
## Code After:
__author__ = 'kotaimen'
__date__ = '28-Feb-2018'
"""Main cli entry point, called when awscfncli is run as a package,
imported in setuptools intergration.
cli package stucture:
Click main entry:
cli/main.py
Command groups:
cli/group_named/__init__.py
Subcommands:
cli/group_name/command_name.py
All commands are imported in cli/__init__.py to get registered into click.
"""
from .cli import cfn_cli
def main():
cfn_cli(
auto_envvar_prefix='CFN'
)
if __name__ == '__main__':
main()
|
__author__ = 'kotaimen'
__date__ = '28-Feb-2018'
"""Main cli entry point, called when awscfncli is run as a package,
imported in setuptools intergration.
cli package stucture:
Click main entry:
cli/main.py
Command groups:
cli/group_named/__init__.py
Subcommands:
cli/group_name/command_name.py
All commands are imported in cli/__init__.py to get registered into click.
"""
from .cli import cfn_cli
def main():
- cfn_cli()
? -
+ cfn_cli(
+ auto_envvar_prefix='CFN'
+ )
if __name__ == '__main__':
main()
|
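What the prefix buys, in a standalone sketch: with auto_envvar_prefix='CFN', click also resolves each option from an environment variable named CFN_<OPTION> (with the subcommand name spliced in for commands inside a group). The --profile option below is hypothetical, not an actual awscfncli flag.

import click

@click.command()
@click.option('--profile', default='default')
def cli(profile):
    click.echo(profile)

if __name__ == '__main__':
    # CFN_PROFILE=production python demo.py  ->  prints 'production'
    cli(auto_envvar_prefix='CFN')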
4ac335e2ac69f634d51ab8b84805947fe2b87fc5
|
app.py
|
app.py
|
import os
import sys
from pushbullet import Pushbullet
def create_note(title, content):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
if len(sys.argv) >= 3:
title = sys.argv[1]
body = sys.argv[2]
create_note(title, body)
else:
print("Error: Missing arguments")
|
import hug
import os
from pushbullet import Pushbullet
@hug.cli()
def create_note(title: hug.types.text, content: hug.types.text):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
if __name__ == '__main__':
create_note.interface.cli()
|
Migrate command line interface to hug
|
Migrate command line interface to hug
|
Python
|
isc
|
tildecross/tildex-notify
|
+ import hug
import os
- import sys
from pushbullet import Pushbullet
- def create_note(title, content):
+ @hug.cli()
+ def create_note(title: hug.types.text, content: hug.types.text):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
+ if __name__ == '__main__':
+ create_note.interface.cli()
- if len(sys.argv) >= 3:
- title = sys.argv[1]
- body = sys.argv[2]
- create_note(title, body)
- else:
- print("Error: Missing arguments")
|
Migrate command line interface to hug
|
## Code Before:
import os
import sys
from pushbullet import Pushbullet
def create_note(title, content):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
if len(sys.argv) >= 3:
title = sys.argv[1]
body = sys.argv[2]
create_note(title, body)
else:
print("Error: Missing arguments")
## Instruction:
Migrate command line interface to hug
## Code After:
import hug
import os
from pushbullet import Pushbullet
@hug.cli()
def create_note(title: hug.types.text, content: hug.types.text):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
if __name__ == '__main__':
create_note.interface.cli()
|
+ import hug
import os
- import sys
from pushbullet import Pushbullet
- def create_note(title, content):
+ @hug.cli()
+ def create_note(title: hug.types.text, content: hug.types.text):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
+ if __name__ == '__main__':
+ create_note.interface.cli()
- if len(sys.argv) >= 3:
- title = sys.argv[1]
- body = sys.argv[2]
- create_note(title, body)
- else:
- print("Error: Missing arguments")
|
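The pattern the migration lands on, in a minimal self-contained sketch (the greet command is hypothetical): the hug.types annotations double as CLI argument validation, and .interface.cli() supplies the argument parsing that the removed sys.argv handling used to do by hand.

import hug

@hug.cli()
def greet(name: hug.types.text):
    return 'Hello, {name}'.format(name=name)

if __name__ == '__main__':
    greet.interface.cli()  # e.g.  python greet.py World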
082076cce996593c9959fc0743f13b62d2e4842b
|
chared/__init__.py
|
chared/__init__.py
|
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
__version__ = 'r$Rev$'
|
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
import re
__version__ = re.sub('.*(\d+).*', r'rev\1', '$Rev$')
|
Make sure the version is displayed as r<revision number> if the information about the package version is not available.
|
Make sure the version is displayed as r<revision number> if the information about the package version is not available.
|
Python
|
bsd-2-clause
|
gilesbrown/chared,xmichelf/chared
|
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
- __version__ = 'r$Rev$'
+ import re
+ __version__ = re.sub('.*(\d+).*', r'rev\1', '$Rev$')
|
Make sure the version is displayed as r<revision number> if the information about the package version is not available.
|
## Code Before:
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
__version__ = 'r$Rev$'
## Instruction:
Make sure the version is displayed as r<revision number> if the information about the package version is not available.
## Code After:
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
import re
__version__ = re.sub('.*(\d+).*', r'rev\1', '$Rev$')
|
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
- __version__ = 'r$Rev$'
+ import re
+ __version__ = re.sub('.*(\d+).*', r'rev\1', '$Rev$')
|
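A quick check of the fallback, run by hand rather than taken from the repo: with an expanded SVN keyword such as '$Rev: 142 $', the greedy leading '.*' leaves only the final digit for the capture group, so the pattern as committed yields 'rev2'; a non-greedy '.*?' would keep the whole number. With an unexpanded '$Rev$' there is no digit to match and the string passes through unchanged.

import re

print(re.sub(r'.*(\d+).*', r'rev\1', '$Rev: 142 $'))   # -> rev2
print(re.sub(r'.*?(\d+).*', r'rev\1', '$Rev: 142 $'))  # -> rev142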
fa279ca1f8e4c8e6b4094840d3ab40c0ac637eff
|
ocradmin/ocrpresets/models.py
|
ocradmin/ocrpresets/models.py
|
from django.db import models
from django.contrib.auth.models import User
from picklefield import fields
from tagging.fields import TagField
import tagging
class OcrPreset(models.Model):
user = models.ForeignKey(User)
tags = TagField()
name = models.CharField(max_length=100, unique=True)
description = models.TextField(null=True, blank=True)
public = models.BooleanField(default=True)
created_on = models.DateField(auto_now_add=True)
updated_on = models.DateField(null=True, blank=True, auto_now=True)
type = models.CharField(max_length=20,
choices=[("segment", "Segment"), ("binarize", "Binarize")])
data = fields.PickledObjectField()
def __unicode__(self):
"""
String representation.
"""
return self.name
|
from django.db import models
from django.contrib.auth.models import User
from picklefield import fields
from tagging.fields import TagField
import tagging
class OcrPreset(models.Model):
user = models.ForeignKey(User)
tags = TagField()
name = models.CharField(max_length=100, unique=True)
description = models.TextField(null=True, blank=True)
public = models.BooleanField(default=True)
created_on = models.DateField(auto_now_add=True)
updated_on = models.DateField(null=True, blank=True, auto_now=True)
type = models.CharField(max_length=20,
choices=[("segment", "Segment"), ("binarize", "Binarize")])
data = fields.PickledObjectField()
def __unicode__(self):
"""
String representation.
"""
return "<%s: %s>" % (self.__class__.__name__, self.name)
|
Improve unicode method. Whitespace cleanup
|
Improve unicode method. Whitespace cleanup
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
from django.db import models
from django.contrib.auth.models import User
from picklefield import fields
from tagging.fields import TagField
import tagging
class OcrPreset(models.Model):
user = models.ForeignKey(User)
tags = TagField()
name = models.CharField(max_length=100, unique=True)
description = models.TextField(null=True, blank=True)
public = models.BooleanField(default=True)
created_on = models.DateField(auto_now_add=True)
updated_on = models.DateField(null=True, blank=True, auto_now=True)
type = models.CharField(max_length=20,
choices=[("segment", "Segment"), ("binarize", "Binarize")])
data = fields.PickledObjectField()
def __unicode__(self):
"""
String representation.
"""
- return self.name
+ return "<%s: %s>" % (self.__class__.__name__, self.name)
|
Improve unicode method. Whitespace cleanup
|
## Code Before:
from django.db import models
from django.contrib.auth.models import User
from picklefield import fields
from tagging.fields import TagField
import tagging
class OcrPreset(models.Model):
user = models.ForeignKey(User)
tags = TagField()
name = models.CharField(max_length=100, unique=True)
description = models.TextField(null=True, blank=True)
public = models.BooleanField(default=True)
created_on = models.DateField(auto_now_add=True)
updated_on = models.DateField(null=True, blank=True, auto_now=True)
type = models.CharField(max_length=20,
choices=[("segment", "Segment"), ("binarize", "Binarize")])
data = fields.PickledObjectField()
def __unicode__(self):
"""
String representation.
"""
return self.name
## Instruction:
Improve unicode method. Whitespace cleanup
## Code After:
from django.db import models
from django.contrib.auth.models import User
from picklefield import fields
from tagging.fields import TagField
import tagging
class OcrPreset(models.Model):
user = models.ForeignKey(User)
tags = TagField()
name = models.CharField(max_length=100, unique=True)
description = models.TextField(null=True, blank=True)
public = models.BooleanField(default=True)
created_on = models.DateField(auto_now_add=True)
updated_on = models.DateField(null=True, blank=True, auto_now=True)
type = models.CharField(max_length=20,
choices=[("segment", "Segment"), ("binarize", "Binarize")])
data = fields.PickledObjectField()
def __unicode__(self):
"""
String representation.
"""
return "<%s: %s>" % (self.__class__.__name__, self.name)
|
from django.db import models
from django.contrib.auth.models import User
from picklefield import fields
from tagging.fields import TagField
import tagging
class OcrPreset(models.Model):
user = models.ForeignKey(User)
tags = TagField()
name = models.CharField(max_length=100, unique=True)
description = models.TextField(null=True, blank=True)
public = models.BooleanField(default=True)
created_on = models.DateField(auto_now_add=True)
updated_on = models.DateField(null=True, blank=True, auto_now=True)
type = models.CharField(max_length=20,
choices=[("segment", "Segment"), ("binarize", "Binarize")])
data = fields.PickledObjectField()
def __unicode__(self):
"""
String representation.
"""
- return self.name
+ return "<%s: %s>" % (self.__class__.__name__, self.name)
|
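Interpolating self.__class__.__name__ means subclasses report themselves under their own names rather than the parent's. A quick illustration with a stand-in class (no Django required; the name value is hypothetical):

class OcrPresetDemo(object):
    name = 'binarize-default'

    def __unicode__(self):
        return "<%s: %s>" % (self.__class__.__name__, self.name)

print(OcrPresetDemo().__unicode__())  # -> <OcrPresetDemo: binarize-default>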
514e41f8cb3717f3fcd0c1283e60e9f202b79598
|
saddle-points/saddle_points.py
|
saddle-points/saddle_points.py
|
def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == min(row) and col[i] == max(col))
or (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
|
def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
|
Correct it to actually follow the README...
|
Correct it to actually follow the README...
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
- if (row[j] == min(row) and col[i] == max(col))
- or (row[j] == max(row) and col[i] == min(col)))
+ if (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
|
Correct it to actually follow the README...
|
## Code Before:
def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == min(row) and col[i] == max(col))
or (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
## Instruction:
Correct it to actually follow the README...
## Code After:
def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
|
def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
- if (row[j] == min(row) and col[i] == max(col))
- or (row[j] == max(row) and col[i] == min(col)))
? ^^
+ if (row[j] == max(row) and col[i] == min(col)))
? ^^
def transpose(m):
return [list(col) for col in zip(*m)]
|
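A worked example under the corrected rule (a saddle point is the maximum of its row and the minimum of its column), assuming saddle_points from the module above is in scope:

m = [
    [9, 8, 7],
    [5, 3, 2],
    [6, 6, 7],
]
print(saddle_points(m))  # -> {(1, 0)}: 5 is the max of its row, min of its column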
9db0f0430466b9d4d70c7803f7d39ecdeb85e375
|
src/puzzle/problems/image/image_problem.py
|
src/puzzle/problems/image/image_problem.py
|
import numpy as np
from puzzle.problems import problem
class ImageProblem(problem.Problem):
def __init__(self, name: str, data: np.ndarray, *args, **kwargs) -> None:
super(ImageProblem, self).__init__(name, data, *args, **kwargs)
@staticmethod
def score(data: problem.ProblemData) -> float:
if not isinstance(data, np.ndarray):
return 0
if data.dtype == np.uint8:
return 1
return .5
def __str__(self) -> str:
return '<image data>'
def _solve(self) -> dict:
return {}
|
import numpy as np
from data.image import image
from puzzle.constraints.image import prepare_image_constraints
from puzzle.problems import problem
from puzzle.steps.image import prepare_image
class ImageProblem(problem.Problem):
_source_image: image.Image
_prepare_image: prepare_image.PrepareImage
def __init__(self, name: str, data: np.ndarray, *args, **kwargs) -> None:
super(ImageProblem, self).__init__(name, data, *args, **kwargs)
self._source_image = image.Image(data)
self._prepare_image = prepare_image.PrepareImage(
prepare_image_constraints.PrepareImageConstraints(), self._source_image)
self._solutions_generator.depends_on(self._prepare_image)
@staticmethod
def score(data: problem.ProblemData) -> float:
if not isinstance(data, np.ndarray):
return 0
if data.dtype == np.uint8:
return 1
return .5
def __str__(self) -> str:
return '<image data>'
def _solve(self) -> dict:
return {}
|
Update ImageProblem to use PrepareImage step.
|
Update ImageProblem to use PrepareImage step.
|
Python
|
mit
|
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
|
import numpy as np
+ from data.image import image
+ from puzzle.constraints.image import prepare_image_constraints
from puzzle.problems import problem
+ from puzzle.steps.image import prepare_image
class ImageProblem(problem.Problem):
+ _source_image: image.Image
+ _prepare_image: prepare_image.PrepareImage
+
def __init__(self, name: str, data: np.ndarray, *args, **kwargs) -> None:
super(ImageProblem, self).__init__(name, data, *args, **kwargs)
+ self._source_image = image.Image(data)
+ self._prepare_image = prepare_image.PrepareImage(
+ prepare_image_constraints.PrepareImageConstraints(), self._source_image)
+ self._solutions_generator.depends_on(self._prepare_image)
@staticmethod
def score(data: problem.ProblemData) -> float:
if not isinstance(data, np.ndarray):
return 0
if data.dtype == np.uint8:
return 1
return .5
def __str__(self) -> str:
return '<image data>'
def _solve(self) -> dict:
return {}
|
Update ImageProblem to use PrepareImage step.
|
## Code Before:
import numpy as np
from puzzle.problems import problem
class ImageProblem(problem.Problem):
def __init__(self, name: str, data: np.ndarray, *args, **kwargs) -> None:
super(ImageProblem, self).__init__(name, data, *args, **kwargs)
@staticmethod
def score(data: problem.ProblemData) -> float:
if not isinstance(data, np.ndarray):
return 0
if data.dtype == np.uint8:
return 1
return .5
def __str__(self) -> str:
return '<image data>'
def _solve(self) -> dict:
return {}
## Instruction:
Update ImageProblem to use PrepareImage step.
## Code After:
import numpy as np
from data.image import image
from puzzle.constraints.image import prepare_image_constraints
from puzzle.problems import problem
from puzzle.steps.image import prepare_image
class ImageProblem(problem.Problem):
_source_image: image.Image
_prepare_image: prepare_image.PrepareImage
def __init__(self, name: str, data: np.ndarray, *args, **kwargs) -> None:
super(ImageProblem, self).__init__(name, data, *args, **kwargs)
self._source_image = image.Image(data)
self._prepare_image = prepare_image.PrepareImage(
prepare_image_constraints.PrepareImageConstraints(), self._source_image)
self._solutions_generator.depends_on(self._prepare_image)
@staticmethod
def score(data: problem.ProblemData) -> float:
if not isinstance(data, np.ndarray):
return 0
if data.dtype == np.uint8:
return 1
return .5
def __str__(self) -> str:
return '<image data>'
def _solve(self) -> dict:
return {}
|
import numpy as np
+ from data.image import image
+ from puzzle.constraints.image import prepare_image_constraints
from puzzle.problems import problem
+ from puzzle.steps.image import prepare_image
class ImageProblem(problem.Problem):
+ _source_image: image.Image
+ _prepare_image: prepare_image.PrepareImage
+
def __init__(self, name: str, data: np.ndarray, *args, **kwargs) -> None:
super(ImageProblem, self).__init__(name, data, *args, **kwargs)
+ self._source_image = image.Image(data)
+ self._prepare_image = prepare_image.PrepareImage(
+ prepare_image_constraints.PrepareImageConstraints(), self._source_image)
+ self._solutions_generator.depends_on(self._prepare_image)
@staticmethod
def score(data: problem.ProblemData) -> float:
if not isinstance(data, np.ndarray):
return 0
if data.dtype == np.uint8:
return 1
return .5
def __str__(self) -> str:
return '<image data>'
def _solve(self) -> dict:
return {}
|
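The depends_on() call is the load-bearing line of the wiring above. A rough stand-in for the registration pattern it implies (not the real puzzle framework, whose Step semantics aren't shown in this record):

class Step(object):
    def __init__(self):
        self._dependents = []

    def depends_on(self, other):
        other._dependents.append(self)  # recompute self after `other` changes

prepare, solutions = Step(), Step()
solutions.depends_on(prepare)
assert solutions in prepare._dependents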
0dd9fba16a73954a3bbb18c5b2de9995c07ef56f
|
pushbullet/filetype.py
|
pushbullet/filetype.py
|
def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
return file_type
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
|
def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
return file_type.decode("ASCII")
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
|
Fix libmagic issue with Python 3
|
Fix libmagic issue with Python 3
|
Python
|
mit
|
kovacsbalu/pushbullet.py,randomchars/pushbullet.py,Saturn/pushbullet.py
|
def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
- return file_type
+ return file_type.decode("ASCII")
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
|
Fix libmagic issue with Python 3
|
## Code Before:
def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
return file_type
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
## Instruction:
Fix libmagic issue with Python 3
## Code After:
def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
return file_type.decode("ASCII")
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
|
def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
- return file_type
+ return file_type.decode("ASCII")
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
|
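Context for the one-line fix: at the time of this commit, python-magic's from_buffer(..., mime=True) returned bytes on Python 3, and a bytes MIME type breaks anywhere a str is expected. Minimal illustration, with a hard-coded value standing in for the magic call:

file_type = b'image/png'          # stand-in for magic.from_buffer(..., mime=True)
print(file_type.decode('ASCII'))  # 'image/png', a real str, safe to compare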
dbe40d21d6f38cbb0827eeaaaaab425dd9b724ca
|
tasks/__init__.py
|
tasks/__init__.py
|
from celery import Celery
from tornado.options import options
from tasks.helpers import create_mq_url
queue_conf = {
'CELERY_TASK_SERIALIZER': 'json',
'CELERY_ACCEPT_CONTENT': ['json'],
'CELERY_RESULT_SERIALIZER': 'json',
'CELERY_TASK_RESULT_EXPIRES': 3600
}
selftest_task_queue = Celery(
'selftest_task_queue',
backend='rpc',
broker=create_mq_url(options.mq_hostname, options.mq_port,
username=options.mq_username,
password=options.mq_password),
include=[
])
selftest_task_queue.conf.update(**queue_conf)
|
from celery import Celery
from tornado.options import options
from tasks.helpers import create_mq_url
queue_conf = {
'CELERY_TASK_SERIALIZER': 'json',
'CELERY_ACCEPT_CONTENT': ['json'],
'CELERY_RESULT_SERIALIZER': 'json',
'CELERY_TASK_RESULT_EXPIRES': 3600
}
selftest_task_queue = Celery(
'selftest_task_queue',
backend='rpc',
broker=create_mq_url(options.mq_hostname, options.mq_port,
username=options.mq_username,
password=options.mq_password),
include=[
"tasks.message_tasks"
])
selftest_task_queue.conf.update(**queue_conf)
|
Add tasks to list of mq tasks
|
Add tasks to list of mq tasks
|
Python
|
apache-2.0
|
BishopFox/SpoofcheckSelfTest,BishopFox/SpoofcheckSelfTest,BishopFox/SpoofcheckSelfTest
|
from celery import Celery
from tornado.options import options
from tasks.helpers import create_mq_url
queue_conf = {
'CELERY_TASK_SERIALIZER': 'json',
'CELERY_ACCEPT_CONTENT': ['json'],
'CELERY_RESULT_SERIALIZER': 'json',
'CELERY_TASK_RESULT_EXPIRES': 3600
}
selftest_task_queue = Celery(
'selftest_task_queue',
backend='rpc',
broker=create_mq_url(options.mq_hostname, options.mq_port,
username=options.mq_username,
password=options.mq_password),
include=[
-
+ "tasks.message_tasks"
])
selftest_task_queue.conf.update(**queue_conf)
|
Add tasks to list of mq tasks
|
## Code Before:
from celery import Celery
from tornado.options import options
from tasks.helpers import create_mq_url
queue_conf = {
'CELERY_TASK_SERIALIZER': 'json',
'CELERY_ACCEPT_CONTENT': ['json'],
'CELERY_RESULT_SERIALIZER': 'json',
'CELERY_TASK_RESULT_EXPIRES': 3600
}
selftest_task_queue = Celery(
'selftest_task_queue',
backend='rpc',
broker=create_mq_url(options.mq_hostname, options.mq_port,
username=options.mq_username,
password=options.mq_password),
include=[
])
selftest_task_queue.conf.update(**queue_conf)
## Instruction:
Add tasks to list of mq tasks
## Code After:
from celery import Celery
from tornado.options import options
from tasks.helpers import create_mq_url
queue_conf = {
'CELERY_TASK_SERIALIZER': 'json',
'CELERY_ACCEPT_CONTENT': ['json'],
'CELERY_RESULT_SERIALIZER': 'json',
'CELERY_TASK_RESULT_EXPIRES': 3600
}
selftest_task_queue = Celery(
'selftest_task_queue',
backend='rpc',
broker=create_mq_url(options.mq_hostname, options.mq_port,
username=options.mq_username,
password=options.mq_password),
include=[
"tasks.message_tasks"
])
selftest_task_queue.conf.update(**queue_conf)
|
from celery import Celery
from tornado.options import options
from tasks.helpers import create_mq_url
queue_conf = {
'CELERY_TASK_SERIALIZER': 'json',
'CELERY_ACCEPT_CONTENT': ['json'],
'CELERY_RESULT_SERIALIZER': 'json',
'CELERY_TASK_RESULT_EXPIRES': 3600
}
selftest_task_queue = Celery(
'selftest_task_queue',
backend='rpc',
broker=create_mq_url(options.mq_hostname, options.mq_port,
username=options.mq_username,
password=options.mq_password),
include=[
-
+ "tasks.message_tasks"
])
selftest_task_queue.conf.update(**queue_conf)
|
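What the include list buys: the worker imports every module named there at startup, so tasks decorated inside get registered. A hypothetical tasks/message_tasks.py consistent with the entry added above:

from tasks import selftest_task_queue

@selftest_task_queue.task
def check_spf(domain):
    # Hypothetical task. Without 'tasks.message_tasks' in include=[...],
    # sending it would fail with 'Received unregistered task'.
    return domain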
fb4bac2a228a196359317f338c3f1e6643c3837d
|
nova/tests/unit/compute/fake_resource_tracker.py
|
nova/tests/unit/compute/fake_resource_tracker.py
|
from nova.compute import resource_tracker
class FakeResourceTracker(resource_tracker.ResourceTracker):
"""Version without a DB requirement."""
def _create(self, context, values):
self._write_ext_resources(values)
self.compute_node = values
self.compute_node['id'] = 1
def _update(self, context):
self._write_ext_resources(self.compute_node)
|
from nova.compute import resource_tracker
class FakeResourceTracker(resource_tracker.ResourceTracker):
"""Version without a DB requirement."""
def _update(self, context):
self._write_ext_resources(self.compute_node)
|
Remove an unused method in FakeResourceTracker
|
Remove an unused method in FakeResourceTracker
Nothing calls _create and there is no _create in the super class for
this to be overriding.
Change-Id: Ic41f2d249b9aaffb2caaa18dd492924a4ceb3653
|
Python
|
apache-2.0
|
gooddata/openstack-nova,Juniper/nova,mikalstill/nova,cernops/nova,cernops/nova,vmturbo/nova,NeCTAR-RC/nova,klmitch/nova,jianghuaw/nova,mikalstill/nova,klmitch/nova,Juniper/nova,cloudbase/nova,cernops/nova,openstack/nova,rahulunair/nova,gooddata/openstack-nova,jianghuaw/nova,klmitch/nova,NeCTAR-RC/nova,mikalstill/nova,hanlind/nova,rajalokan/nova,vmturbo/nova,jianghuaw/nova,cyx1231st/nova,openstack/nova,bigswitch/nova,phenoxim/nova,sebrandon1/nova,gooddata/openstack-nova,gooddata/openstack-nova,mahak/nova,zhimin711/nova,mahak/nova,cloudbase/nova,alaski/nova,BeyondTheClouds/nova,zhimin711/nova,alaski/nova,vmturbo/nova,hanlind/nova,openstack/nova,rajalokan/nova,CEG-FYP-OpenStack/scheduler,sebrandon1/nova,BeyondTheClouds/nova,bigswitch/nova,rajalokan/nova,vmturbo/nova,Juniper/nova,phenoxim/nova,cloudbase/nova,cyx1231st/nova,sebrandon1/nova,mahak/nova,CEG-FYP-OpenStack/scheduler,BeyondTheClouds/nova,klmitch/nova,rahulunair/nova,hanlind/nova,jianghuaw/nova,rahulunair/nova,rajalokan/nova,Juniper/nova
|
from nova.compute import resource_tracker
class FakeResourceTracker(resource_tracker.ResourceTracker):
"""Version without a DB requirement."""
- def _create(self, context, values):
- self._write_ext_resources(values)
- self.compute_node = values
- self.compute_node['id'] = 1
-
def _update(self, context):
self._write_ext_resources(self.compute_node)
|
Remove an unused method in FakeResourceTracker
|
## Code Before:
from nova.compute import resource_tracker
class FakeResourceTracker(resource_tracker.ResourceTracker):
"""Version without a DB requirement."""
def _create(self, context, values):
self._write_ext_resources(values)
self.compute_node = values
self.compute_node['id'] = 1
def _update(self, context):
self._write_ext_resources(self.compute_node)
## Instruction:
Remove an unused method in FakeResourceTracker
## Code After:
from nova.compute import resource_tracker
class FakeResourceTracker(resource_tracker.ResourceTracker):
"""Version without a DB requirement."""
def _update(self, context):
self._write_ext_resources(self.compute_node)
|
from nova.compute import resource_tracker
class FakeResourceTracker(resource_tracker.ResourceTracker):
"""Version without a DB requirement."""
- def _create(self, context, values):
- self._write_ext_resources(values)
- self.compute_node = values
- self.compute_node['id'] = 1
-
def _update(self, context):
self._write_ext_resources(self.compute_node)
|
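A sanity check one can run before deleting such an override: confirm the base class defines no _create for the fake to shadow (stand-in classes here, not the real ResourceTracker):

class Base(object):
    def _update(self, context):
        pass

class Fake(Base):
    def _update(self, context):
        pass

assert not hasattr(Base, '_create')  # nothing left for Fake to override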
d675dbcab18d56ae4c2c2f05d342159c1032b7b4
|
polling_stations/apps/data_importers/management/commands/import_fake_exeter.py
|
polling_stations/apps/data_importers/management/commands/import_fake_exeter.py
|
from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
from pathlib import Path
def make_base_folder_path():
base_folder_path = Path.cwd() / Path("test_data/pollingstations_data/EXE")
return str(base_folder_path)
class Command(BaseXpressDemocracyClubCsvImporter):
local_files = True
base_folder_path = make_base_folder_path()
council_id = "EXE"
addresses_name = "Democracy_Club__02May2019exe.CSV"
stations_name = "Democracy_Club__02May2019exe.CSV"
|
from django.contrib.gis.geos import Point
from addressbase.models import UprnToCouncil
from data_importers.mixins import AdvanceVotingMixin
from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
from pathlib import Path
from pollingstations.models import AdvanceVotingStation
def make_base_folder_path():
base_folder_path = Path.cwd() / Path("test_data/pollingstations_data/EXE")
return str(base_folder_path)
class Command(BaseXpressDemocracyClubCsvImporter, AdvanceVotingMixin):
local_files = True
base_folder_path = make_base_folder_path()
council_id = "EXE"
addresses_name = "Democracy_Club__02May2019exe.CSV"
stations_name = "Democracy_Club__02May2019exe.CSV"
def add_advance_voting_stations(self):
advance_station = AdvanceVotingStation(
name="Exeter Guildhall",
address="""Exeter City Council
Civic Centre
Paris Street
Exeter
Devon
""",
postcode="EX1 1JN",
location=Point(-3.5245510056787057, 50.72486002944331, srid=4326),
)
advance_station.save()
UprnToCouncil.objects.filter(lad=self.council.geography.gss).update(
advance_voting_station=advance_station
)
|
Add Advance Voting stations to fake Exeter importer
|
Add Advance Voting stations to fake Exeter importer
|
Python
|
bsd-3-clause
|
DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
|
+ from django.contrib.gis.geos import Point
+
+ from addressbase.models import UprnToCouncil
+ from data_importers.mixins import AdvanceVotingMixin
from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
from pathlib import Path
+
+ from pollingstations.models import AdvanceVotingStation
def make_base_folder_path():
base_folder_path = Path.cwd() / Path("test_data/pollingstations_data/EXE")
return str(base_folder_path)
- class Command(BaseXpressDemocracyClubCsvImporter):
+ class Command(BaseXpressDemocracyClubCsvImporter, AdvanceVotingMixin):
local_files = True
base_folder_path = make_base_folder_path()
council_id = "EXE"
addresses_name = "Democracy_Club__02May2019exe.CSV"
stations_name = "Democracy_Club__02May2019exe.CSV"
+ def add_advance_voting_stations(self):
+ advance_station = AdvanceVotingStation(
+ name="Exeter Guildhall",
+ address="""Exeter City Council
+ Civic Centre
+ Paris Street
+ Exeter
+ Devon
+ """,
+ postcode="EX1 1JN",
+ location=Point(-3.5245510056787057, 50.72486002944331, srid=4326),
+ )
+ advance_station.save()
+ UprnToCouncil.objects.filter(lad=self.council.geography.gss).update(
+ advance_voting_station=advance_station
+ )
+
|
Add Advance Voting stations to fake Exeter importer
|
## Code Before:
from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
from pathlib import Path
def make_base_folder_path():
base_folder_path = Path.cwd() / Path("test_data/pollingstations_data/EXE")
return str(base_folder_path)
class Command(BaseXpressDemocracyClubCsvImporter):
local_files = True
base_folder_path = make_base_folder_path()
council_id = "EXE"
addresses_name = "Democracy_Club__02May2019exe.CSV"
stations_name = "Democracy_Club__02May2019exe.CSV"
## Instruction:
Add Advance Voting stations to fake Exeter importer
## Code After:
from django.contrib.gis.geos import Point
from addressbase.models import UprnToCouncil
from data_importers.mixins import AdvanceVotingMixin
from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
from pathlib import Path
from pollingstations.models import AdvanceVotingStation
def make_base_folder_path():
base_folder_path = Path.cwd() / Path("test_data/pollingstations_data/EXE")
return str(base_folder_path)
class Command(BaseXpressDemocracyClubCsvImporter, AdvanceVotingMixin):
local_files = True
base_folder_path = make_base_folder_path()
council_id = "EXE"
addresses_name = "Democracy_Club__02May2019exe.CSV"
stations_name = "Democracy_Club__02May2019exe.CSV"
def add_advance_voting_stations(self):
advance_station = AdvanceVotingStation(
name="Exeter Guildhall",
address="""Exeter City Council
Civic Centre
Paris Street
Exeter
Devon
""",
postcode="EX1 1JN",
location=Point(-3.5245510056787057, 50.72486002944331, srid=4326),
)
advance_station.save()
UprnToCouncil.objects.filter(lad=self.council.geography.gss).update(
advance_voting_station=advance_station
)
|
+ from django.contrib.gis.geos import Point
+
+ from addressbase.models import UprnToCouncil
+ from data_importers.mixins import AdvanceVotingMixin
from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
from pathlib import Path
+
+ from pollingstations.models import AdvanceVotingStation
def make_base_folder_path():
base_folder_path = Path.cwd() / Path("test_data/pollingstations_data/EXE")
return str(base_folder_path)
- class Command(BaseXpressDemocracyClubCsvImporter):
+ class Command(BaseXpressDemocracyClubCsvImporter, AdvanceVotingMixin):
? ++++++++++++++++++++
local_files = True
base_folder_path = make_base_folder_path()
council_id = "EXE"
addresses_name = "Democracy_Club__02May2019exe.CSV"
stations_name = "Democracy_Club__02May2019exe.CSV"
+
+ def add_advance_voting_stations(self):
+ advance_station = AdvanceVotingStation(
+ name="Exeter Guildhall",
+ address="""Exeter City Council
+ Civic Centre
+ Paris Street
+ Exeter
+ Devon
+ """,
+ postcode="EX1 1JN",
+ location=Point(-3.5245510056787057, 50.72486002944331, srid=4326),
+ )
+ advance_station.save()
+ UprnToCouncil.objects.filter(lad=self.council.geography.gss).update(
+ advance_voting_station=advance_station
+ )
|
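One detail worth flagging in the fixture above: GEOS Point takes coordinates as (x, y), i.e. (longitude, latitude), so the Guildhall point is longitude first. Quick check:

from django.contrib.gis.geos import Point

p = Point(-3.5245510056787057, 50.72486002944331, srid=4326)
print(p.x, p.y)  # -3.52455... (longitude)  50.72486... (latitude)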
29e6e77b03569d39e484b47efd3b8230f30ee195
|
eduid_signup/db.py
|
eduid_signup/db.py
|
import pymongo
from eduid_signup.compat import urlparse
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.Connection):
self.db_uri = urlparse.urlparse(db_uri)
self.connection = connection_factory(
host=self.db_uri.hostname or DEFAULT_MONGODB_HOST,
port=self.db_uri.port or DEFAULT_MONGODB_PORT,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
|
import pymongo
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.MongoClient):
self.db_uri = db_uri
self.connection = connection_factory(
host=self.db_uri,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
|
Allow Mongo connections to Mongo Replicaset Cluster
|
Allow Mongo connections to Mongo Replicaset Cluster
|
Python
|
bsd-3-clause
|
SUNET/eduid-signup,SUNET/eduid-signup,SUNET/eduid-signup
|
import pymongo
-
- from eduid_signup.compat import urlparse
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
- connection_factory=pymongo.Connection):
+ connection_factory=pymongo.MongoClient):
- self.db_uri = urlparse.urlparse(db_uri)
+ self.db_uri = db_uri
self.connection = connection_factory(
+ host=self.db_uri,
- host=self.db_uri.hostname or DEFAULT_MONGODB_HOST,
- port=self.db_uri.port or DEFAULT_MONGODB_PORT,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
|
Allow Mongo connections to Mongo Replicaset Cluster
|
## Code Before:
import pymongo
from eduid_signup.compat import urlparse
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.Connection):
self.db_uri = urlparse.urlparse(db_uri)
self.connection = connection_factory(
host=self.db_uri.hostname or DEFAULT_MONGODB_HOST,
port=self.db_uri.port or DEFAULT_MONGODB_PORT,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
## Instruction:
Allow Mongo connections to Mongo Replicaset Cluster
## Code After:
import pymongo
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.MongoClient):
self.db_uri = db_uri
self.connection = connection_factory(
host=self.db_uri,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
|
import pymongo
-
- from eduid_signup.compat import urlparse
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
- connection_factory=pymongo.Connection):
? ^ -- ---
+ connection_factory=pymongo.MongoClient):
? ^ ++++++
- self.db_uri = urlparse.urlparse(db_uri)
+ self.db_uri = db_uri
self.connection = connection_factory(
+ host=self.db_uri,
- host=self.db_uri.hostname or DEFAULT_MONGODB_HOST,
- port=self.db_uri.port or DEFAULT_MONGODB_PORT,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
|
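The point of passing the URI straight through: MongoClient parses the full connection string itself, including the multi-member replica-set form that the old urlparse-based splitting could not express. A hedged sketch in which hostnames, credentials, and the replica-set name are all hypothetical:

import pymongo

uri = 'mongodb://user:secret@db1:27017,db2:27017,db3:27017/eduid?replicaSet=rs0'
client = pymongo.MongoClient(host=uri, tz_aware=True)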
2d908f812a0cfeab18e36733ec3380e507865c20
|
tests/test_auth.py
|
tests/test_auth.py
|
from unittest import TestCase
class TestOneAll(TestCase):
def test_whether_test_runs(self):
self.assertTrue(True)
|
from __future__ import absolute_import, division, print_function, unicode_literals
from unittest import TestCase, main
from pyoneall import OneAll
from pyoneall.classes import BadOneAllCredentials, Connections
class TestOneAll(TestCase):
VALID_CREDENTIALS = {
'site_name': 'python',
'public_key': '2d27cffd-1ced-4991-83d1-acce715461e5',
# I really hope this doesn't Jynx my accounts.
'private_key': '84d94998-4029-4ac3-be9b-f2825100da6a',
}
INVALID_CREDENTIALS = {
'site_name': 'python',
'public_key': '01234567-89ab-cdef-0123-456789abcdef',
'private_key': '01234567-89ab-cdef-0123-456789abcdef',
}
def test_00_whether_test_runs(self):
self.assertTrue(True)
def test_01_users_list(self):
auth = OneAll(**self.VALID_CREDENTIALS)
c = auth.connections()
self.assertIsInstance(c, Connections)
def test_02_bad_credentials(self):
auth = OneAll(**self.INVALID_CREDENTIALS)
with self.assertRaises(BadOneAllCredentials):
auth.connections()
def dont_test_03_swapped_credentials(self):
kwargs = dict(self.VALID_CREDENTIALS)
kwargs['private_key'], kwargs['public_key'] = kwargs['public_key'], kwargs['private_key']
auth = OneAll(**kwargs)
# How should this result be different from test 02?
with self.assertRaises(BadOneAllCredentials):
auth.connections()
if __name__ == '__main__':
main()
|
Test suite is taking shape. :)
|
Test suite is taking shape. :)
|
Python
|
mit
|
leandigo/pyoneall
|
+ from __future__ import absolute_import, division, print_function, unicode_literals
- from unittest import TestCase
+ from unittest import TestCase, main
+
+ from pyoneall import OneAll
+ from pyoneall.classes import BadOneAllCredentials, Connections
class TestOneAll(TestCase):
+ VALID_CREDENTIALS = {
+ 'site_name': 'python',
+ 'public_key': '2d27cffd-1ced-4991-83d1-acce715461e5',
+ # I really hope this doesn't Jynx my accounts.
+ 'private_key': '84d94998-4029-4ac3-be9b-f2825100da6a',
+ }
+
+ INVALID_CREDENTIALS = {
+ 'site_name': 'python',
+ 'public_key': '01234567-89ab-cdef-0123-456789abcdef',
+ 'private_key': '01234567-89ab-cdef-0123-456789abcdef',
+ }
+
- def test_whether_test_runs(self):
+ def test_00_whether_test_runs(self):
self.assertTrue(True)
+ def test_01_users_list(self):
+ auth = OneAll(**self.VALID_CREDENTIALS)
+ c = auth.connections()
+ self.assertIsInstance(c, Connections)
+
+ def test_02_bad_credentials(self):
+ auth = OneAll(**self.INVALID_CREDENTIALS)
+ with self.assertRaises(BadOneAllCredentials):
+ auth.connections()
+
+ def dont_test_03_swapped_credentials(self):
+ kwargs = dict(self.VALID_CREDENTIALS)
+ kwargs['private_key'], kwargs['public_key'] = kwargs['public_key'], kwargs['private_key']
+ auth = OneAll(**kwargs)
+ # How should this result be different from test 02?
+ with self.assertRaises(BadOneAllCredentials):
+ auth.connections()
+
+
+ if __name__ == '__main__':
+ main()
+
|
Test suite is taking shape. :)
|
## Code Before:
from unittest import TestCase
class TestOneAll(TestCase):
def test_whether_test_runs(self):
self.assertTrue(True)
## Instruction:
Test suite is taking shape. :)
## Code After:
from __future__ import absolute_import, division, print_function, unicode_literals
from unittest import TestCase, main
from pyoneall import OneAll
from pyoneall.classes import BadOneAllCredentials, Connections
class TestOneAll(TestCase):
VALID_CREDENTIALS = {
'site_name': 'python',
'public_key': '2d27cffd-1ced-4991-83d1-acce715461e5',
# I really hope this doesn't Jynx my accounts.
'private_key': '84d94998-4029-4ac3-be9b-f2825100da6a',
}
INVALID_CREDENTIALS = {
'site_name': 'python',
'public_key': '01234567-89ab-cdef-0123-456789abcdef',
'private_key': '01234567-89ab-cdef-0123-456789abcdef',
}
def test_00_whether_test_runs(self):
self.assertTrue(True)
def test_01_users_list(self):
auth = OneAll(**self.VALID_CREDENTIALS)
c = auth.connections()
self.assertIsInstance(c, Connections)
def test_02_bad_credentials(self):
auth = OneAll(**self.INVALID_CREDENTIALS)
with self.assertRaises(BadOneAllCredentials):
auth.connections()
def dont_test_03_swapped_credentials(self):
kwargs = dict(self.VALID_CREDENTIALS)
kwargs['private_key'], kwargs['public_key'] = kwargs['public_key'], kwargs['private_key']
auth = OneAll(**kwargs)
# How should this result be different from test 02?
with self.assertRaises(BadOneAllCredentials):
auth.connections()
if __name__ == '__main__':
main()
|
+ from __future__ import absolute_import, division, print_function, unicode_literals
- from unittest import TestCase
+ from unittest import TestCase, main
? ++++++
+
+ from pyoneall import OneAll
+ from pyoneall.classes import BadOneAllCredentials, Connections
class TestOneAll(TestCase):
+ VALID_CREDENTIALS = {
+ 'site_name': 'python',
+ 'public_key': '2d27cffd-1ced-4991-83d1-acce715461e5',
+ # I really hope this doesn't Jynx my accounts.
+ 'private_key': '84d94998-4029-4ac3-be9b-f2825100da6a',
+ }
+
+ INVALID_CREDENTIALS = {
+ 'site_name': 'python',
+ 'public_key': '01234567-89ab-cdef-0123-456789abcdef',
+ 'private_key': '01234567-89ab-cdef-0123-456789abcdef',
+ }
+
- def test_whether_test_runs(self):
+ def test_00_whether_test_runs(self):
? +++
self.assertTrue(True)
+
+ def test_01_users_list(self):
+ auth = OneAll(**self.VALID_CREDENTIALS)
+ c = auth.connections()
+ self.assertIsInstance(c, Connections)
+
+ def test_02_bad_credentials(self):
+ auth = OneAll(**self.INVALID_CREDENTIALS)
+ with self.assertRaises(BadOneAllCredentials):
+ auth.connections()
+
+ def dont_test_03_swapped_credentials(self):
+ kwargs = dict(self.VALID_CREDENTIALS)
+ kwargs['private_key'], kwargs['public_key'] = kwargs['public_key'], kwargs['private_key']
+ auth = OneAll(**kwargs)
+ # How should this result be different from test 02?
+ with self.assertRaises(BadOneAllCredentials):
+ auth.connections()
+
+
+ if __name__ == '__main__':
+ main()
|
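A side note on the test_00/test_01 naming used above: unittest runs a class's test methods in sorted string order, so the numeric prefixes pin the execution order. Minimal illustration:

import unittest

class Ordered(unittest.TestCase):
    def test_00_runs_first(self):
        self.assertTrue(True)

    def test_01_runs_second(self):
        self.assertTrue(True)

if __name__ == '__main__':
    unittest.main()  # test_00_runs_first executes before test_01_runs_second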
7b1773d5c3fa07899ad9d56d4ac488c1c2e2014e
|
dope_cherry.py
|
dope_cherry.py
|
from dope import app
import cherrypy
# graft to tree root
cherrypy.tree.graft(app)
# configure
cherrypy.config.update({
'server.socket_port': 80,
'server.socket_host': '0.0.0.0',
'run_as_user': 'nobody',
'run_as_group': 'nogroup',
})
cherrypy.config.update('dope_cherry.cfg')
# drop priviledges
cherrypy.process.plugins.DropPrivileges(cherrypy.engine, uid = cherrypy.config['run_as_user'], gid = cherrypy.config['run_as_group']).subscribe()
|
from dope import app
import cherrypy
# graft to tree root
cherrypy.tree.graft(app)
# configure
cherrypy.config.update({
'server.socket_port': 80,
'server.socket_host': '0.0.0.0',
'server.max_request_body_size': 0, # unlimited
'run_as_user': 'nobody',
'run_as_group': 'nogroup',
})
cherrypy.config.update('dope_cherry.cfg')
# drop priviledges
cherrypy.process.plugins.DropPrivileges(cherrypy.engine, uid = cherrypy.config['run_as_user'], gid = cherrypy.config['run_as_group']).subscribe()
|
Set server.max_request_body_size in cherrypy settings to allow more than 100M uploads.
|
Set server.max_request_body_size in cherrypy settings to allow more than 100M uploads.
|
Python
|
mit
|
mbr/dope,mbr/dope
|
from dope import app
import cherrypy
# graft to tree root
cherrypy.tree.graft(app)
# configure
cherrypy.config.update({
'server.socket_port': 80,
'server.socket_host': '0.0.0.0',
+ 'server.max_request_body_size': 0, # unlimited
'run_as_user': 'nobody',
'run_as_group': 'nogroup',
})
cherrypy.config.update('dope_cherry.cfg')
# drop priviledges
cherrypy.process.plugins.DropPrivileges(cherrypy.engine, uid = cherrypy.config['run_as_user'], gid = cherrypy.config['run_as_group']).subscribe()
|
Set server.max_request_body_size in cherrypy settings to allow more than 100M uploads.
|
## Code Before:
from dope import app
import cherrypy
# graft to tree root
cherrypy.tree.graft(app)
# configure
cherrypy.config.update({
'server.socket_port': 80,
'server.socket_host': '0.0.0.0',
'run_as_user': 'nobody',
'run_as_group': 'nogroup',
})
cherrypy.config.update('dope_cherry.cfg')
# drop priviledges
cherrypy.process.plugins.DropPrivileges(cherrypy.engine, uid = cherrypy.config['run_as_user'], gid = cherrypy.config['run_as_group']).subscribe()
## Instruction:
Set server.max_request_body_size in cherrypy settings to allow more than 100M uploads.
## Code After:
from dope import app
import cherrypy
# graft to tree root
cherrypy.tree.graft(app)
# configure
cherrypy.config.update({
'server.socket_port': 80,
'server.socket_host': '0.0.0.0',
'server.max_request_body_size': 0, # unlimited
'run_as_user': 'nobody',
'run_as_group': 'nogroup',
})
cherrypy.config.update('dope_cherry.cfg')
# drop priviledges
cherrypy.process.plugins.DropPrivileges(cherrypy.engine, uid = cherrypy.config['run_as_user'], gid = cherrypy.config['run_as_group']).subscribe()
|
from dope import app
import cherrypy
# graft to tree root
cherrypy.tree.graft(app)
# configure
cherrypy.config.update({
'server.socket_port': 80,
'server.socket_host': '0.0.0.0',
+ 'server.max_request_body_size': 0, # unlimited
'run_as_user': 'nobody',
'run_as_group': 'nogroup',
})
cherrypy.config.update('dope_cherry.cfg')
# drop privileges
cherrypy.process.plugins.DropPrivileges(cherrypy.engine, uid = cherrypy.config['run_as_user'], gid = cherrypy.config['run_as_group']).subscribe()
|
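CherryPy rejects request bodies larger than roughly 100 MB by default, which is why `server.max_request_body_size` is the one setting this commit adds; `0` disables the check entirely. A hedged sketch of the more conservative middle ground, a finite cap (the 2 GiB figure is illustrative, not from the original):

import cherrypy

cherrypy.config.update({
    # Cap uploads at 2 GiB instead of removing the limit altogether;
    # 0 would disable CherryPy's body-size check entirely.
    'server.max_request_body_size': 2 * 1024 ** 3,
})
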
cdefa6cb4a91cbbac5d2680fe2e116a2a4ebb86b
|
recipe_scrapers/allrecipes.py
|
recipe_scrapers/allrecipes.py
|
from ._abstract import AbstractScraper
class AllRecipes(AbstractScraper):
@classmethod
def host(cls):
return "allrecipes.com"
def author(self):
# NB: In the schema.org 'Recipe' type, the 'author' property is a
# single-value type, not an ItemList.
# allrecipes.com seems to render the author property as a list
# containing a single item under some circumstances.
# In those cases, the SchemaOrg class will fail due to the unexpected
# type, and this method is called as a fallback.
# Rather than implement non-standard handling in SchemaOrg, this code
# provides a (hopefully temporary!) allrecipes-specific workaround.
author = self.schema.data.get("author")
if author and type(author) == list and len(author) == 1:
return author[0].get("name")
def title(self):
return self.schema.title()
def total_time(self):
return self.schema.total_time()
def yields(self):
return self.schema.yields()
def image(self):
return self.schema.image()
def ingredients(self):
return self.schema.ingredients()
def instructions(self):
return self.schema.instructions()
def ratings(self):
return self.schema.ratings()
|
from ._abstract import AbstractScraper
class AllRecipes(AbstractScraper):
@classmethod
def host(cls):
return "allrecipes.com"
def author(self):
# NB: In the schema.org 'Recipe' type, the 'author' property is a
# single-value type, not an ItemList.
# allrecipes.com seems to render the author property as a list
# containing a single item under some circumstances.
# In those cases, the SchemaOrg class will fail due to the unexpected
# type, and this method is called as a fallback.
# Rather than implement non-standard handling in SchemaOrg, this code
# provides a (hopefully temporary!) allrecipes-specific workaround.
author = self.schema.data.get("author")
if author and isinstance(author, list) and len(author) == 1:
return author[0].get("name")
def title(self):
return self.schema.title()
def total_time(self):
return self.schema.total_time()
def yields(self):
return self.schema.yields()
def image(self):
return self.schema.image()
def ingredients(self):
return self.schema.ingredients()
def instructions(self):
return self.schema.instructions()
def ratings(self):
return self.schema.ratings()
|
Use 'isinstance' in preference to 'type' method
|
Use 'isinstance' in preference to 'type' method
|
Python
|
mit
|
hhursev/recipe-scraper
|
from ._abstract import AbstractScraper
class AllRecipes(AbstractScraper):
@classmethod
def host(cls):
return "allrecipes.com"
def author(self):
# NB: In the schema.org 'Recipe' type, the 'author' property is a
# single-value type, not an ItemList.
# allrecipes.com seems to render the author property as a list
# containing a single item under some circumstances.
# In those cases, the SchemaOrg class will fail due to the unexpected
# type, and this method is called as a fallback.
# Rather than implement non-standard handling in SchemaOrg, this code
# provides a (hopefully temporary!) allrecipes-specific workaround.
author = self.schema.data.get("author")
- if author and type(author) == list and len(author) == 1:
+ if author and isinstance(author, list) and len(author) == 1:
return author[0].get("name")
def title(self):
return self.schema.title()
def total_time(self):
return self.schema.total_time()
def yields(self):
return self.schema.yields()
def image(self):
return self.schema.image()
def ingredients(self):
return self.schema.ingredients()
def instructions(self):
return self.schema.instructions()
def ratings(self):
return self.schema.ratings()
|
Use 'isinstance' in preference to 'type' method
|
## Code Before:
from ._abstract import AbstractScraper
class AllRecipes(AbstractScraper):
@classmethod
def host(cls):
return "allrecipes.com"
def author(self):
# NB: In the schema.org 'Recipe' type, the 'author' property is a
# single-value type, not an ItemList.
# allrecipes.com seems to render the author property as a list
# containing a single item under some circumstances.
# In those cases, the SchemaOrg class will fail due to the unexpected
# type, and this method is called as a fallback.
# Rather than implement non-standard handling in SchemaOrg, this code
# provides a (hopefully temporary!) allrecipes-specific workaround.
author = self.schema.data.get("author")
if author and type(author) == list and len(author) == 1:
return author[0].get("name")
def title(self):
return self.schema.title()
def total_time(self):
return self.schema.total_time()
def yields(self):
return self.schema.yields()
def image(self):
return self.schema.image()
def ingredients(self):
return self.schema.ingredients()
def instructions(self):
return self.schema.instructions()
def ratings(self):
return self.schema.ratings()
## Instruction:
Use 'isinstance' in preference to 'type' method
## Code After:
from ._abstract import AbstractScraper
class AllRecipes(AbstractScraper):
@classmethod
def host(cls):
return "allrecipes.com"
def author(self):
# NB: In the schema.org 'Recipe' type, the 'author' property is a
# single-value type, not an ItemList.
# allrecipes.com seems to render the author property as a list
# containing a single item under some circumstances.
# In those cases, the SchemaOrg class will fail due to the unexpected
# type, and this method is called as a fallback.
# Rather than implement non-standard handling in SchemaOrg, this code
# provides a (hopefully temporary!) allrecipes-specific workaround.
author = self.schema.data.get("author")
if author and isinstance(author, list) and len(author) == 1:
return author[0].get("name")
def title(self):
return self.schema.title()
def total_time(self):
return self.schema.total_time()
def yields(self):
return self.schema.yields()
def image(self):
return self.schema.image()
def ingredients(self):
return self.schema.ingredients()
def instructions(self):
return self.schema.instructions()
def ratings(self):
return self.schema.ratings()
|
from ._abstract import AbstractScraper
class AllRecipes(AbstractScraper):
@classmethod
def host(cls):
return "allrecipes.com"
def author(self):
# NB: In the schema.org 'Recipe' type, the 'author' property is a
# single-value type, not an ItemList.
# allrecipes.com seems to render the author property as a list
# containing a single item under some circumstances.
# In those cases, the SchemaOrg class will fail due to the unexpected
# type, and this method is called as a fallback.
# Rather than implement non-standard handling in SchemaOrg, this code
# provides a (hopefully temporary!) allrecipes-specific workaround.
author = self.schema.data.get("author")
- if author and type(author) == list and len(author) == 1:
? ^^ ^^^^
+ if author and isinstance(author, list) and len(author) == 1:
? +++++ ^^^ ^ +
return author[0].get("name")
def title(self):
return self.schema.title()
def total_time(self):
return self.schema.total_time()
def yields(self):
return self.schema.yields()
def image(self):
return self.schema.image()
def ingredients(self):
return self.schema.ingredients()
def instructions(self):
return self.schema.instructions()
def ratings(self):
return self.schema.ratings()
|
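The `isinstance` swap matters beyond style: `type(x) == list` is an exact-type comparison, so any `list` subclass fails it, while `isinstance` accepts subclasses and can check several types at once. A minimal, self-contained illustration (`AuthorList` is hypothetical, not allrecipes data):

class AuthorList(list):
    """Hypothetical subclass, e.g. returned by a JSON wrapper."""

author = AuthorList([{"name": "Ada"}])
print(type(author) == list)               # False: exact-type check rejects the subclass
print(isinstance(author, list))           # True: the subclass still passes
print(isinstance(author, (list, tuple)))  # isinstance also takes a tuple of types
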
d042f4ced40d8d03bd65edf798a29058f26e98c6
|
test/test_wsstat.py
|
test/test_wsstat.py
|
import hashlib
from wsstat.main import WebsocketTestingClient, ConnectedWebsocketConnection
class Tests(object):
def setup(self):
self.client = WebsocketTestingClient('wss://testserver/', total_connections=1, max_connecting_sockets=1)
def teardown(self):
pass
class TestConnectedWebsocketConnection:
def setup(self):
self.token = hashlib.sha256(b'derp').hexdigest()
self.socket = ConnectedWebsocketConnection(None, self.token)
def test_message_increment(self):
assert self.socket.message_count == 0
self.socket.increment_message_counter()
assert self.socket.message_count == 1
self.socket.increment_message_counter()
assert self.socket.message_count == 2
def test_socket_as_string(self):
assert str(self.socket) == "<Websocket {}>".format(self.socket.id)
|
import hashlib
from wsstat.main import WebsocketTestingClient, ConnectedWebsocketConnection
class Tests(object):
def setup(self):
self.client = WebsocketTestingClient('wss://testserver/', total_connections=3, max_connecting_sockets=3)
def test_coroutines(self):
print(self.client)
assert len(self.client.tasks._children) == (1 + self.client.total_connections)
class TestConnectedWebsocketConnection:
def setup(self):
self.token = hashlib.sha256(b'derp').hexdigest()
self.socket = ConnectedWebsocketConnection(None, self.token)
def test_message_increment(self):
assert self.socket.message_count == 0
self.socket.increment_message_counter()
assert self.socket.message_count == 1
self.socket.increment_message_counter()
assert self.socket.message_count == 2
def test_socket_as_string(self):
assert str(self.socket) == "<Websocket {}>".format(self.socket.id)
|
Add a test for running tasks
|
Add a test for running tasks
|
Python
|
mit
|
Fitblip/wsstat
|
import hashlib
from wsstat.main import WebsocketTestingClient, ConnectedWebsocketConnection
class Tests(object):
def setup(self):
- self.client = WebsocketTestingClient('wss://testserver/', total_connections=1, max_connecting_sockets=1)
+ self.client = WebsocketTestingClient('wss://testserver/', total_connections=3, max_connecting_sockets=3)
- def teardown(self):
+ def test_coroutines(self):
- pass
+ print(self.client)
+ assert len(self.client.tasks._children) == (1 + self.client.total_connections)
+
class TestConnectedWebsocketConnection:
def setup(self):
self.token = hashlib.sha256(b'derp').hexdigest()
self.socket = ConnectedWebsocketConnection(None, self.token)
def test_message_increment(self):
assert self.socket.message_count == 0
self.socket.increment_message_counter()
assert self.socket.message_count == 1
self.socket.increment_message_counter()
assert self.socket.message_count == 2
def test_socket_as_string(self):
assert str(self.socket) == "<Websocket {}>".format(self.socket.id)
|
Add a test for running tasks
|
## Code Before:
import hashlib
from wsstat.main import WebsocketTestingClient, ConnectedWebsocketConnection
class Tests(object):
def setup(self):
self.client = WebsocketTestingClient('wss://testserver/', total_connections=1, max_connecting_sockets=1)
def teardown(self):
pass
class TestConnectedWebsocketConnection:
def setup(self):
self.token = hashlib.sha256(b'derp').hexdigest()
self.socket = ConnectedWebsocketConnection(None, self.token)
def test_message_increment(self):
assert self.socket.message_count == 0
self.socket.increment_message_counter()
assert self.socket.message_count == 1
self.socket.increment_message_counter()
assert self.socket.message_count == 2
def test_socket_as_string(self):
assert str(self.socket) == "<Websocket {}>".format(self.socket.id)
## Instruction:
Add a test for running tasks
## Code After:
import hashlib
from wsstat.main import WebsocketTestingClient, ConnectedWebsocketConnection
class Tests(object):
def setup(self):
self.client = WebsocketTestingClient('wss://testserver/', total_connections=3, max_connecting_sockets=3)
def test_coroutines(self):
print(self.client)
assert len(self.client.tasks._children) == (1 + self.client.total_connections)
class TestConnectedWebsocketConnection:
def setup(self):
self.token = hashlib.sha256(b'derp').hexdigest()
self.socket = ConnectedWebsocketConnection(None, self.token)
def test_message_increment(self):
assert self.socket.message_count == 0
self.socket.increment_message_counter()
assert self.socket.message_count == 1
self.socket.increment_message_counter()
assert self.socket.message_count == 2
def test_socket_as_string(self):
assert str(self.socket) == "<Websocket {}>".format(self.socket.id)
|
import hashlib
from wsstat.main import WebsocketTestingClient, ConnectedWebsocketConnection
class Tests(object):
def setup(self):
- self.client = WebsocketTestingClient('wss://testserver/', total_connections=1, max_connecting_sockets=1)
? ^ ^
+ self.client = WebsocketTestingClient('wss://testserver/', total_connections=3, max_connecting_sockets=3)
? ^ ^
- def teardown(self):
? ^ - ^
+ def test_coroutines(self):
? ^^^^^ ^^^ ++
- pass
+ print(self.client)
+ assert len(self.client.tasks._children) == (1 + self.client.total_connections)
+
class TestConnectedWebsocketConnection:
def setup(self):
self.token = hashlib.sha256(b'derp').hexdigest()
self.socket = ConnectedWebsocketConnection(None, self.token)
def test_message_increment(self):
assert self.socket.message_count == 0
self.socket.increment_message_counter()
assert self.socket.message_count == 1
self.socket.increment_message_counter()
assert self.socket.message_count == 2
def test_socket_as_string(self):
assert str(self.socket) == "<Websocket {}>".format(self.socket.id)
|
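The new assertion reaches into `self.client.tasks._children`, which looks like the private child list of an `asyncio.gather` future; it works, but the public API offers a sturdier way to count live tasks. A sketch, assuming it runs on the same event loop as the client's coroutines:

import asyncio

async def count_other_tasks() -> int:
    # Public-API alternative to poking at a gather future's _children:
    # every task on the running loop except the one doing the counting.
    current = asyncio.current_task()
    return len([t for t in asyncio.all_tasks() if t is not current])
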
bc4fb65f76aa011e44bbe01b7965bc99eff5d85e
|
tests/test_recalcitrant.py
|
tests/test_recalcitrant.py
|
"Test for recalcitrant and obtuse graphs to describe"
from wordgraph.points import Point
import wordgraph
import random
from utilities import EPOCH_START, time_values
def test_time_goes_backwards():
"A valid time series where time changes linearly backwards"
values = [1.0] * 10
times = (EPOCH_START-i for i in range(10))
datapoints = [Point(x=t, y=v) for (v, t) in zip(values, time)]
features = wordgraph.describe(datapoints)
assert features is None
def test_random_data():
"A time series of 50 data points where every value is random"
rng = random.Random(0)
values = [rng.random() for i in range(50)]
datapoints = time_values(values)
features = wordgraph.describe(datapoints)
assert features is None
|
"Test for recalcitrant and obtuse graphs to describe"
from wordgraph.points import Point
import wordgraph
import random
import pytest
from utilities import EPOCH_START, time_values
def test_time_goes_backwards():
"A valid time series where time changes linearly backwards"
values = [1.0] * 10
times = (EPOCH_START-i for i in range(10))
datapoints = [Point(x=t, y=v) for (v, t) in zip(values, time)]
features = wordgraph.describe(datapoints)
assert features is None
def test_random_data():
"A time series of 50 data points where every value is random"
rng = random.Random(0)
values = [rng.random() for i in range(50)]
datapoints = time_values(values)
features = wordgraph.describe(datapoints)
assert features is None
def test_too_few_points():
"""A time series with too few data points to be analysed.
Expected to raise an exception.
"""
with pytest.raises(ValueError):
features = wordgraph.describe([Point(x=0, y=0)])
def test_nonuniform_time_periods():
"""A time series where time periods are wildly different.
Expected to raise an exception.
"""
times = [1, 3, 4, 6, 7, 9, 10]
datapoints = [Point(x=t, y=1.0) for t in times]
with pytest.raises(ValueError):
features = wordgraph.describe(datapoints)
|
Test expected failures of the analyzer
|
Test expected failures of the analyzer
The analyzer is not expected to cope with too few data points or with time
series whose time ranges vary greatly. It should raise an exception in
these cases.
|
Python
|
apache-2.0
|
tleeuwenburg/wordgraph,tleeuwenburg/wordgraph
|
"Test for recalcitrant and obtuse graphs to describe"
from wordgraph.points import Point
import wordgraph
import random
+ import pytest
from utilities import EPOCH_START, time_values
def test_time_goes_backwards():
"A valid time series where time changes linearly backwards"
values = [1.0] * 10
times = (EPOCH_START-i for i in range(10))
datapoints = [Point(x=t, y=v) for (v, t) in zip(values, time)]
features = wordgraph.describe(datapoints)
assert features is None
def test_random_data():
"A time series of 50 data points where every value is random"
rng = random.Random(0)
values = [rng.random() for i in range(50)]
datapoints = time_values(values)
features = wordgraph.describe(datapoints)
assert features is None
+ def test_too_few_points():
+ """A time series with too few data points to be analysed.
+
+ Expected to raise an exception.
+ """
+ with pytest.raises(ValueError):
+ features = wordgraph.describe([Point(x=0, y=0)])
+
+ def test_nonuniform_time_periods():
+ """A time series where time periods are wildly different.
+
+ Expected to raise an exception.
+ """
+ times = [1, 3, 4, 6, 7, 9, 10]
+ datapoints = [Point(x=t, y=1.0) for t in times]
+ with pytest.raises(ValueError):
+ features = wordgraph.describe(datapoints)
+
|
Test expected failures of the analyzer
|
## Code Before:
"Test for recalcitrant and obtuse graphs to describe"
from wordgraph.points import Point
import wordgraph
import random
from utilities import EPOCH_START, time_values
def test_time_goes_backwards():
"A valid time series where time changes linearly backwards"
values = [1.0] * 10
times = (EPOCH_START-i for i in range(10))
datapoints = [Point(x=t, y=v) for (v, t) in zip(values, time)]
features = wordgraph.describe(datapoints)
assert features is None
def test_random_data():
"A time series of 50 data points where every value is random"
rng = random.Random(0)
values = [rng.random() for i in range(50)]
datapoints = time_values(values)
features = wordgraph.describe(datapoints)
assert features is None
## Instruction:
Test expected failures of the analyzer
## Code After:
"Test for recalcitrant and obtuse graphs to describe"
from wordgraph.points import Point
import wordgraph
import random
import pytest
from utilities import EPOCH_START, time_values
def test_time_goes_backwards():
"A valid time series where time changes linearly backwards"
values = [1.0] * 10
times = (EPOCH_START-i for i in range(10))
datapoints = [Point(x=t, y=v) for (v, t) in zip(values, time)]
features = wordgraph.describe(datapoints)
assert features is None
def test_random_data():
"A time series of 50 data points where every value is random"
rng = random.Random(0)
values = [rng.random() for i in range(50)]
datapoints = time_values(values)
features = wordgraph.describe(datapoints)
assert features is None
def test_too_few_points():
"""A time series with too few data points to be analysed.
Expected to raise an exception.
"""
with pytest.raises(ValueError):
features = wordgraph.describe([Point(x=0, y=0)])
def test_nonuniform_time_periods():
"""A time series where time periods are wildly different.
Expected to raise an exception.
"""
times = [1, 3, 4, 6, 7, 9, 10]
datapoints = [Point(x=t, y=1.0) for t in times]
with pytest.raises(ValueError):
features = wordgraph.describe(datapoints)
|
"Test for recalcitrant and obtuse graphs to describe"
from wordgraph.points import Point
import wordgraph
import random
+ import pytest
from utilities import EPOCH_START, time_values
def test_time_goes_backwards():
"A valid time series where time changes linearly backwards"
values = [1.0] * 10
times = (EPOCH_START-i for i in range(10))
datapoints = [Point(x=t, y=v) for (v, t) in zip(values, time)]
features = wordgraph.describe(datapoints)
assert features is None
def test_random_data():
"A time series of 50 data points where every value is random"
rng = random.Random(0)
values = [rng.random() for i in range(50)]
datapoints = time_values(values)
features = wordgraph.describe(datapoints)
assert features is None
+
+ def test_too_few_points():
+ """A time series with too few data points to be analysed.
+
+ Expected to raise an exception.
+ """
+ with pytest.raises(ValueError):
+ features = wordgraph.describe([Point(x=0, y=0)])
+
+ def test_nonuniform_time_periods():
+ """A time series where time periods are wildly different.
+
+ Expected to raise an exception.
+ """
+ times = [1, 3, 4, 6, 7, 9, 10]
+ datapoints = [Point(x=t, y=1.0) for t in times]
+ with pytest.raises(ValueError):
+ features = wordgraph.describe(datapoints)
|
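Both new tests lean on the same pytest idiom: `pytest.raises` as a context manager fails the test unless the named exception is raised inside the block. A self-contained example of the shape (the function here is a stand-in, not wordgraph API):

import pytest

def reciprocal(x: float) -> float:
    return 1.0 / x

def test_zero_input_is_rejected():
    # Passes only if ZeroDivisionError is actually raised in the block;
    # running to completion without the error fails the test.
    with pytest.raises(ZeroDivisionError):
        reciprocal(0.0)
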
648e2907a5ea5f9157b5aabe4cec10a3d952f5a7
|
tests/test_scale.py
|
tests/test_scale.py
|
from hypothesis import assume, given
from ppb_vector import Vector
from utils import angle_isclose, floats, isclose, lengths, vectors
@given(x=vectors(), length=floats())
def test_scale_to_length(x: Vector, length: float):
"""Test that the length of x.scale_to(length) is length.
Additionally, Vector.scale_to may raise:
- ZeroDivisionError if the vector is null;
- ValueError if the desired length is negative.
"""
try:
assert isclose(x.scale_to(length).length, length)
except ZeroDivisionError:
assert x == (0, 0)
except ValueError:
assert length < 0
@given(x=vectors(), length=lengths())
def test_scale_aligned(x: Vector, length: float):
"""Test that x.scale_to(length) is aligned with x."""
assume(length > 0)
try:
assert angle_isclose(x.scale_to(length).angle(x), 0)
except ZeroDivisionError:
assert x == (0, 0)
|
from hypothesis import assume, given, strategies as st
from pytest import raises # type: ignore
from ppb_vector import Vector
from utils import angle_isclose, isclose, lengths, vectors
@given(v=vectors(), length=st.floats(max_value=0))
def test_scale_negative_length(v: Vector, length: float):
"""Test that Vector.scale_to raises ValueError on negative lengths."""
assume(length < 0)
with raises(ValueError):
v.scale_to(length)
@given(x=vectors(), length=lengths())
def test_scale_to_length(x: Vector, length: float):
"""Test that the length of x.scale_to(length) is length.
Additionally, scale_to may raise ZeroDivisionError if the vector is null.
"""
try:
assert isclose(x.scale_to(length).length, length)
except ZeroDivisionError:
assert x == (0, 0)
@given(x=vectors(), length=lengths())
def test_scale_aligned(x: Vector, length: float):
"""Test that x.scale_to(length) is aligned with x."""
assume(length > 0)
try:
assert angle_isclose(x.scale_to(length).angle(x), 0)
except ZeroDivisionError:
assert x == (0, 0)
|
Make a separate test for negative lengths
|
tests/scale: Make a separate test for negative lengths
Previously, we didn't check that negative lengths raise a ValueError,
but that *if* a ValueError was raised, then the length was negative.
|
Python
|
artistic-2.0
|
ppb/ppb-vector,ppb/ppb-vector
|
- from hypothesis import assume, given
+ from hypothesis import assume, given, strategies as st
+ from pytest import raises # type: ignore
from ppb_vector import Vector
- from utils import angle_isclose, floats, isclose, lengths, vectors
+ from utils import angle_isclose, isclose, lengths, vectors
+ @given(v=vectors(), length=st.floats(max_value=0))
+ def test_scale_negative_length(v: Vector, length: float):
+ """Test that Vector.scale_to raises ValueError on negative lengths."""
+ assume(length < 0)
+ with raises(ValueError):
+ v.scale_to(length)
+
+
- @given(x=vectors(), length=floats())
+ @given(x=vectors(), length=lengths())
def test_scale_to_length(x: Vector, length: float):
"""Test that the length of x.scale_to(length) is length.
+ Additionally, scale_to may raise ZeroDivisionError if the vector is null.
- Additionally, Vector.scale_to may raise:
- - ZeroDivisionError if the vector is null;
- - ValueError if the desired length is negative.
-
"""
try:
assert isclose(x.scale_to(length).length, length)
except ZeroDivisionError:
assert x == (0, 0)
- except ValueError:
- assert length < 0
@given(x=vectors(), length=lengths())
def test_scale_aligned(x: Vector, length: float):
"""Test that x.scale_to(length) is aligned with x."""
assume(length > 0)
try:
assert angle_isclose(x.scale_to(length).angle(x), 0)
except ZeroDivisionError:
assert x == (0, 0)
|
Make a separate test for negative lengths
|
## Code Before:
from hypothesis import assume, given
from ppb_vector import Vector
from utils import angle_isclose, floats, isclose, lengths, vectors
@given(x=vectors(), length=floats())
def test_scale_to_length(x: Vector, length: float):
"""Test that the length of x.scale_to(length) is length.
Additionally, Vector.scale_to may raise:
- ZeroDivisionError if the vector is null;
- ValueError if the desired length is negative.
"""
try:
assert isclose(x.scale_to(length).length, length)
except ZeroDivisionError:
assert x == (0, 0)
except ValueError:
assert length < 0
@given(x=vectors(), length=lengths())
def test_scale_aligned(x: Vector, length: float):
"""Test that x.scale_to(length) is aligned with x."""
assume(length > 0)
try:
assert angle_isclose(x.scale_to(length).angle(x), 0)
except ZeroDivisionError:
assert x == (0, 0)
## Instruction:
Make a separate test for negative lengths
## Code After:
from hypothesis import assume, given, strategies as st
from pytest import raises # type: ignore
from ppb_vector import Vector
from utils import angle_isclose, isclose, lengths, vectors
@given(v=vectors(), length=st.floats(max_value=0))
def test_scale_negative_length(v: Vector, length: float):
"""Test that Vector.scale_to raises ValueError on negative lengths."""
assume(length < 0)
with raises(ValueError):
v.scale_to(length)
@given(x=vectors(), length=lengths())
def test_scale_to_length(x: Vector, length: float):
"""Test that the length of x.scale_to(length) is length.
Additionally, scale_to may raise ZeroDivisionError if the vector is null.
"""
try:
assert isclose(x.scale_to(length).length, length)
except ZeroDivisionError:
assert x == (0, 0)
@given(x=vectors(), length=lengths())
def test_scale_aligned(x: Vector, length: float):
"""Test that x.scale_to(length) is aligned with x."""
assume(length > 0)
try:
assert angle_isclose(x.scale_to(length).angle(x), 0)
except ZeroDivisionError:
assert x == (0, 0)
|
- from hypothesis import assume, given
+ from hypothesis import assume, given, strategies as st
? ++++++++++++++++++
+ from pytest import raises # type: ignore
from ppb_vector import Vector
- from utils import angle_isclose, floats, isclose, lengths, vectors
? --------
+ from utils import angle_isclose, isclose, lengths, vectors
+ @given(v=vectors(), length=st.floats(max_value=0))
+ def test_scale_negative_length(v: Vector, length: float):
+ """Test that Vector.scale_to raises ValueError on negative lengths."""
+ assume(length < 0)
+ with raises(ValueError):
+ v.scale_to(length)
+
+
- @given(x=vectors(), length=floats())
? - ^^
+ @given(x=vectors(), length=lengths())
? ^^^ +
def test_scale_to_length(x: Vector, length: float):
"""Test that the length of x.scale_to(length) is length.
+ Additionally, scale_to may raise ZeroDivisionError if the vector is null.
- Additionally, Vector.scale_to may raise:
- - ZeroDivisionError if the vector is null;
- - ValueError if the desired length is negative.
-
"""
try:
assert isclose(x.scale_to(length).length, length)
except ZeroDivisionError:
assert x == (0, 0)
- except ValueError:
- assert length < 0
@given(x=vectors(), length=lengths())
def test_scale_aligned(x: Vector, length: float):
"""Test that x.scale_to(length) is aligned with x."""
assume(length > 0)
try:
assert angle_isclose(x.scale_to(length).angle(x), 0)
except ZeroDivisionError:
assert x == (0, 0)
|
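The commit message pinpoints a common property-test trap: `except ValueError: assert length < 0` only checks the implication "if an error was raised, the length was negative", which passes vacuously when no error is raised at all. The new test asserts the converse. A compact contrast, with `scale_to` as a hypothetical stand-in for the vector method:

import pytest

def scale_to(length: float) -> float:
    if length < 0:
        raise ValueError("negative length")
    return length

def weak_check(length: float) -> None:
    # Vacuously true whenever scale_to forgets to raise.
    try:
        scale_to(length)
    except ValueError:
        assert length < 0

def test_negative_length_raises():
    # Fails loudly if the ValueError is never raised.
    with pytest.raises(ValueError):
        scale_to(-1.0)
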
aee0c96593343b3b1064d38579bec666bd51c9fa
|
python/atemctrl.py
|
python/atemctrl.py
|
import sys
import time
import ATEM
def main(argv):
run_cmd = 1
atem_ip = argv[0].split(".")
ATEM.begin(int(atem_ip[0]), int(atem_ip[1]), int(atem_ip[2]), int(atem_ip[3]))
time_set = time.time() + 0.500
while run_cmd == 1:
time.sleep(0.05)
if argv[1] == 'program':
ATEM.runLoop()
if time.time() > time_set:
ATEM.setAuxSourceInput(0, !TBD!)
elif argv[1] != 'source':
ATEM.runLoop()
if time.time() > time_set:
ATEM.setAuxSourceInput(0, 1)
else:
ATEM.runLoop()
if time.time() > time_set:
ATEM.setProgramInputVideoSource(0, int(argv[2]))
ATEM.setPreviewInputVideoSource(0, int(argv[2]))
run_cmd = 0
if __name__ == "__main__":
main(sys.argv[1:])
|
import sys
import time
import ATEM
def main(argv):
run_cmd = 1
atem_ip = argv[0].split(".")
ATEM.begin(int(atem_ip[0]), int(atem_ip[1]), int(atem_ip[2]), int(atem_ip[3]))
time_set = time.time() + 0.500
while run_cmd == 1:
time.sleep(0.05)
if argv[1] == 'program':
ATEM.runLoop()
if time.time() > time_set:
ATEM.setAuxSourceInput(0, 10010)
run_cmd = 0
elif argv[1] == 'source':
ATEM.runLoop()
if time.time() > time_set:
ATEM.setAuxSourceInput(0, 1)
run_cmd = 0
else:
ATEM.runLoop()
if time.time() > time_set:
ATEM.setProgramInputVideoSource(0, int(argv[2]))
ATEM.setPreviewInputVideoSource(0, int(argv[2]))
run_cmd = 0
if __name__ == "__main__":
main(sys.argv[1:])
|
Fix script ending. Set value to show program in aux output.
|
Fix script ending. Set value to show program in aux output.
|
Python
|
mit
|
qrila/khvidcontrol,qrila/khvidcontrol
|
import sys
import time
import ATEM
def main(argv):
run_cmd = 1
atem_ip = argv[0].split(".")
ATEM.begin(int(atem_ip[0]), int(atem_ip[1]), int(atem_ip[2]), int(atem_ip[3]))
time_set = time.time() + 0.500
while run_cmd == 1:
time.sleep(0.05)
if argv[1] == 'program':
ATEM.runLoop()
if time.time() > time_set:
- ATEM.setAuxSourceInput(0, !TBD!)
+ ATEM.setAuxSourceInput(0, 10010)
+ run_cmd = 0
- elif argv[1] != 'source':
+ elif argv[1] == 'source':
ATEM.runLoop()
if time.time() > time_set:
ATEM.setAuxSourceInput(0, 1)
+ run_cmd = 0
else:
ATEM.runLoop()
if time.time() > time_set:
ATEM.setProgramInputVideoSource(0, int(argv[2]))
ATEM.setPreviewInputVideoSource(0, int(argv[2]))
+ run_cmd = 0
- run_cmd = 0
+
if __name__ == "__main__":
main(sys.argv[1:])
|
Fix script ending. Set value to show program in aux output.
|
## Code Before:
import sys
import time
import ATEM
def main(argv):
run_cmd = 1
atem_ip = argv[0].split(".")
ATEM.begin(int(atem_ip[0]), int(atem_ip[1]), int(atem_ip[2]), int(atem_ip[3]))
time_set = time.time() + 0.500
while run_cmd == 1:
time.sleep(0.05)
if argv[1] == 'program':
ATEM.runLoop()
if time.time() > time_set:
ATEM.setAuxSourceInput(0, !TBD!)
elif argv[1] != 'source':
ATEM.runLoop()
if time.time() > time_set:
ATEM.setAuxSourceInput(0, 1)
else:
ATEM.runLoop()
if time.time() > time_set:
ATEM.setProgramInputVideoSource(0, int(argv[2]))
ATEM.setPreviewInputVideoSource(0, int(argv[2]))
run_cmd = 0
if __name__ == "__main__":
main(sys.argv[1:])
## Instruction:
Fix script ending. Set value to show program in aux output.
## Code After:
import sys
import time
import ATEM
def main(argv):
run_cmd = 1
atem_ip = argv[0].split(".")
ATEM.begin(int(atem_ip[0]), int(atem_ip[1]), int(atem_ip[2]), int(atem_ip[3]))
time_set = time.time() + 0.500
while run_cmd == 1:
time.sleep(0.05)
if argv[1] == 'program':
ATEM.runLoop()
if time.time() > time_set:
ATEM.setAuxSourceInput(0, 10010)
run_cmd = 0
elif argv[1] == 'source':
ATEM.runLoop()
if time.time() > time_set:
ATEM.setAuxSourceInput(0, 1)
run_cmd = 0
else:
ATEM.runLoop()
if time.time() > time_set:
ATEM.setProgramInputVideoSource(0, int(argv[2]))
ATEM.setPreviewInputVideoSource(0, int(argv[2]))
run_cmd = 0
if __name__ == "__main__":
main(sys.argv[1:])
|
import sys
import time
import ATEM
def main(argv):
run_cmd = 1
atem_ip = argv[0].split(".")
ATEM.begin(int(atem_ip[0]), int(atem_ip[1]), int(atem_ip[2]), int(atem_ip[3]))
time_set = time.time() + 0.500
while run_cmd == 1:
time.sleep(0.05)
if argv[1] == 'program':
ATEM.runLoop()
if time.time() > time_set:
- ATEM.setAuxSourceInput(0, !TBD!)
? ^^^^^
+ ATEM.setAuxSourceInput(0, 10010)
? ^^^^^
+ run_cmd = 0
- elif argv[1] != 'source':
? ^
+ elif argv[1] == 'source':
? ^
ATEM.runLoop()
if time.time() > time_set:
ATEM.setAuxSourceInput(0, 1)
+ run_cmd = 0
else:
ATEM.runLoop()
if time.time() > time_set:
ATEM.setProgramInputVideoSource(0, int(argv[2]))
ATEM.setPreviewInputVideoSource(0, int(argv[2]))
+ run_cmd = 0
- run_cmd = 0
+
if __name__ == "__main__":
main(sys.argv[1:])
|
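The original loop only cleared `run_cmd` in the final branch, so the 'program' and 'source' commands polled forever; the fix repeats `run_cmd = 0` in every branch. One way to keep that invariant in a single place is to run the action once after the deadline and break, sketched here with hypothetical callables rather than the ATEM bindings:

import time

def run_once_after(deadline: float, poll, action) -> None:
    # Generic shape of the fixed loop: keep polling the connection,
    # fire the action exactly once after the deadline, then stop.
    while True:
        time.sleep(0.05)
        poll()
        if time.time() > deadline:
            action()
            break
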
7f411fd01c931b73f717b114934662ebb2739555
|
spacy/sv/tokenizer_exceptions.py
|
spacy/sv/tokenizer_exceptions.py
|
from __future__ import unicode_literals
from ..symbols import *
from ..language_data import PRON_LEMMA
TOKENIZER_EXCEPTIONS = {
}
ORTH_ONLY = [
"ang.",
"anm.",
"bil.",
"bl.a.",
"ca",
"cm",
"dl",
"dvs.",
"e.Kr.",
"el.",
"e.d.",
"eng.",
"etc.",
"exkl.",
"f.d.",
"fid.",
"f.Kr.",
"forts.",
"fr.o.m.",
"f.ö.",
"förf.",
"ha",
"hg",
"inkl.",
"i sht",
"i st",
"jmf",
"jur.",
"kcal",
"kg",
"kl.",
"km",
"kr.",
"l",
"lat.",
"m",
"m.a.o.",
"max.",
"m.fl.",
"min.",
"mm",
"m.m.",
"ngn",
"ngt",
"nr",
"obs.",
"o.d.",
"osv.",
"p.g.a.",
"ref.",
"resp.",
"s.",
"s.a.s.",
"s.k.",
"st.",
"s:t",
"t.ex.",
"t.o.m.",
"tfn",
"ung.",
"äv.",
"övers."
]
|
from __future__ import unicode_literals
from ..symbols import *
from ..language_data import PRON_LEMMA
TOKENIZER_EXCEPTIONS = {
}
ORTH_ONLY = [
"ang.",
"anm.",
"bil.",
"bl.a.",
"dvs.",
"e.Kr.",
"el.",
"e.d.",
"eng.",
"etc.",
"exkl.",
"f.d.",
"fid.",
"f.Kr.",
"forts.",
"fr.o.m.",
"f.ö.",
"förf.",
"inkl.",
"jur.",
"kl.",
"kr.",
"lat.",
"m.a.o.",
"max.",
"m.fl.",
"min.",
"m.m.",
"obs.",
"o.d.",
"osv.",
"p.g.a.",
"ref.",
"resp.",
"s.",
"s.a.s.",
"s.k.",
"st.",
"s:t",
"t.ex.",
"t.o.m.",
"ung.",
"äv.",
"övers."
]
|
Remove exceptions containing whitespace / no special chars
|
Remove exceptions containing whitespace / no special chars
|
Python
|
mit
|
honnibal/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,raphael0202/spaCy,explosion/spaCy,Gregory-Howard/spaCy,explosion/spaCy,aikramer2/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,honnibal/spaCy,raphael0202/spaCy,banglakit/spaCy,explosion/spaCy,explosion/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,aikramer2/spaCy,recognai/spaCy,recognai/spaCy,spacy-io/spaCy,aikramer2/spaCy,raphael0202/spaCy,raphael0202/spaCy,honnibal/spaCy,raphael0202/spaCy,recognai/spaCy,raphael0202/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,recognai/spaCy,aikramer2/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,honnibal/spaCy,recognai/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,banglakit/spaCy,oroszgy/spaCy.hu,recognai/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy
|
from __future__ import unicode_literals
from ..symbols import *
from ..language_data import PRON_LEMMA
TOKENIZER_EXCEPTIONS = {
}
ORTH_ONLY = [
"ang.",
"anm.",
"bil.",
"bl.a.",
- "ca",
- "cm",
- "dl",
"dvs.",
"e.Kr.",
"el.",
"e.d.",
"eng.",
"etc.",
"exkl.",
"f.d.",
"fid.",
"f.Kr.",
"forts.",
"fr.o.m.",
"f.ö.",
"förf.",
- "ha",
- "hg",
"inkl.",
- "i sht",
- "i st",
- "jmf",
"jur.",
- "kcal",
- "kg",
"kl.",
- "km",
"kr.",
- "l",
"lat.",
- "m",
"m.a.o.",
"max.",
"m.fl.",
"min.",
- "mm",
"m.m.",
- "ngn",
- "ngt",
- "nr",
"obs.",
"o.d.",
"osv.",
"p.g.a.",
"ref.",
"resp.",
"s.",
"s.a.s.",
"s.k.",
"st.",
"s:t",
"t.ex.",
"t.o.m.",
- "tfn",
"ung.",
"äv.",
"övers."
]
|
Remove exceptions containing whitespace / no special chars
|
## Code Before:
from __future__ import unicode_literals
from ..symbols import *
from ..language_data import PRON_LEMMA
TOKENIZER_EXCEPTIONS = {
}
ORTH_ONLY = [
"ang.",
"anm.",
"bil.",
"bl.a.",
"ca",
"cm",
"dl",
"dvs.",
"e.Kr.",
"el.",
"e.d.",
"eng.",
"etc.",
"exkl.",
"f.d.",
"fid.",
"f.Kr.",
"forts.",
"fr.o.m.",
"f.ö.",
"förf.",
"ha",
"hg",
"inkl.",
"i sht",
"i st",
"jmf",
"jur.",
"kcal",
"kg",
"kl.",
"km",
"kr.",
"l",
"lat.",
"m",
"m.a.o.",
"max.",
"m.fl.",
"min.",
"mm",
"m.m.",
"ngn",
"ngt",
"nr",
"obs.",
"o.d.",
"osv.",
"p.g.a.",
"ref.",
"resp.",
"s.",
"s.a.s.",
"s.k.",
"st.",
"s:t",
"t.ex.",
"t.o.m.",
"tfn",
"ung.",
"äv.",
"övers."
]
## Instruction:
Remove exceptions containing whitespace / no special chars
## Code After:
from __future__ import unicode_literals
from ..symbols import *
from ..language_data import PRON_LEMMA
TOKENIZER_EXCEPTIONS = {
}
ORTH_ONLY = [
"ang.",
"anm.",
"bil.",
"bl.a.",
"dvs.",
"e.Kr.",
"el.",
"e.d.",
"eng.",
"etc.",
"exkl.",
"f.d.",
"fid.",
"f.Kr.",
"forts.",
"fr.o.m.",
"f.ö.",
"förf.",
"inkl.",
"jur.",
"kl.",
"kr.",
"lat.",
"m.a.o.",
"max.",
"m.fl.",
"min.",
"m.m.",
"obs.",
"o.d.",
"osv.",
"p.g.a.",
"ref.",
"resp.",
"s.",
"s.a.s.",
"s.k.",
"st.",
"s:t",
"t.ex.",
"t.o.m.",
"ung.",
"äv.",
"övers."
]
|
from __future__ import unicode_literals
from ..symbols import *
from ..language_data import PRON_LEMMA
TOKENIZER_EXCEPTIONS = {
}
ORTH_ONLY = [
"ang.",
"anm.",
"bil.",
"bl.a.",
- "ca",
- "cm",
- "dl",
"dvs.",
"e.Kr.",
"el.",
"e.d.",
"eng.",
"etc.",
"exkl.",
"f.d.",
"fid.",
"f.Kr.",
"forts.",
"fr.o.m.",
"f.ö.",
"förf.",
- "ha",
- "hg",
"inkl.",
- "i sht",
- "i st",
- "jmf",
"jur.",
- "kcal",
- "kg",
"kl.",
- "km",
"kr.",
- "l",
"lat.",
- "m",
"m.a.o.",
"max.",
"m.fl.",
"min.",
- "mm",
"m.m.",
- "ngn",
- "ngt",
- "nr",
"obs.",
"o.d.",
"osv.",
"p.g.a.",
"ref.",
"resp.",
"s.",
"s.a.s.",
"s.k.",
"st.",
"s:t",
"t.ex.",
"t.o.m.",
- "tfn",
"ung.",
"äv.",
"övers."
]
|
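The entries removed above fall into two groups: plain words with no punctuation ("ca", "kg", "mm"), which the tokenizer already handles, and strings with internal whitespace ("i sht"), which cannot be single-token exceptions in the first place. A small filter expressing that rule (illustrative Python, not spaCy API):

ORTH_ONLY = ["ang.", "ca", "i sht", "kl.", "kg"]

# Keep only entries that need special-casing: no internal whitespace
# and at least one non-alphanumeric character (e.g. an abbreviation dot).
filtered = [tok for tok in ORTH_ONLY if " " not in tok and not tok.isalnum()]
assert filtered == ["ang.", "kl."]
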
f0bf059cfa7b6edc366a0b3246eac1f3ab68e865
|
solutions/comparison.py
|
solutions/comparison.py
|
'''
comparison.py - Comparison of analytic and calculated solutions
Created on 12 Aug 2010
@author: Ian Huston
'''
from __future__ import division
import numpy as np
import analyticsolution
import calcedsolution
import fixtures
def compare_one_step(m, srcclass, nix):
"""
Compare the analytic and calculated solutions for equations from `srclass` using the
results from `m` at the timestep `nix`.
"""
fx = fixtures.fixture_from_model(m)
asol = analyticsolution.NoPhaseBunchDaviesSolution(fx, srcclass)
csol = calcedsolution.NoPhaseBunchDaviesCalced(fx, srcclass)
#Need to make analytic solution use 128 bit floats to avoid overruns
asol.srceqns.k = np.float128(asol.srceqns.k)
analytic_result = asol.full_source_from_model(m, nix)
calced_result = csol.full_source_from_model(m, nix)
difference = analytic_result - calced_result
error = np.abs(difference)/np.abs(analytic_result)
return difference, error, analytic_result, calced_result
|
'''
comparison.py - Comparison of analytic and calculated solutions
Created on 12 Aug 2010
@author: Ian Huston
'''
from __future__ import division
import numpy as np
import analyticsolution
import calcedsolution
import fixtures
def compare_one_step(m, srcclass, nix):
"""
Compare the analytic and calculated solutions for equations from `srclass` using the
results from `m` at the timestep `nix`.
"""
fx = fixtures.fixture_from_model(m)
asol = analyticsolution.NoPhaseBunchDaviesSolution(fx, srcclass)
csol = calcedsolution.NoPhaseBunchDaviesCalced(fx, srcclass)
#Need to make analytic solution use 128 bit floats to avoid overruns
asol.srceqns.k = np.float128(asol.srceqns.k)
analytic_result = asol.full_source_from_model(m, nix)
calced_result = csol.full_source_from_model(m, nix)
difference = analytic_result - calced_result
error = np.abs(difference)/np.abs(analytic_result)
return difference, error, analytic_result, calced_result, asol.srceqns.k
|
Return k with other results.
|
Return k with other results.
|
Python
|
bsd-3-clause
|
ihuston/pyflation,ihuston/pyflation
|
'''
comparison.py - Comparison of analytic and calculated solutions
Created on 12 Aug 2010
@author: Ian Huston
'''
from __future__ import division
import numpy as np
import analyticsolution
import calcedsolution
import fixtures
def compare_one_step(m, srcclass, nix):
"""
Compare the analytic and calculated solutions for equations from `srclass` using the
results from `m` at the timestep `nix`.
"""
fx = fixtures.fixture_from_model(m)
asol = analyticsolution.NoPhaseBunchDaviesSolution(fx, srcclass)
csol = calcedsolution.NoPhaseBunchDaviesCalced(fx, srcclass)
#Need to make analytic solution use 128 bit floats to avoid overruns
asol.srceqns.k = np.float128(asol.srceqns.k)
analytic_result = asol.full_source_from_model(m, nix)
calced_result = csol.full_source_from_model(m, nix)
difference = analytic_result - calced_result
error = np.abs(difference)/np.abs(analytic_result)
- return difference, error, analytic_result, calced_result
+ return difference, error, analytic_result, calced_result, asol.srceqns.k
|
Return k with other results.
|
## Code Before:
'''
comparison.py - Comparison of analytic and calculated solutions
Created on 12 Aug 2010
@author: Ian Huston
'''
from __future__ import division
import numpy as np
import analyticsolution
import calcedsolution
import fixtures
def compare_one_step(m, srcclass, nix):
"""
Compare the analytic and calculated solutions for equations from `srclass` using the
results from `m` at the timestep `nix`.
"""
fx = fixtures.fixture_from_model(m)
asol = analyticsolution.NoPhaseBunchDaviesSolution(fx, srcclass)
csol = calcedsolution.NoPhaseBunchDaviesCalced(fx, srcclass)
#Need to make analytic solution use 128 bit floats to avoid overruns
asol.srceqns.k = np.float128(asol.srceqns.k)
analytic_result = asol.full_source_from_model(m, nix)
calced_result = csol.full_source_from_model(m, nix)
difference = analytic_result - calced_result
error = np.abs(difference)/np.abs(analytic_result)
return difference, error, analytic_result, calced_result
## Instruction:
Return k with other results.
## Code After:
'''
comparison.py - Comparison of analytic and calculated solutions
Created on 12 Aug 2010
@author: Ian Huston
'''
from __future__ import division
import numpy as np
import analyticsolution
import calcedsolution
import fixtures
def compare_one_step(m, srcclass, nix):
"""
Compare the analytic and calculated solutions for equations from `srclass` using the
results from `m` at the timestep `nix`.
"""
fx = fixtures.fixture_from_model(m)
asol = analyticsolution.NoPhaseBunchDaviesSolution(fx, srcclass)
csol = calcedsolution.NoPhaseBunchDaviesCalced(fx, srcclass)
#Need to make analytic solution use 128 bit floats to avoid overruns
asol.srceqns.k = np.float128(asol.srceqns.k)
analytic_result = asol.full_source_from_model(m, nix)
calced_result = csol.full_source_from_model(m, nix)
difference = analytic_result - calced_result
error = np.abs(difference)/np.abs(analytic_result)
return difference, error, analytic_result, calced_result, asol.srceqns.k
|
'''
comparison.py - Comparison of analytic and calculated solutions
Created on 12 Aug 2010
@author: Ian Huston
'''
from __future__ import division
import numpy as np
import analyticsolution
import calcedsolution
import fixtures
def compare_one_step(m, srcclass, nix):
"""
Compare the analytic and calculated solutions for equations from `srclass` using the
results from `m` at the timestep `nix`.
"""
fx = fixtures.fixture_from_model(m)
asol = analyticsolution.NoPhaseBunchDaviesSolution(fx, srcclass)
csol = calcedsolution.NoPhaseBunchDaviesCalced(fx, srcclass)
#Need to make analytic solution use 128 bit floats to avoid overruns
asol.srceqns.k = np.float128(asol.srceqns.k)
analytic_result = asol.full_source_from_model(m, nix)
calced_result = csol.full_source_from_model(m, nix)
difference = analytic_result - calced_result
error = np.abs(difference)/np.abs(analytic_result)
- return difference, error, analytic_result, calced_result
+ return difference, error, analytic_result, calced_result, asol.srceqns.k
? ++++++++++++++++
|
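Every caller of `compare_one_step` now has to unpack a fifth element; in exchange it gets the wavenumber grid the analytic solution was evaluated on. A hypothetical call site (`m`, `srcclass`, and `nix` are assumed to be in scope as elsewhere in the module):

import numpy as np

difference, error, analytic, calced, k = compare_one_step(m, srcclass, nix)
# With k returned alongside the error, the worst wavenumber is one line:
worst_k = k[np.argmax(error)]
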
8e623faa44ad767baf4c92596a2501f98dfd2bbb
|
src/masterfile/validators/__init__.py
|
src/masterfile/validators/__init__.py
|
from __future__ import absolute_import
from . import io_validator
from . import index_column_validator
|
from __future__ import absolute_import
from . import ( # noqa
io_validator,
index_column_validator
)
|
Clean up validator package imports
|
Clean up validator package imports
|
Python
|
mit
|
njvack/masterfile
|
from __future__ import absolute_import
- from . import io_validator
- from . import index_column_validator
+ from . import ( # noqa
+ io_validator,
+ index_column_validator
+ )
-
|
Clean up validator package imports
|
## Code Before:
from __future__ import absolute_import
from . import io_validator
from . import index_column_validator
## Instruction:
Clean up validator package imports
## Code After:
from __future__ import absolute_import
from . import ( # noqa
io_validator,
index_column_validator
)
|
from __future__ import absolute_import
- from . import io_validator
+ from . import ( # noqa
+ io_validator,
- from . import index_column_validator
? ---- - ^^^^^^
+ index_column_validator
? ^
-
+ )
|
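The blanket `# noqa` silences every flake8 warning on the import, including real ones; the usual refinement is to suppress only F401 (imported but unused) and to declare the re-exports explicitly. A hedged sketch of that variant:

from __future__ import absolute_import

from . import io_validator, index_column_validator  # noqa: F401

# __all__ documents that the submodules are intentional re-exports.
__all__ = ["io_validator", "index_column_validator"]
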
ccb7446b02b394af308f4fba0500d402240f117e
|
home/migrations/0002_create_homepage.py
|
home/migrations/0002_create_homepage.py
|
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page=homepage, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
|
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
Site.objects.filter(hostname='localhost').delete()
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page_id=homepage.id, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
|
Remove any existing localhost sites and use the page id rather than the object to set the default homepage.
|
Remove any existing localhost sites and use the page id rather than the object to set the default homepage.
|
Python
|
mit
|
OpenCanada/lindinitiative,OpenCanada/lindinitiative
|
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
+ Site = apps.get_model('wagtailcore.Site')
+ HomePage = apps.get_model("core", "HomePage")
+ homepage = HomePage.objects.get(slug="home")
+
+ Site.objects.filter(hostname='localhost').delete()
+
# Create a site with the new homepage set as the root
Site.objects.create(
- hostname='localhost', root_page=homepage, is_default_site=True)
+ hostname='localhost', root_page_id=homepage.id, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
|
Remove any existing localhost sites and use the page id rather than the object to set the default homepage.
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page=homepage, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
## Instruction:
Remove any existing localhost sites and use the page id rather than the object to set the default homepage.
## Code After:
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
Site.objects.filter(hostname='localhost').delete()
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page_id=homepage.id, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
|
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
+ Site = apps.get_model('wagtailcore.Site')
+ HomePage = apps.get_model("core", "HomePage")
+ homepage = HomePage.objects.get(slug="home")
+
+ Site.objects.filter(hostname='localhost').delete()
+
# Create a site with the new homepage set as the root
Site.objects.create(
- hostname='localhost', root_page=homepage, is_default_site=True)
+ hostname='localhost', root_page_id=homepage.id, is_default_site=True)
? +++ +++
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
|
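`RunPython` also accepts a reverse function, which this migration omits; without one, Django refuses to unapply the migration. A hypothetical reverse that undoes the objects created above:

from django.db import migrations

def remove_homepage(apps, schema_editor):
    # Mirror of create_homepage: drop the site and the page it created.
    Site = apps.get_model('wagtailcore.Site')
    HomePage = apps.get_model('home.HomePage')
    Site.objects.filter(hostname='localhost').delete()
    HomePage.objects.filter(slug='home').delete()

operations = [
    migrations.RunPython(create_homepage, remove_homepage),
]
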
fe167bfd25c0c86b3c6fb5ef76eb24036ad2b6da
|
tests/ne_np/__init__.py
|
tests/ne_np/__init__.py
|
from __future__ import unicode_literals
import unittest
import re
from faker import Factory
from faker.utils import text
from .. import string_types
class ne_NP_FactoryTestCase(unittest.TestCase):
def setUp(self):
self.factory = Factory.create('ne_NP')
def test_address(self):
from faker.providers.address.ne_NP import Provider
countries = Provider.countries
country = self.factory.country()
assert country
assert isinstance(country, string_types)
assert country in countries
districts = Provider.districts
district = self.factory.district()
assert district
assert isinstance(district, string_types)
assert district in districts
cities = Provider.cities
city = self.factory.city()
assert city
assert isinstance(city, string_types)
assert city in cities
def test_names(self):
from faker.providers.person.ne_NP import Provider
first_names = Provider.first_names
name = self.factory.name()
first_name, last_name = name.split()
assert first_name
assert isinstance(first_name, string_types)
assert first_name in first_names
last_names = Provider.last_names
assert last_names
assert isinstance(last_name, string_types)
assert last_name in last_names
|
from __future__ import unicode_literals
import unittest
from faker import Factory
from .. import string_types
class NeNPFactoryTestCase(unittest.TestCase):
def setUp(self):
self.factory = Factory.create('ne_NP')
def test_address(self):
from faker.providers.address.ne_NP import Provider
country = self.factory.country()
assert isinstance(country, string_types)
assert country in Provider.countries
district = self.factory.district()
assert isinstance(district, string_types)
assert district in Provider.districts
city = self.factory.city()
assert isinstance(city, string_types)
assert city in Provider.cities
def test_names(self):
from faker.providers.person.ne_NP import Provider
for _ in range(10000):
name = self.factory.name().split()
assert all(isinstance(n, string_types) for n in name)
# name should always be 2-3 words. If 3, first word
# should be a prefix.
assert name[-2] in Provider.first_names
assert name[-1] in Provider.last_names
prefixes = Provider.prefixes_male + Provider.prefixes_female
if len(name) == 3:
assert name[0] in prefixes
|
Fix incorrect ne_NP locale tests
|
Fix incorrect ne_NP locale tests
This test incorrectly assumes a call to name() will
yield only a first/last name, which isn't always true for this
locale. I suspect it hasn't been uncovered yet because the
tests are seeded the same at the beginning of every run. It only
becomes a problem when you start moving tests around. This change
addresses the incorrect assertions and makes the file PEP8
compliant.
|
Python
|
mit
|
trtd/faker,joke2k/faker,joke2k/faker,danhuss/faker
|
from __future__ import unicode_literals
import unittest
- import re
from faker import Factory
- from faker.utils import text
from .. import string_types
- class ne_NP_FactoryTestCase(unittest.TestCase):
+ class NeNPFactoryTestCase(unittest.TestCase):
-
+
def setUp(self):
- self.factory = Factory.create('ne_NP')
+ self.factory = Factory.create('ne_NP')
def test_address(self):
- from faker.providers.address.ne_NP import Provider
+ from faker.providers.address.ne_NP import Provider
- countries = Provider.countries
- country = self.factory.country()
+ country = self.factory.country()
- assert country
- assert isinstance(country, string_types)
+ assert isinstance(country, string_types)
- assert country in countries
+ assert country in Provider.countries
-
- districts = Provider.districts
- district = self.factory.district()
- assert district
- assert isinstance(district, string_types)
- assert district in districts
- cities = Provider.cities
+ district = self.factory.district()
+ assert isinstance(district, string_types)
+ assert district in Provider.districts
+
- city = self.factory.city()
+ city = self.factory.city()
- assert city
- assert isinstance(city, string_types)
+ assert isinstance(city, string_types)
- assert city in cities
+ assert city in Provider.cities
-
+
def test_names(self):
- from faker.providers.person.ne_NP import Provider
+ from faker.providers.person.ne_NP import Provider
- first_names = Provider.first_names
+ for _ in range(10000):
- name = self.factory.name()
+ name = self.factory.name().split()
+ assert all(isinstance(n, string_types) for n in name)
+ # name should always be 2-3 words. If 3, first word
+ # should be a prefix.
+ assert name[-2] in Provider.first_names
- first_name, last_name = name.split()
- assert first_name
- assert isinstance(first_name, string_types)
- assert first_name in first_names
-
- last_names = Provider.last_names
+ assert name[-1] in Provider.last_names
+ prefixes = Provider.prefixes_male + Provider.prefixes_female
+ if len(name) == 3:
+ assert name[0] in prefixes
+
- assert last_names
- assert isinstance(last_name, string_types)
- assert last_name in last_names
-
-
-
|
Fix incorrect ne_NP locale tests
|
## Code Before:
from __future__ import unicode_literals
import unittest
import re
from faker import Factory
from faker.utils import text
from .. import string_types
class ne_NP_FactoryTestCase(unittest.TestCase):
def setUp(self):
self.factory = Factory.create('ne_NP')
def test_address(self):
from faker.providers.address.ne_NP import Provider
countries = Provider.countries
country = self.factory.country()
assert country
assert isinstance(country, string_types)
assert country in countries
districts = Provider.districts
district = self.factory.district()
assert district
assert isinstance(district, string_types)
assert district in districts
cities = Provider.cities
city = self.factory.city()
assert city
assert isinstance(city, string_types)
assert city in cities
def test_names(self):
from faker.providers.person.ne_NP import Provider
first_names = Provider.first_names
name = self.factory.name()
first_name, last_name = name.split()
assert first_name
assert isinstance(first_name, string_types)
assert first_name in first_names
last_names = Provider.last_names
assert last_names
assert isinstance(last_name, string_types)
assert last_name in last_names
## Instruction:
Fix incorrect ne_NP locale tests
## Code After:
from __future__ import unicode_literals
import unittest
from faker import Factory
from .. import string_types
class NeNPFactoryTestCase(unittest.TestCase):
def setUp(self):
self.factory = Factory.create('ne_NP')
def test_address(self):
from faker.providers.address.ne_NP import Provider
country = self.factory.country()
assert isinstance(country, string_types)
assert country in Provider.countries
district = self.factory.district()
assert isinstance(district, string_types)
assert district in Provider.districts
city = self.factory.city()
assert isinstance(city, string_types)
assert city in Provider.cities
def test_names(self):
from faker.providers.person.ne_NP import Provider
for _ in range(10000):
name = self.factory.name().split()
assert all(isinstance(n, string_types) for n in name)
# name should always be 2-3 words. If 3, first word
# should be a prefix.
assert name[-2] in Provider.first_names
assert name[-1] in Provider.last_names
prefixes = Provider.prefixes_male + Provider.prefixes_female
if len(name) == 3:
assert name[0] in prefixes
|
from __future__ import unicode_literals
import unittest
- import re
from faker import Factory
- from faker.utils import text
from .. import string_types
- class ne_NP_FactoryTestCase(unittest.TestCase):
? ^ - -
+ class NeNPFactoryTestCase(unittest.TestCase):
? ^
-
+
def setUp(self):
- self.factory = Factory.create('ne_NP')
+ self.factory = Factory.create('ne_NP')
? ++
def test_address(self):
- from faker.providers.address.ne_NP import Provider
+ from faker.providers.address.ne_NP import Provider
? ++
- countries = Provider.countries
- country = self.factory.country()
? --
+ country = self.factory.country()
? ++
- assert country
- assert isinstance(country, string_types)
+ assert isinstance(country, string_types)
? ++
- assert country in countries
+ assert country in Provider.countries
? ++ +++++++++
-
- districts = Provider.districts
- district = self.factory.district()
- assert district
- assert isinstance(district, string_types)
- assert district in districts
- cities = Provider.cities
+ district = self.factory.district()
+ assert isinstance(district, string_types)
+ assert district in Provider.districts
+
- city = self.factory.city()
? --
+ city = self.factory.city()
? ++
- assert city
- assert isinstance(city, string_types)
+ assert isinstance(city, string_types)
? ++
- assert city in cities
+ assert city in Provider.cities
? ++ +++++++++
-
+
def test_names(self):
- from faker.providers.person.ne_NP import Provider
+ from faker.providers.person.ne_NP import Provider
? ++
- first_names = Provider.first_names
+ for _ in range(10000):
- name = self.factory.name()
+ name = self.factory.name().split()
? ++++++ ++++++++
+ assert all(isinstance(n, string_types) for n in name)
+ # name should always be 2-3 words. If 3, first word
+ # should be a prefix.
+ assert name[-2] in Provider.first_names
- first_name, last_name = name.split()
- assert first_name
- assert isinstance(first_name, string_types)
- assert first_name in first_names
-
- last_names = Provider.last_names
? ^ ^ ^ ^
+ assert name[-1] in Provider.last_names
? ^^^^^^ +++ ^ ^^^^ ^^
+ prefixes = Provider.prefixes_male + Provider.prefixes_female
+ if len(name) == 3:
+ assert name[0] in prefixes
- assert last_names
- assert isinstance(last_name, string_types)
- assert last_name in last_names
-
-
-
|
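A note on the seeding behaviour the message above calls out: pinning the generator's seed makes locale output deterministic regardless of test ordering. A minimal sketch of that, assuming the Generator returned by Factory.create exposes a seed() method (true for faker releases of this era); the seed value 0 is illustrative.

from faker import Factory

factory = Factory.create('ne_NP')
factory.seed(0)                      # assumed API: pins the RNG state
first = factory.name()
factory.seed(0)                      # re-seed before drawing again
assert factory.name() == first       # same seed, same output, whatever the test order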
de31fba90a541f272868d5868b402af3d2902ecc
|
labonneboite/common/maps/constants.py
|
labonneboite/common/maps/constants.py
|
ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
DEFAULT_TRAVEL_MODE = CAR_MODE
TRAVEL_MODES = (
PUBLIC_MODE,
CAR_MODE,
)
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
|
ENABLE_CAR_MODE = True
ENABLE_PUBLIC_MODE = True
ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
TRAVEL_MODES = ()
if ENABLE_PUBLIC_MODE:
TRAVEL_MODES += (PUBLIC_MODE,)
if ENABLE_CAR_MODE:
TRAVEL_MODES += (CAR_MODE,)
if ENABLE_CAR_MODE:
DEFAULT_TRAVEL_MODE = CAR_MODE
else:
DEFAULT_TRAVEL_MODE = PUBLIC_MODE
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
|
Add option to enable/disable each travel_mode
|
Add option to enable/disable each travel_mode
|
Python
|
agpl-3.0
|
StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite
|
+ ENABLE_CAR_MODE = True
+ ENABLE_PUBLIC_MODE = True
+
ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
- DEFAULT_TRAVEL_MODE = CAR_MODE
+ TRAVEL_MODES = ()
+ if ENABLE_PUBLIC_MODE:
+ TRAVEL_MODES += (PUBLIC_MODE,)
+ if ENABLE_CAR_MODE:
+ TRAVEL_MODES += (CAR_MODE,)
- TRAVEL_MODES = (
- PUBLIC_MODE,
- CAR_MODE,
- )
+ if ENABLE_CAR_MODE:
+ DEFAULT_TRAVEL_MODE = CAR_MODE
+ else:
+ DEFAULT_TRAVEL_MODE = PUBLIC_MODE
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
|
Add option to enable/disable each travel_mode
|
## Code Before:
ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
DEFAULT_TRAVEL_MODE = CAR_MODE
TRAVEL_MODES = (
PUBLIC_MODE,
CAR_MODE,
)
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
## Instruction:
Add option to enable/disable each travel_mode
## Code After:
ENABLE_CAR_MODE = True
ENABLE_PUBLIC_MODE = True
ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
TRAVEL_MODES = ()
if ENABLE_PUBLIC_MODE:
TRAVEL_MODES += (PUBLIC_MODE,)
if ENABLE_CAR_MODE:
TRAVEL_MODES += (CAR_MODE,)
if ENABLE_CAR_MODE:
DEFAULT_TRAVEL_MODE = CAR_MODE
else:
DEFAULT_TRAVEL_MODE = PUBLIC_MODE
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
|
+ ENABLE_CAR_MODE = True
+ ENABLE_PUBLIC_MODE = True
+
ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
- DEFAULT_TRAVEL_MODE = CAR_MODE
+ TRAVEL_MODES = ()
+ if ENABLE_PUBLIC_MODE:
+ TRAVEL_MODES += (PUBLIC_MODE,)
+ if ENABLE_CAR_MODE:
+ TRAVEL_MODES += (CAR_MODE,)
- TRAVEL_MODES = (
- PUBLIC_MODE,
- CAR_MODE,
- )
+ if ENABLE_CAR_MODE:
+ DEFAULT_TRAVEL_MODE = CAR_MODE
+ else:
+ DEFAULT_TRAVEL_MODE = PUBLIC_MODE
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
|
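The commit above builds the modes tuple from boolean flags so a deployment can drop a travel mode without touching downstream code. A standalone sketch of the same pattern with the car flag switched off; the flag values are illustrative, not the project's defaults.

ENABLE_CAR_MODE = False
ENABLE_PUBLIC_MODE = True

CAR_MODE = 'car'
PUBLIC_MODE = 'public'

TRAVEL_MODES = ()
if ENABLE_PUBLIC_MODE:
    TRAVEL_MODES += (PUBLIC_MODE,)
if ENABLE_CAR_MODE:
    TRAVEL_MODES += (CAR_MODE,)

# A conditional expression is an equivalent, more compact default selection.
DEFAULT_TRAVEL_MODE = CAR_MODE if ENABLE_CAR_MODE else PUBLIC_MODE

assert TRAVEL_MODES == ('public',)
assert DEFAULT_TRAVEL_MODE == 'public'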
adcaa3bd5feb0939a6ffae8ce4637f5fd8369f2d
|
tests/base_test.py
|
tests/base_test.py
|
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
|
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
def shortDescription(self):
"""
Gets the one-liner description to be displayed.
Source:
http://erikzaadi.com/2012/09/13/inheritance-within-python-unit-tests/
"""
doc = self._testMethodDoc
doc = doc and doc.split("\n")[0].strip() or ""
doc = "%s : %s" % (self.__class__.__name__, doc)
return doc
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
|
Improve testing docstring output for inherited classes
|
Improve testing docstring output for inherited classes
|
Python
|
mit
|
ashleysommer/sanic-cors,corydolphin/flask-cors
|
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
+ def shortDescription(self):
+ """
+ Gets the one-liner description to be displayed.
+ Source:
+ http://erikzaadi.com/2012/09/13/inheritance-within-python-unit-tests/
+ """
+ doc = self._testMethodDoc
+ doc = doc and doc.split("\n")[0].strip() or ""
+ doc = "%s : %s" % (self.__class__.__name__, doc)
+ return doc
+
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
|
Improve testing docstring output for inherited classes
|
## Code Before:
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
## Instruction:
Improve testing docstring output for inherited classes
## Code After:
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
def shortDescription(self):
"""
Gets the one-liner description to be displayed.
Source:
http://erikzaadi.com/2012/09/13/inheritance-within-python-unit-tests/
"""
doc = self._testMethodDoc
doc = doc and doc.split("\n")[0].strip() or ""
doc = "%s : %s" % (self.__class__.__name__, doc)
return doc
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
|
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
+ def shortDescription(self):
+ """
+ Gets the one-liner description to be displayed.
+ Source:
+ http://erikzaadi.com/2012/09/13/inheritance-within-python-unit-tests/
+ """
+ doc = self._testMethodDoc
+ doc = doc and doc.split("\n")[0].strip() or ""
+ doc = "%s : %s" % (self.__class__.__name__, doc)
+ return doc
+
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
|
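The point of overriding shortDescription() is that unittest prints the first docstring line per test, so prefixing the class name distinguishes the same inherited test when it runs under several subclasses. A minimal sketch of the effect; the class and test names are invented for illustration.

import unittest

class NamedCase(unittest.TestCase):
    def shortDescription(self):
        # Same recipe as the commit: class name plus first docstring line.
        doc = self._testMethodDoc
        doc = doc and doc.split("\n")[0].strip() or ""
        return "%s : %s" % (self.__class__.__name__, doc)

class ConfigA(NamedCase):
    def test_origin(self):
        """Origin header is echoed back."""
        self.assertTrue(True)

class ConfigB(ConfigA):
    # Inherits test_origin; verbose output now shows which subclass ran it.
    pass

if __name__ == '__main__':
    unittest.main(verbosity=2)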
f9648e4b48d2affee103ad5f229492254e3e4dc8
|
web3/web3/jsonrpc.py
|
web3/web3/jsonrpc.py
|
class Jsonrpc(object):
def __init__(self):
self.messageId = 0
@staticmethod
def getInstance():
return Jsonrpc()
def toPayload(self, method, params):
"""
Should be called to create a valid JSON payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
self.messageId += 1
return {
"jsonrpc": "2.0",
"method": method,
"params": params or [],
"id": self.messageId
}
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
def toBatchPayload(self, messages):
return [self.toPayload(message["method"], message["params"]) for]
|
import json
class Jsonrpc(object):
def toPayload(self, reqid, method, params):
"""
Should be called to create a valid JSON payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
return json.dumps({
"jsonrpc": "2.0",
"method": method,
"params": params or [],
"id": reqid
})
def fromPayload(self, raw):
result = json.loads(raw)
if not Jsonrpc.isValidResponse(result):
raise errors.InvalidResponse(result)
return result
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
# def toBatchPayload(self, messages):
# return [self.toPayload(message["method"], message["params"]) for]
|
Move message id generation to requestmanager
|
Move message id generation to requestmanager
|
Python
|
mit
|
pipermerriam/web3.py,shravan-shandilya/web3.py
|
+ import json
+
class Jsonrpc(object):
- def __init__(self):
- self.messageId = 0
-
- @staticmethod
- def getInstance():
- return Jsonrpc()
-
- def toPayload(self, method, params):
+ def toPayload(self, reqid, method, params):
"""
Should be called to create a valid JSON payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
+ return json.dumps({
- self.messageId += 1
-
- return {
"jsonrpc": "2.0",
"method": method,
"params": params or [],
- "id": self.messageId
+ "id": reqid
- }
+ })
+
+ def fromPayload(self, raw):
+ result = json.loads(raw)
+ if not Jsonrpc.isValidResponse(result):
+ raise errors.InvalidResponse(result)
+ return result
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
- def toBatchPayload(self, messages):
+ # def toBatchPayload(self, messages):
- return [self.toPayload(message["method"], message["params"]) for]
+ # return [self.toPayload(message["method"], message["params"]) for]
|
Move message id generation to requestmanager
|
## Code Before:
class Jsonrpc(object):
def __init__(self):
self.messageId = 0
@staticmethod
def getInstance():
return Jsonrpc()
def toPayload(self, method, params):
"""
Should be called to create a valid JSON payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
self.messageId += 1
return {
"jsonrpc": "2.0",
"method": method,
"params": params or [],
"id": self.messageId
}
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
def toBatchPayload(self, messages):
return [self.toPayload(message["method"], message["params"]) for]
## Instruction:
Move message id generation to requestmanager
## Code After:
import json
class Jsonrpc(object):
def toPayload(self, reqid, method, params):
"""
Should be called to create a valid JSON payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
return json.dumps({
"jsonrpc": "2.0",
"method": method,
"params": params or [],
"id": reqid
})
def fromPayload(self, raw):
result = json.loads(raw)
if not Jsonrpc.isValidResponse(result):
raise errors.InvalidResponse(result)
return result
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
# def toBatchPayload(self, messages):
# return [self.toPayload(message["method"], message["params"]) for]
|
+ import json
+
class Jsonrpc(object):
- def __init__(self):
- self.messageId = 0
-
- @staticmethod
- def getInstance():
- return Jsonrpc()
-
- def toPayload(self, method, params):
+ def toPayload(self, reqid, method, params):
? +++++++
"""
Should be called to create a valid JSON payload object
"""
if not method:
raise Exception("jsonrpc method should be specified!")
+ return json.dumps({
- self.messageId += 1
-
- return {
"jsonrpc": "2.0",
"method": method,
"params": params or [],
- "id": self.messageId
+ "id": reqid
- }
+ })
? +
+
+ def fromPayload(self, raw):
+ result = json.loads(raw)
+ if not Jsonrpc.isValidResponse(result):
+ raise errors.InvalidResponse(result)
+ return result
def isValidResponse(self, response):
"""
Should be called to check if jsonrpc response is valid
"""
return response is not None and not response["error"] and \
response["jsonrpc"] == "2.0" and \
utils.isInteger(response["id"]) and \
response["result"] is not None
- def toBatchPayload(self, messages):
+ # def toBatchPayload(self, messages):
? ++
- return [self.toPayload(message["method"], message["params"]) for]
+ # return [self.toPayload(message["method"], message["params"]) for]
? +
|
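With id generation moved out of Jsonrpc, some caller has to supply reqid on every request. A minimal sketch of what the request-manager side might look like; RequestManager and its transport callable are hypothetical, not part of the recorded code. (Note that the recorded fromPayload also references an errors module that this commit leaves unimported.)

import itertools

class RequestManager(object):
    def __init__(self, jsonrpc, transport):
        self.jsonrpc = jsonrpc          # the Jsonrpc helper from this commit
        self.transport = transport      # callable mapping raw request -> raw response
        self._ids = itertools.count(1)  # message id bookkeeping now lives here

    def request(self, method, params=None):
        payload = self.jsonrpc.toPayload(next(self._ids), method, params)
        return self.jsonrpc.fromPayload(self.transport(payload))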
3681b5a485662656d6419d95ad89f1fbdb7a2a50
|
myuw/context_processors.py
|
myuw/context_processors.py
|
def is_hybrid(request):
return {
'is_hybrid': 'HTTP_MYUW_HYBRID' in request.META
}
|
def is_hybrid(request):
return {
'is_hybrid': 'MyUW_Hybrid/1.0' in request.META['HTTP_USER_AGENT']
}
|
Update context processor to check for custom hybrid user agent.
|
Update context processor to check for custom hybrid user agent.
|
Python
|
apache-2.0
|
uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw
|
def is_hybrid(request):
return {
- 'is_hybrid': 'HTTP_MYUW_HYBRID' in request.META
+ 'is_hybrid': 'MyUW_Hybrid/1.0' in request.META['HTTP_USER_AGENT']
}
|
Update context processor to check for custom hybrid user agent.
|
## Code Before:
def is_hybrid(request):
return {
'is_hybrid': 'HTTP_MYUW_HYBRID' in request.META
}
## Instruction:
Update context processor to check for custom hybrid user agent.
## Code After:
def is_hybrid(request):
return {
'is_hybrid': 'MyUW_Hybrid/1.0' in request.META['HTTP_USER_AGENT']
}
|
def is_hybrid(request):
return {
- 'is_hybrid': 'HTTP_MYUW_HYBRID' in request.META
+ 'is_hybrid': 'MyUW_Hybrid/1.0' in request.META['HTTP_USER_AGENT']
}
|
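The new check keys off a custom substring in the User-Agent instead of a dedicated header. A sketch of exercising it with Django's test RequestFactory, assuming a configured settings module; the user-agent strings are illustrative. Note the recorded code indexes request.META['HTTP_USER_AGENT'] directly, so a request with no User-Agent header would raise KeyError; request.META.get('HTTP_USER_AGENT', '') would be the defensive variant.

from django.test import RequestFactory

def is_hybrid(request):
    return {'is_hybrid': 'MyUW_Hybrid/1.0' in request.META['HTTP_USER_AGENT']}

rf = RequestFactory()
native = rf.get('/', HTTP_USER_AGENT='Mozilla/5.0 MyUW_Hybrid/1.0')
browser = rf.get('/', HTTP_USER_AGENT='Mozilla/5.0')
assert is_hybrid(native)['is_hybrid'] is True
assert is_hybrid(browser)['is_hybrid'] is False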
743f4affcd89aa3d9fd37774e2e5f8e05525cb04
|
api/sync_wallet.py
|
api/sync_wallet.py
|
import urlparse
import os, sys
import json
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
from msc_apps import *
data_dir_root = os.environ.get('DATADIR')
def sync_wallet_response(request_dict):
if not request_dict.has_key('type'):
return (None, 'No field type in response dict '+str(request_dict))
req_type = request_dict['type'][0].upper()
if req_type == "SYNCWALLET":
response_data = syncWallets(request_dict['masterWallets'][0])
else:
return (None, req_type + ' is not supported')
response = { 'status': 'OK', 'data': response_data }
return (json.dumps(response), None)
def syncWallets(master_wallets_json):
master_wallets = json.loads(master_wallets_json)
for wallet in master_wallets:
uuid = wallet['uuid']
filename = data_dir_root + '/wallets/' + uuid + '.json'
with open(filename, 'w') as f:
json.dump(wallet, f)
return "OK"
def sync_wallet_handler(environ, start_response):
return general_handler(environ, start_response, sync_wallet_response)
|
import urlparse
import os, sys
import json
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
from msc_apps import *
data_dir_root = os.environ.get('DATADIR')
def sync_wallet_response(request_dict):
if not request_dict.has_key('type'):
return (None, 'No field type in response dict '+str(request_dict))
req_type = request_dict['type'][0].upper()
if req_type == "SYNCWALLET":
syncWallets(request_dict['masterWallets'][0])
else:
return (None, req_type + ' is not supported')
response = { 'status': 'OK' }
return (json.dumps(response), None)
def syncWallets(master_wallets_json):
master_wallets = json.loads(master_wallets_json)
for wallet in master_wallets:
uuid = wallet['uuid']
filename = data_dir_root + '/wallets/' + uuid + '.json'
with open(filename, 'w') as f:
json.dump(wallet, f)
return "OK"
def sync_wallet_handler(environ, start_response):
return general_handler(environ, start_response, sync_wallet_response)
|
Clean up return value for API
|
Clean up return value for API
|
Python
|
agpl-3.0
|
ripper234/omniwallet,maran/omniwallet,maran/omniwallet,Nevtep/omniwallet,FuzzyBearBTC/omniwallet,FuzzyBearBTC/omniwallet,achamely/omniwallet,curtislacy/omniwallet,habibmasuro/omniwallet,OmniLayer/omniwallet,ripper234/omniwallet,habibmasuro/omniwallet,ripper234/omniwallet,Nevtep/omniwallet,habibmasuro/omniwallet,curtislacy/omniwallet,OmniLayer/omniwallet,dexX7/omniwallet,arowser/omniwallet,habibmasuro/omniwallet,dexX7/omniwallet,Nevtep/omniwallet,VukDukic/omniwallet,arowser/omniwallet,achamely/omniwallet,FuzzyBearBTC/omniwallet,maran/omniwallet,VukDukic/omniwallet,OmniLayer/omniwallet,Nevtep/omniwallet,achamely/omniwallet,arowser/omniwallet,VukDukic/omniwallet,OmniLayer/omniwallet,achamely/omniwallet,dexX7/omniwallet,curtislacy/omniwallet
|
import urlparse
import os, sys
import json
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
from msc_apps import *
data_dir_root = os.environ.get('DATADIR')
def sync_wallet_response(request_dict):
if not request_dict.has_key('type'):
return (None, 'No field type in response dict '+str(request_dict))
req_type = request_dict['type'][0].upper()
if req_type == "SYNCWALLET":
- response_data = syncWallets(request_dict['masterWallets'][0])
+ syncWallets(request_dict['masterWallets'][0])
else:
return (None, req_type + ' is not supported')
- response = { 'status': 'OK', 'data': response_data }
+ response = { 'status': 'OK' }
return (json.dumps(response), None)
def syncWallets(master_wallets_json):
master_wallets = json.loads(master_wallets_json)
for wallet in master_wallets:
uuid = wallet['uuid']
filename = data_dir_root + '/wallets/' + uuid + '.json'
with open(filename, 'w') as f:
json.dump(wallet, f)
return "OK"
def sync_wallet_handler(environ, start_response):
return general_handler(environ, start_response, sync_wallet_response)
|
Clean up return value for API
|
## Code Before:
import urlparse
import os, sys
import json
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
from msc_apps import *
data_dir_root = os.environ.get('DATADIR')
def sync_wallet_response(request_dict):
if not request_dict.has_key('type'):
return (None, 'No field type in response dict '+str(request_dict))
req_type = request_dict['type'][0].upper()
if req_type == "SYNCWALLET":
response_data = syncWallets(request_dict['masterWallets'][0])
else:
return (None, req_type + ' is not supported')
response = { 'status': 'OK', 'data': response_data }
return (json.dumps(response), None)
def syncWallets(master_wallets_json):
master_wallets = json.loads(master_wallets_json)
for wallet in master_wallets:
uuid = wallet['uuid']
filename = data_dir_root + '/wallets/' + uuid + '.json'
with open(filename, 'w') as f:
json.dump(wallet, f)
return "OK"
def sync_wallet_handler(environ, start_response):
return general_handler(environ, start_response, sync_wallet_response)
## Instruction:
Clean up return value for API
## Code After:
import urlparse
import os, sys
import json
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
from msc_apps import *
data_dir_root = os.environ.get('DATADIR')
def sync_wallet_response(request_dict):
if not request_dict.has_key('type'):
return (None, 'No field type in response dict '+str(request_dict))
req_type = request_dict['type'][0].upper()
if req_type == "SYNCWALLET":
syncWallets(request_dict['masterWallets'][0])
else:
return (None, req_type + ' is not supported')
response = { 'status': 'OK' }
return (json.dumps(response), None)
def syncWallets(master_wallets_json):
master_wallets = json.loads(master_wallets_json)
for wallet in master_wallets:
uuid = wallet['uuid']
filename = data_dir_root + '/wallets/' + uuid + '.json'
with open(filename, 'w') as f:
json.dump(wallet, f)
return "OK"
def sync_wallet_handler(environ, start_response):
return general_handler(environ, start_response, sync_wallet_response)
|
import urlparse
import os, sys
import json
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
from msc_apps import *
data_dir_root = os.environ.get('DATADIR')
def sync_wallet_response(request_dict):
if not request_dict.has_key('type'):
return (None, 'No field type in response dict '+str(request_dict))
req_type = request_dict['type'][0].upper()
if req_type == "SYNCWALLET":
- response_data = syncWallets(request_dict['masterWallets'][0])
? ----------------
+ syncWallets(request_dict['masterWallets'][0])
else:
return (None, req_type + ' is not supported')
- response = { 'status': 'OK', 'data': response_data }
+ response = { 'status': 'OK' }
return (json.dumps(response), None)
def syncWallets(master_wallets_json):
master_wallets = json.loads(master_wallets_json)
for wallet in master_wallets:
uuid = wallet['uuid']
filename = data_dir_root + '/wallets/' + uuid + '.json'
with open(filename, 'w') as f:
json.dump(wallet, f)
return "OK"
def sync_wallet_handler(environ, start_response):
return general_handler(environ, start_response, sync_wallet_response)
|
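For callers, the cleanup means the JSON body now carries only a status field; the wallet write happens purely as a side effect. A usage sketch under the assumptions that the module is importable and DATADIR points at a writable directory containing a wallets/ subfolder; the uuid is a placeholder.

import json

request_dict = {
    'type': ['syncWallet'],                                # upper-cased by the handler
    'masterWallets': [json.dumps([{'uuid': 'abc-123'}])],  # one wallet, minimal shape
}
body, err = sync_wallet_response(request_dict)
assert err is None
assert json.loads(body) == {'status': 'OK'}                # no 'data' key any more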
f85425a2c74cf15555bbed233287ddbd7ab8b24e
|
flexget/ui/plugins/log/log.py
|
flexget/ui/plugins/log/log.py
|
from __future__ import unicode_literals, division, absolute_import
from flexget.ui import register_plugin, Blueprint, register_menu
log = Blueprint('log', __name__)
register_plugin(log)
log.register_angular_route(
'',
url=log.url_prefix,
template_url='index.html',
controller='LogViewCtrl'
)
log.register_css('log', 'css/log.css', order=99)
log.register_js('log', 'js/log.js')
log.register_js('angular-oboe', 'js/libs/angular-oboe.js')
log.register_js('oboe-browser', 'js/libs/oboe-browser.js')
register_menu(log.url_prefix, 'Log', icon='fa fa-file-text-o')
|
from __future__ import unicode_literals, division, absolute_import
from flexget.ui import register_plugin, Blueprint, register_menu
log = Blueprint('log', __name__)
register_plugin(log)
log.register_angular_route(
'',
url=log.url_prefix,
template_url='index.html',
controller='LogViewCtrl'
)
log.register_css('log', 'css/log.css', order=99)
log.register_js('log', 'js/log.js')
log.register_js('angular-oboe', 'libs/oboe/js/angular-oboe.js')
log.register_js('oboe-browser', 'libs/oboe/js/oboe-browser.js')
register_menu(log.url_prefix, 'Log', icon='fa fa-file-text-o')
|
Rename libs to keep with the standard
|
Rename libs to keep with the standard
|
Python
|
mit
|
LynxyssCZ/Flexget,qvazzler/Flexget,tobinjt/Flexget,malkavi/Flexget,ZefQ/Flexget,qk4l/Flexget,Flexget/Flexget,tsnoam/Flexget,ianstalk/Flexget,grrr2/Flexget,jacobmetrick/Flexget,qvazzler/Flexget,crawln45/Flexget,poulpito/Flexget,JorisDeRieck/Flexget,gazpachoking/Flexget,tobinjt/Flexget,oxc/Flexget,dsemi/Flexget,jawilson/Flexget,malkavi/Flexget,tsnoam/Flexget,jacobmetrick/Flexget,OmgOhnoes/Flexget,tarzasai/Flexget,jacobmetrick/Flexget,dsemi/Flexget,Danfocus/Flexget,antivirtel/Flexget,Danfocus/Flexget,drwyrm/Flexget,tobinjt/Flexget,JorisDeRieck/Flexget,drwyrm/Flexget,JorisDeRieck/Flexget,antivirtel/Flexget,lildadou/Flexget,tarzasai/Flexget,offbyone/Flexget,grrr2/Flexget,qvazzler/Flexget,Pretagonist/Flexget,crawln45/Flexget,Pretagonist/Flexget,grrr2/Flexget,tsnoam/Flexget,ZefQ/Flexget,dsemi/Flexget,poulpito/Flexget,crawln45/Flexget,gazpachoking/Flexget,sean797/Flexget,Pretagonist/Flexget,lildadou/Flexget,LynxyssCZ/Flexget,JorisDeRieck/Flexget,Flexget/Flexget,OmgOhnoes/Flexget,Flexget/Flexget,Flexget/Flexget,sean797/Flexget,LynxyssCZ/Flexget,jawilson/Flexget,tarzasai/Flexget,malkavi/Flexget,jawilson/Flexget,Danfocus/Flexget,antivirtel/Flexget,ianstalk/Flexget,OmgOhnoes/Flexget,jawilson/Flexget,oxc/Flexget,crawln45/Flexget,cvium/Flexget,qk4l/Flexget,sean797/Flexget,Danfocus/Flexget,lildadou/Flexget,LynxyssCZ/Flexget,ianstalk/Flexget,poulpito/Flexget,offbyone/Flexget,ZefQ/Flexget,tobinjt/Flexget,qk4l/Flexget,cvium/Flexget,cvium/Flexget,drwyrm/Flexget,offbyone/Flexget,malkavi/Flexget,oxc/Flexget
|
from __future__ import unicode_literals, division, absolute_import
from flexget.ui import register_plugin, Blueprint, register_menu
log = Blueprint('log', __name__)
register_plugin(log)
log.register_angular_route(
'',
url=log.url_prefix,
template_url='index.html',
controller='LogViewCtrl'
)
log.register_css('log', 'css/log.css', order=99)
log.register_js('log', 'js/log.js')
- log.register_js('angular-oboe', 'js/libs/angular-oboe.js')
+ log.register_js('angular-oboe', 'libs/oboe/js/angular-oboe.js')
- log.register_js('oboe-browser', 'js/libs/oboe-browser.js')
+ log.register_js('oboe-browser', 'libs/oboe/js/oboe-browser.js')
register_menu(log.url_prefix, 'Log', icon='fa fa-file-text-o')
|
Rename libs to keep with the standard
|
## Code Before:
from __future__ import unicode_literals, division, absolute_import
from flexget.ui import register_plugin, Blueprint, register_menu
log = Blueprint('log', __name__)
register_plugin(log)
log.register_angular_route(
'',
url=log.url_prefix,
template_url='index.html',
controller='LogViewCtrl'
)
log.register_css('log', 'css/log.css', order=99)
log.register_js('log', 'js/log.js')
log.register_js('angular-oboe', 'js/libs/angular-oboe.js')
log.register_js('oboe-browser', 'js/libs/oboe-browser.js')
register_menu(log.url_prefix, 'Log', icon='fa fa-file-text-o')
## Instruction:
Rename libs to keep with the standard
## Code After:
from __future__ import unicode_literals, division, absolute_import
from flexget.ui import register_plugin, Blueprint, register_menu
log = Blueprint('log', __name__)
register_plugin(log)
log.register_angular_route(
'',
url=log.url_prefix,
template_url='index.html',
controller='LogViewCtrl'
)
log.register_css('log', 'css/log.css', order=99)
log.register_js('log', 'js/log.js')
log.register_js('angular-oboe', 'libs/oboe/js/angular-oboe.js')
log.register_js('oboe-browser', 'libs/oboe/js/oboe-browser.js')
register_menu(log.url_prefix, 'Log', icon='fa fa-file-text-o')
|
from __future__ import unicode_literals, division, absolute_import
from flexget.ui import register_plugin, Blueprint, register_menu
log = Blueprint('log', __name__)
register_plugin(log)
log.register_angular_route(
'',
url=log.url_prefix,
template_url='index.html',
controller='LogViewCtrl'
)
log.register_css('log', 'css/log.css', order=99)
log.register_js('log', 'js/log.js')
- log.register_js('angular-oboe', 'js/libs/angular-oboe.js')
? ---
+ log.register_js('angular-oboe', 'libs/oboe/js/angular-oboe.js')
? ++++++++
- log.register_js('oboe-browser', 'js/libs/oboe-browser.js')
? ---
+ log.register_js('oboe-browser', 'libs/oboe/js/oboe-browser.js')
? ++++++++
register_menu(log.url_prefix, 'Log', icon='fa fa-file-text-o')
|
ada3d309541daaa8591a6bcb6ec42a2a2ff468db
|
catsnap/worker/tasks.py
|
catsnap/worker/tasks.py
|
from __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
def run(self, filename):
config = Client().config()
try:
distro_id = config['cloudfront_distribution_id']
Client().get_cloudfront().create_invalidation_request(
distro_id, filename)
except KeyError:
pass
except CloudFrontServerError as e:
if e.error_code == 'TooManyInvalidationsInProgress':
print 'yeah hi'
self.retry(e)
else:
raise
|
from __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
def run(self, filename):
config = Client().config()
try:
distro_id = config['cloudfront_distribution_id']
Client().get_cloudfront().create_invalidation_request(
distro_id, filename)
except KeyError:
pass
except CloudFrontServerError as e:
if e.error_code == 'TooManyInvalidationsInProgress':
self.retry(e)
else:
raise
|
Remove a line of debug output
|
Remove a line of debug output
|
Python
|
mit
|
ErinCall/catsnap,ErinCall/catsnap,ErinCall/catsnap
|
from __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
def run(self, filename):
config = Client().config()
try:
distro_id = config['cloudfront_distribution_id']
Client().get_cloudfront().create_invalidation_request(
distro_id, filename)
except KeyError:
pass
except CloudFrontServerError as e:
if e.error_code == 'TooManyInvalidationsInProgress':
- print 'yeah hi'
self.retry(e)
else:
raise
|
Remove a line of debug output
|
## Code Before:
from __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
def run(self, filename):
config = Client().config()
try:
distro_id = config['cloudfront_distribution_id']
Client().get_cloudfront().create_invalidation_request(
distro_id, filename)
except KeyError:
pass
except CloudFrontServerError as e:
if e.error_code == 'TooManyInvalidationsInProgress':
print 'yeah hi'
self.retry(e)
else:
raise
## Instruction:
Remove a line of debug output
## Code After:
from __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
def run(self, filename):
config = Client().config()
try:
distro_id = config['cloudfront_distribution_id']
Client().get_cloudfront().create_invalidation_request(
distro_id, filename)
except KeyError:
pass
except CloudFrontServerError as e:
if e.error_code == 'TooManyInvalidationsInProgress':
self.retry(e)
else:
raise
|
from __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
def run(self, filename):
config = Client().config()
try:
distro_id = config['cloudfront_distribution_id']
Client().get_cloudfront().create_invalidation_request(
distro_id, filename)
except KeyError:
pass
except CloudFrontServerError as e:
if e.error_code == 'TooManyInvalidationsInProgress':
- print 'yeah hi'
self.retry(e)
else:
raise
|
54e715f26ed62e62e8794d8084110091c8db580b
|
oauth_provider/utils.py
|
oauth_provider/utils.py
|
import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=request.META,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
|
import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
# Django converts the Authorization header into HTTP_AUTHORIZATION
# Warning: it doesn't happen in tests but it's useful, do not remove!
auth_header = {}
if 'Authorization' in request.META:
auth_header = {'Authorization': request.META['Authorization']}
elif 'HTTP_AUTHORIZATION' in request.META:
auth_header = {'Authorization': request.META['HTTP_AUTHORIZATION']}
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=auth_header,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
|
Fix a bug introduced in the latest revision: the auth header is now tested in initialize_server_request. Thanks to Chris McMichael for the report and patch.
|
Fix a bug introduced in the latest revision: the auth header is now tested in initialize_server_request. Thanks to Chris McMichael for the report and patch.
|
Python
|
bsd-3-clause
|
lukegb/django-oauth-plus,amrox/django-oauth-plus
|
import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
+ # Django converts the Authorization header into HTTP_AUTHORIZATION
+ # Warning: it doesn't happen in tests but it's useful, do not remove!
+ auth_header = {}
+ if 'Authorization' in request.META:
+ auth_header = {'Authorization': request.META['Authorization']}
+ elif 'HTTP_AUTHORIZATION' in request.META:
+ auth_header = {'Authorization': request.META['HTTP_AUTHORIZATION']}
+
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
- headers=request.META,
+ headers=auth_header,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
|
Fix a bug introduced in the latest revision: the auth header is now tested in initialize_server_request. Thanks to Chris McMichael for the report and patch.
|
## Code Before:
import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=request.META,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
## Instruction:
Fix a bug introduced in the latest revision: the auth header is now tested in initialize_server_request. Thanks to Chris McMichael for the report and patch.
## Code After:
import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
# Django converts the Authorization header into HTTP_AUTHORIZATION
# Warning: it doesn't happen in tests but it's useful, do not remove!
auth_header = {}
if 'Authorization' in request.META:
auth_header = {'Authorization': request.META['Authorization']}
elif 'HTTP_AUTHORIZATION' in request.META:
auth_header = {'Authorization': request.META['HTTP_AUTHORIZATION']}
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=auth_header,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
|
import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
+ # Django converts the Authorization header into HTTP_AUTHORIZATION
+ # Warning: it doesn't happen in tests but it's useful, do not remove!
+ auth_header = {}
+ if 'Authorization' in request.META:
+ auth_header = {'Authorization': request.META['Authorization']}
+ elif 'HTTP_AUTHORIZATION' in request.META:
+ auth_header = {'Authorization': request.META['HTTP_AUTHORIZATION']}
+
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
- headers=request.META,
? -----------
+ headers=auth_header,
? ++++++++++
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
|
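Background on the two-branch lookup: a WSGI server hands Django the Authorization header renamed to HTTP_AUTHORIZATION in request.META, while Django's test client can inject the un-renamed key, so both spellings must be checked. The same normalization in isolation, with illustrative dict contents:

def extract_auth_header(meta):
    # Prefer the raw key (test client), fall back to the WSGI-renamed key.
    if 'Authorization' in meta:
        return {'Authorization': meta['Authorization']}
    if 'HTTP_AUTHORIZATION' in meta:
        return {'Authorization': meta['HTTP_AUTHORIZATION']}
    return {}

assert extract_auth_header({'HTTP_AUTHORIZATION': 'OAuth oauth_token="abc"'}) == \
    {'Authorization': 'OAuth oauth_token="abc"'}
assert extract_auth_header({}) == {}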
8d229401ea69799638d8cd005bc4dc87bb4327a4
|
src/mist/io/tests/MyRequestsClass.py
|
src/mist/io/tests/MyRequestsClass.py
|
import requests
class MyRequests(object):
"""
Simple class to make requests with or without cookies etc.
This way we can have the same request methods both in io and core
"""
def __init__(self, uri, data=None, cookie=None, timeout=None):
self.headers = {'Cookie': cookie}
self.timeout = timeout
self.uri = uri
self.data = data
def post(self):
response = requests.post(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def get(self):
response = requests.get(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def put(self):
response = requests.put(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def delete(self):
response = requests.delete(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
|
import requests
class MyRequests(object):
"""
Simple class to make requests with or without cookies etc.
This way we can have the same request methods both in io and core
"""
def __init__(self, uri, data=None, cookie=None, timeout=None, csrf=None):
self.headers = {'Cookie': cookie, 'Csrf-Token': csrf}
self.timeout = timeout
self.uri = uri
self.data = data
def post(self):
response = requests.post(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def get(self):
response = requests.get(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def put(self):
response = requests.put(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def delete(self):
response = requests.delete(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
|
Add CSRF token to MyRequests class
|
Add CSRF token to MyRequests class
|
Python
|
agpl-3.0
|
kelonye/mist.io,munkiat/mist.io,Lao-liu/mist.io,DimensionDataCBUSydney/mist.io,afivos/mist.io,DimensionDataCBUSydney/mist.io,afivos/mist.io,Lao-liu/mist.io,DimensionDataCBUSydney/mist.io,kelonye/mist.io,zBMNForks/mist.io,Lao-liu/mist.io,johnnyWalnut/mist.io,munkiat/mist.io,DimensionDataCBUSydney/mist.io,Lao-liu/mist.io,zBMNForks/mist.io,zBMNForks/mist.io,johnnyWalnut/mist.io,johnnyWalnut/mist.io,afivos/mist.io,munkiat/mist.io,munkiat/mist.io,kelonye/mist.io
|
import requests
class MyRequests(object):
"""
Simple class to make requests with or without cookies etc.
This way we can have the same request methods both in io and core
"""
- def __init__(self, uri, data=None, cookie=None, timeout=None):
+ def __init__(self, uri, data=None, cookie=None, timeout=None, csrf=None):
- self.headers = {'Cookie': cookie}
+ self.headers = {'Cookie': cookie, 'Csrf-Token': csrf}
self.timeout = timeout
self.uri = uri
self.data = data
def post(self):
response = requests.post(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def get(self):
response = requests.get(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def put(self):
response = requests.put(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def delete(self):
response = requests.delete(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
|
Add CSRF token to MyRequests class
|
## Code Before:
import requests
class MyRequests(object):
"""
Simple class to make requests with or without cookies etc.
This way we can have the same request methods both in io and core
"""
def __init__(self, uri, data=None, cookie=None, timeout=None):
self.headers = {'Cookie': cookie}
self.timeout = timeout
self.uri = uri
self.data = data
def post(self):
response = requests.post(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def get(self):
response = requests.get(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def put(self):
response = requests.put(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def delete(self):
response = requests.delete(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
## Instruction:
Add CSRF token to MyRequests class
## Code After:
import requests
class MyRequests(object):
"""
Simple class to make requests with or without cookies etc.
This way we can have the same request methods both in io and core
"""
def __init__(self, uri, data=None, cookie=None, timeout=None, csrf=None):
self.headers = {'Cookie': cookie, 'Csrf-Token': csrf}
self.timeout = timeout
self.uri = uri
self.data = data
def post(self):
response = requests.post(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def get(self):
response = requests.get(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def put(self):
response = requests.put(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def delete(self):
response = requests.delete(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
|
import requests
class MyRequests(object):
"""
Simple class to make requests with or without cookies etc.
This way we can have the same request methods both in io and core
"""
- def __init__(self, uri, data=None, cookie=None, timeout=None):
+ def __init__(self, uri, data=None, cookie=None, timeout=None, csrf=None):
? +++++++++++
- self.headers = {'Cookie': cookie}
+ self.headers = {'Cookie': cookie, 'Csrf-Token': csrf}
? ++++++++++++++++++++
self.timeout = timeout
self.uri = uri
self.data = data
def post(self):
response = requests.post(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def get(self):
response = requests.get(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def put(self):
response = requests.put(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def delete(self):
response = requests.delete(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
|
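In use, the new csrf argument simply lands in a Csrf-Token request header next to the cookie. A sketch assuming MyRequests is importable; the URL, cookie, and token are placeholders, not real endpoints or credentials.

req = MyRequests('https://example.com/api/machines',
                 data={'name': 'test'},
                 cookie='session=abc',       # placeholder session cookie
                 timeout=10,
                 csrf='token-123')           # placeholder CSRF token
resp = req.post()                            # request carries both Cookie and Csrf-Token headers
print(resp.status_code)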
dbfe5fcb87762d68580756d6466bc61fa8ab4a56
|
histomicstk/preprocessing/color_deconvolution/utils.py
|
histomicstk/preprocessing/color_deconvolution/utils.py
|
import numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
def get_stain_matrix(args):
"""Get the stain matrix corresponding to the args.stain_$index and
args.stain_$index_vector arguments for values of index 1, 2, 3.
Return a numpy array of column vectors.
"""
return numpy.array([get_stain_vector(args, i) for i in 1, 2, 3]).T
__all__ = (
'get_stain_vector',
)
|
import numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
def get_stain_matrix(args, count=3):
"""Get the stain matrix corresponding to the args.stain_$index and
args.stain_$index_vector arguments for values of index 1 to count.
Return a numpy array of column vectors.
"""
return numpy.array([get_stain_vector(args, i+1) for i in range(count)]).T
__all__ = (
'get_stain_vector',
)
|
Enhance get_stain_matrix to take any desired number of vectors
|
Enhance get_stain_matrix to take any desired number of vectors
|
Python
|
apache-2.0
|
DigitalSlideArchive/HistomicsTK,DigitalSlideArchive/HistomicsTK
|
import numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
- def get_stain_matrix(args):
+ def get_stain_matrix(args, count=3):
"""Get the stain matrix corresponding to the args.stain_$index and
- args.stain_$index_vector arguments for values of index 1, 2, 3.
+ args.stain_$index_vector arguments for values of index 1 to count.
Return a numpy array of column vectors.
"""
- return numpy.array([get_stain_vector(args, i) for i in 1, 2, 3]).T
+ return numpy.array([get_stain_vector(args, i+1) for i in range(count)]).T
__all__ = (
'get_stain_vector',
)
|
Enhance get_stain_matrix to take any desired number of vectors
|
## Code Before:
import numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
def get_stain_matrix(args):
"""Get the stain matrix corresponding to the args.stain_$index and
args.stain_$index_vector arguments for values of index 1, 2, 3.
Return a numpy array of column vectors.
"""
return numpy.array([get_stain_vector(args, i) for i in 1, 2, 3]).T
__all__ = (
'get_stain_vector',
)
## Instruction:
Enhance get_stain_matrix to take any desired number of vectors
## Code After:
import numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
def get_stain_matrix(args, count=3):
"""Get the stain matrix corresponding to the args.stain_$index and
args.stain_$index_vector arguments for values of index 1 to count.
Return a numpy array of column vectors.
"""
return numpy.array([get_stain_vector(args, i+1) for i in range(count)]).T
__all__ = (
'get_stain_vector',
)
|
import numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
- def get_stain_matrix(args):
+ def get_stain_matrix(args, count=3):
? +++++++++
"""Get the stain matrix corresponding to the args.stain_$index and
- args.stain_$index_vector arguments for values of index 1, 2, 3.
? - ^^ ^
+ args.stain_$index_vector arguments for values of index 1 to count.
? ^^ ^^^^^
Return a numpy array of column vectors.
"""
- return numpy.array([get_stain_vector(args, i) for i in 1, 2, 3]).T
? ^^^^^^^
+ return numpy.array([get_stain_vector(args, i+1) for i in range(count)]).T
? ++ ^^^^^^^^^^^^
__all__ = (
'get_stain_vector',
)
|
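A minimal usage sketch of the generalized helper above; the argparse Namespace fields follow the stain_$index naming convention from the code, and the 'hematoxylin'/'eosin' keys are assumed to exist in stain_color_map.

import argparse

# get_stain_matrix and stain_color_map come from the module in the commit above.
args = argparse.Namespace(
    stain_1='hematoxylin', stain_1_vector=None,
    stain_2='eosin', stain_2_vector=None,
)
matrix = get_stain_matrix(args, count=2)
print(matrix.shape)  # (3, 2): one column vector per requested stain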
76f6497389d2e6588d91fbd7c24d2f368592140b
|
tests/utils.py
|
tests/utils.py
|
import bottle
import threading
import time as _time
def start_bottle_server(app, port, **kwargs):
server_thread = ServerThread(app, port, kwargs)
server_thread.daemon = True
server_thread.start()
_time.sleep(0.1)
class ServerThread(threading.Thread):
def __init__(self, app, port, server_kwargs):
threading.Thread.__init__(self)
self.app = app
self.port = port
self.server_kwargs = server_kwargs
def run(self):
bottle.run(self.app, host='localhost', port=self.port, **self.server_kwargs)
# http://code.activestate.com/recipes/106033-deep-list-to-convert-a-nested-tuple-of-tuples/
def listit(t):
return list(map(listit, t)) if isinstance(t, (list, tuple)) else t
|
import bottle
import threading
import socket
import time as _time
def start_bottle_server(app, port, **kwargs):
server_thread = ServerThread(app, port, kwargs)
server_thread.daemon = True
server_thread.start()
ok = False
for i in range(10):
try:
conn = socket.create_connection(('127.0.0.1', port), 0.1)
ok = True
break
except socket.error as e:
_time.sleep(0.1)
if not ok:
import warnings
warnings.warn('Server did not start after 1 second')
class ServerThread(threading.Thread):
def __init__(self, app, port, server_kwargs):
threading.Thread.__init__(self)
self.app = app
self.port = port
self.server_kwargs = server_kwargs
def run(self):
bottle.run(self.app, host='localhost', port=self.port, **self.server_kwargs)
# http://code.activestate.com/recipes/106033-deep-list-to-convert-a-nested-tuple-of-tuples/
def listit(t):
return list(map(listit, t)) if isinstance(t, (list, tuple)) else t
|
Test server existence via a socket connection
|
Test server existence via a socket connection
|
Python
|
bsd-2-clause
|
p/webracer
|
import bottle
import threading
+ import socket
import time as _time
def start_bottle_server(app, port, **kwargs):
server_thread = ServerThread(app, port, kwargs)
server_thread.daemon = True
server_thread.start()
+
+ ok = False
+ for i in range(10):
+ try:
+ conn = socket.create_connection(('127.0.0.1', port), 0.1)
+ ok = True
+ break
+ except socket.error as e:
- _time.sleep(0.1)
+ _time.sleep(0.1)
+ if not ok:
+ import warnings
+ warnings.warn('Server did not start after 1 second')
class ServerThread(threading.Thread):
def __init__(self, app, port, server_kwargs):
threading.Thread.__init__(self)
self.app = app
self.port = port
self.server_kwargs = server_kwargs
def run(self):
bottle.run(self.app, host='localhost', port=self.port, **self.server_kwargs)
# http://code.activestate.com/recipes/106033-deep-list-to-convert-a-nested-tuple-of-tuples/
def listit(t):
return list(map(listit, t)) if isinstance(t, (list, tuple)) else t
|
Test server existence via a socket connection
|
## Code Before:
import bottle
import threading
import time as _time
def start_bottle_server(app, port, **kwargs):
server_thread = ServerThread(app, port, kwargs)
server_thread.daemon = True
server_thread.start()
_time.sleep(0.1)
class ServerThread(threading.Thread):
def __init__(self, app, port, server_kwargs):
threading.Thread.__init__(self)
self.app = app
self.port = port
self.server_kwargs = server_kwargs
def run(self):
bottle.run(self.app, host='localhost', port=self.port, **self.server_kwargs)
# http://code.activestate.com/recipes/106033-deep-list-to-convert-a-nested-tuple-of-tuples/
def listit(t):
return list(map(listit, t)) if isinstance(t, (list, tuple)) else t
## Instruction:
Test server existence via a socket connection
## Code After:
import bottle
import threading
import socket
import time as _time
def start_bottle_server(app, port, **kwargs):
server_thread = ServerThread(app, port, kwargs)
server_thread.daemon = True
server_thread.start()
ok = False
for i in range(10):
try:
conn = socket.create_connection(('127.0.0.1', port), 0.1)
ok = True
break
except socket.error as e:
_time.sleep(0.1)
if not ok:
import warnings
warnings.warn('Server did not start after 1 second')
class ServerThread(threading.Thread):
def __init__(self, app, port, server_kwargs):
threading.Thread.__init__(self)
self.app = app
self.port = port
self.server_kwargs = server_kwargs
def run(self):
bottle.run(self.app, host='localhost', port=self.port, **self.server_kwargs)
# http://code.activestate.com/recipes/106033-deep-list-to-convert-a-nested-tuple-of-tuples/
def listit(t):
return list(map(listit, t)) if isinstance(t, (list, tuple)) else t
|
import bottle
import threading
+ import socket
import time as _time
def start_bottle_server(app, port, **kwargs):
server_thread = ServerThread(app, port, kwargs)
server_thread.daemon = True
server_thread.start()
+
+ ok = False
+ for i in range(10):
+ try:
+ conn = socket.create_connection(('127.0.0.1', port), 0.1)
+ ok = True
+ break
+ except socket.error as e:
- _time.sleep(0.1)
+ _time.sleep(0.1)
? ++++++++
+ if not ok:
+ import warnings
+ warnings.warn('Server did not start after 1 second')
class ServerThread(threading.Thread):
def __init__(self, app, port, server_kwargs):
threading.Thread.__init__(self)
self.app = app
self.port = port
self.server_kwargs = server_kwargs
def run(self):
bottle.run(self.app, host='localhost', port=self.port, **self.server_kwargs)
# http://code.activestate.com/recipes/106033-deep-list-to-convert-a-nested-tuple-of-tuples/
def listit(t):
return list(map(listit, t)) if isinstance(t, (list, tuple)) else t
|
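A sketch of how the readiness loop gets exercised from a test; the port number and route are illustrative, not taken from the repository.

import bottle

app = bottle.Bottle()

@app.route('/ping')
def ping():
    return 'pong'

# start_bottle_server (from the module above) now returns once the socket
# accepts a connection instead of after a fixed 0.1-second sleep.
start_bottle_server(app, 8088)

One nuance: the warning says 1 second, but ten iterations of a 0.1-second connection timeout plus a 0.1-second sleep can take closer to two seconds in the worst case.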
8440ffcfd87814e04188fe4077717e132f285cb2
|
ckanext/requestdata/tests/test_helpers.py
|
ckanext/requestdata/tests/test_helpers.py
|
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
@classmethod
def setup_class(self):
self.app = helpers._get_test_app()
if not p.plugin_loaded('requestdata'):
p.load('requestdata')
def setup(self):
helpers.reset_db()
@classmethod
def teardown_class(self):
if p.plugin_loaded('requestdata'):
p.unload('requestdata')
class TestHelpers(ActionBase):
def test_time_ago_from_datetime_valid(self):
d = datetime.today() - timedelta(days=1)
eq_(h.time_ago_from_datetime(d), '1 day ago')
def test_time_ago_from_datetime_valid_string_result(self):
d = datetime.today() - timedelta(days=2)
assert isinstance(h.time_ago_from_datetime(d), str)
|
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
@classmethod
def setup_class(self):
self.app = helpers._get_test_app()
if not p.plugin_loaded('requestdata'):
p.load('requestdata')
def setup(self):
helpers.reset_db()
@classmethod
def teardown_class(self):
if p.plugin_loaded('requestdata'):
p.unload('requestdata')
class TestHelpers(ActionBase):
def test_time_ago_from_datetime_valid(self):
d = datetime.today() - timedelta(days=1)
eq_(h.time_ago_from_datetime(d), '1 day ago')
def test_time_ago_from_datetime_valid_string_result(self):
d = datetime.today() - timedelta(days=2)
assert isinstance(h.time_ago_from_datetime(d), str)
def test_convert_id_to_emails_valid(self):
user = factories.User()
users = [{'name': user['name']}]
ids = user['id']
response = h.convert_id_to_email(ids)
email = '[email protected]'
assert email == response
|
Add tests for valid email converter
|
Add tests for valid email converter
|
Python
|
agpl-3.0
|
ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata
|
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
@classmethod
def setup_class(self):
self.app = helpers._get_test_app()
if not p.plugin_loaded('requestdata'):
p.load('requestdata')
def setup(self):
helpers.reset_db()
@classmethod
def teardown_class(self):
if p.plugin_loaded('requestdata'):
p.unload('requestdata')
class TestHelpers(ActionBase):
def test_time_ago_from_datetime_valid(self):
d = datetime.today() - timedelta(days=1)
eq_(h.time_ago_from_datetime(d), '1 day ago')
def test_time_ago_from_datetime_valid_string_result(self):
d = datetime.today() - timedelta(days=2)
assert isinstance(h.time_ago_from_datetime(d), str)
+ def test_convert_id_to_emails_valid(self):
+ user = factories.User()
+ users = [{'name': user['name']}]
+ ids = user['id']
+ response = h.convert_id_to_email(ids)
+ email = '[email protected]'
+ assert email == response
+
+
|
Add tests for valid email converter
|
## Code Before:
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
@classmethod
def setup_class(self):
self.app = helpers._get_test_app()
if not p.plugin_loaded('requestdata'):
p.load('requestdata')
def setup(self):
helpers.reset_db()
@classmethod
def teardown_class(self):
if p.plugin_loaded('requestdata'):
p.unload('requestdata')
class TestHelpers(ActionBase):
def test_time_ago_from_datetime_valid(self):
d = datetime.today() - timedelta(days=1)
eq_(h.time_ago_from_datetime(d), '1 day ago')
def test_time_ago_from_datetime_valid_string_result(self):
d = datetime.today() - timedelta(days=2)
assert isinstance(h.time_ago_from_datetime(d), str)
## Instruction:
Add tests for valid email converter
## Code After:
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
@classmethod
def setup_class(self):
self.app = helpers._get_test_app()
if not p.plugin_loaded('requestdata'):
p.load('requestdata')
def setup(self):
helpers.reset_db()
@classmethod
def teardown_class(self):
if p.plugin_loaded('requestdata'):
p.unload('requestdata')
class TestHelpers(ActionBase):
def test_time_ago_from_datetime_valid(self):
d = datetime.today() - timedelta(days=1)
eq_(h.time_ago_from_datetime(d), '1 day ago')
def test_time_ago_from_datetime_valid_string_result(self):
d = datetime.today() - timedelta(days=2)
assert isinstance(h.time_ago_from_datetime(d), str)
def test_convert_id_to_emails_valid(self):
user = factories.User()
users = [{'name': user['name']}]
ids = user['id']
response = h.convert_id_to_email(ids)
email = '[email protected]'
assert email == response
|
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
@classmethod
def setup_class(self):
self.app = helpers._get_test_app()
if not p.plugin_loaded('requestdata'):
p.load('requestdata')
def setup(self):
helpers.reset_db()
@classmethod
def teardown_class(self):
if p.plugin_loaded('requestdata'):
p.unload('requestdata')
class TestHelpers(ActionBase):
def test_time_ago_from_datetime_valid(self):
d = datetime.today() - timedelta(days=1)
eq_(h.time_ago_from_datetime(d), '1 day ago')
def test_time_ago_from_datetime_valid_string_result(self):
d = datetime.today() - timedelta(days=2)
assert isinstance(h.time_ago_from_datetime(d), str)
+
+ def test_convert_id_to_emails_valid(self):
+ user = factories.User()
+ users = [{'name': user['name']}]
+ ids = user['id']
+ response = h.convert_id_to_email(ids)
+ email = '[email protected]'
+ assert email == response
+
|
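The helper under test is not shown in this record; one plausible shape, assuming it resolves the id through CKAN's user_show action, is sketched below. Note also that the users list built in the test is never used as committed.

import ckan.plugins.toolkit as toolkit

def convert_id_to_email(ids):
    # Hypothetical implementation: look the user up by id and return the
    # stored email address.
    user = toolkit.get_action('user_show')({'ignore_auth': True}, {'id': ids})
    return user.get('email')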
c8d2d6a4eace2107639badd17983e048dc9259e5
|
mfh.py
|
mfh.py
|
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import update
from arguments import parse
def main():
q = Event()
mfhclient_process = Process(
args=(args, q,),
name="mfhclient_process",
target=mfhclient.main,
)
mfhclient_process.start()
trigger_process = Process(
args=(q,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive():
time.sleep(5)
else:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
main()
|
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import update
from arguments import parse
from settings import HONEYPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
if args.client is not None:
mfhclient_process.start()
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive():
time.sleep(5)
else:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
main()
|
Add condition to only launch client if -c or --client is specified
|
Add condition to only launch client if -c or --client is specified
|
Python
|
mit
|
Zloool/manyfaced-honeypot
|
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import update
from arguments import parse
+ from settings import HONEYPORT
def main():
- q = Event()
+ update_event = Event()
mfhclient_process = Process(
- args=(args, q,),
+ args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
+ if args.client is not None:
- mfhclient_process.start()
+ mfhclient_process.start()
trigger_process = Process(
- args=(q,),
+ args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive():
time.sleep(5)
else:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
+ if args.c:
+ args.client = HONEYPORT
main()
|
Add condition to only launch client if -c or --client is specified
|
## Code Before:
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import update
from arguments import parse
def main():
q = Event()
mfhclient_process = Process(
args=(args, q,),
name="mfhclient_process",
target=mfhclient.main,
)
mfhclient_process.start()
trigger_process = Process(
args=(q,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive():
time.sleep(5)
else:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
main()
## Instruction:
Add condition to only launch client if -c or --client is specified
## Code After:
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import update
from arguments import parse
from settings import HONEYPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
if args.client is not None:
mfhclient_process.start()
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive():
time.sleep(5)
else:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
main()
|
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import update
from arguments import parse
+ from settings import HONEYPORT
def main():
- q = Event()
+ update_event = Event()
mfhclient_process = Process(
- args=(args, q,),
? ^
+ args=(args, update_event,),
? ^^^^^^^^^^^^
name="mfhclient_process",
target=mfhclient.main,
)
+ if args.client is not None:
- mfhclient_process.start()
+ mfhclient_process.start()
? ++++
trigger_process = Process(
- args=(q,),
+ args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive():
time.sleep(5)
else:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
+ if args.c:
+ args.client = HONEYPORT
main()
|
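The change assumes arguments.parse() exposes both a boolean -c flag and a --client port; a sketch of such a parser (the real arguments.py may differ):

import argparse

def parse():
    parser = argparse.ArgumentParser(description='manyfaced honeypot')
    parser.add_argument('-c', action='store_true',
                        help='run the client on the default HONEYPORT')
    parser.add_argument('--client', type=int, default=None, metavar='PORT',
                        help='run the client on the given port')
    return parser.parse_args()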
47d9217b6ee9837987d25d77cc6e3c750766ed90
|
tests/test_formats.py
|
tests/test_formats.py
|
from contextlib import contextmanager
from tempfile import TemporaryDirectory
from django.core.management import call_command
from django.test import TestCase
from django_archive import archivers
class FormatsTestCase(TestCase):
"""
Test that the archive command works with all available formats
"""
_FORMATS = (
archivers.TARBALL,
archivers.TARBALL_GZ,
archivers.TARBALL_BZ2,
archivers.TARBALL_XZ,
archivers.ZIP,
)
@contextmanager
def _wrap_in_temp_dir(self):
with TemporaryDirectory() as directory:
yield self.settings(ARCHIVE_DIRECTORY=directory)
def test_archive(self):
"""
Test each format
"""
for fmt in self._FORMATS:
with self.subTest(fmt=fmt):
with self._wrap_in_temp_dir():
with self.settings(ARCHIVE_FORMAT=fmt):
call_command('archive')
|
from contextlib import contextmanager
from tempfile import TemporaryDirectory
from django.core.management import call_command
from django.test import TestCase
from django_archive import archivers
class FormatsTestCase(TestCase):
"""
Test that the archive command works with all available formats
"""
_FORMATS = (
archivers.TARBALL,
archivers.TARBALL_GZ,
archivers.TARBALL_BZ2,
archivers.TARBALL_XZ,
archivers.ZIP,
)
@contextmanager
def _wrap_in_temp_dir(self):
with TemporaryDirectory() as directory:
with self.settings(ARCHIVE_DIRECTORY=directory):
yield None
def test_archive(self):
"""
Test each format
"""
for fmt in self._FORMATS:
with self.subTest(fmt=fmt):
with self._wrap_in_temp_dir():
with self.settings(ARCHIVE_FORMAT=fmt):
call_command('archive')
|
Fix bug in temporary directory generation.
|
Fix bug in temporary directory generation.
|
Python
|
mit
|
nathan-osman/django-archive,nathan-osman/django-archive
|
from contextlib import contextmanager
from tempfile import TemporaryDirectory
from django.core.management import call_command
from django.test import TestCase
from django_archive import archivers
class FormatsTestCase(TestCase):
"""
Test that the archive command works with all available formats
"""
_FORMATS = (
archivers.TARBALL,
archivers.TARBALL_GZ,
archivers.TARBALL_BZ2,
archivers.TARBALL_XZ,
archivers.ZIP,
)
@contextmanager
def _wrap_in_temp_dir(self):
with TemporaryDirectory() as directory:
- yield self.settings(ARCHIVE_DIRECTORY=directory)
+ with self.settings(ARCHIVE_DIRECTORY=directory):
+ yield None
def test_archive(self):
"""
Test each format
"""
for fmt in self._FORMATS:
with self.subTest(fmt=fmt):
with self._wrap_in_temp_dir():
with self.settings(ARCHIVE_FORMAT=fmt):
call_command('archive')
|
Fix bug in temporary directory generation.
|
## Code Before:
from contextlib import contextmanager
from tempfile import TemporaryDirectory
from django.core.management import call_command
from django.test import TestCase
from django_archive import archivers
class FormatsTestCase(TestCase):
"""
Test that the archive command works with all available formats
"""
_FORMATS = (
archivers.TARBALL,
archivers.TARBALL_GZ,
archivers.TARBALL_BZ2,
archivers.TARBALL_XZ,
archivers.ZIP,
)
@contextmanager
def _wrap_in_temp_dir(self):
with TemporaryDirectory() as directory:
yield self.settings(ARCHIVE_DIRECTORY=directory)
def test_archive(self):
"""
Test each format
"""
for fmt in self._FORMATS:
with self.subTest(fmt=fmt):
with self._wrap_in_temp_dir():
with self.settings(ARCHIVE_FORMAT=fmt):
call_command('archive')
## Instruction:
Fix bug in temporary directory generation.
## Code After:
from contextlib import contextmanager
from tempfile import TemporaryDirectory
from django.core.management import call_command
from django.test import TestCase
from django_archive import archivers
class FormatsTestCase(TestCase):
"""
Test that the archive command works with all available formats
"""
_FORMATS = (
archivers.TARBALL,
archivers.TARBALL_GZ,
archivers.TARBALL_BZ2,
archivers.TARBALL_XZ,
archivers.ZIP,
)
@contextmanager
def _wrap_in_temp_dir(self):
with TemporaryDirectory() as directory:
with self.settings(ARCHIVE_DIRECTORY=directory):
yield None
def test_archive(self):
"""
Test each format
"""
for fmt in self._FORMATS:
with self.subTest(fmt=fmt):
with self._wrap_in_temp_dir():
with self.settings(ARCHIVE_FORMAT=fmt):
call_command('archive')
|
from contextlib import contextmanager
from tempfile import TemporaryDirectory
from django.core.management import call_command
from django.test import TestCase
from django_archive import archivers
class FormatsTestCase(TestCase):
"""
Test that the archive command works with all available formats
"""
_FORMATS = (
archivers.TARBALL,
archivers.TARBALL_GZ,
archivers.TARBALL_BZ2,
archivers.TARBALL_XZ,
archivers.ZIP,
)
@contextmanager
def _wrap_in_temp_dir(self):
with TemporaryDirectory() as directory:
- yield self.settings(ARCHIVE_DIRECTORY=directory)
? ^ ^^^
+ with self.settings(ARCHIVE_DIRECTORY=directory):
? ^ ^^ +
+ yield None
def test_archive(self):
"""
Test each format
"""
for fmt in self._FORMATS:
with self.subTest(fmt=fmt):
with self._wrap_in_temp_dir():
with self.settings(ARCHIVE_FORMAT=fmt):
call_command('archive')
|
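The bug: the old generator yielded the settings() context manager object without ever entering it, so ARCHIVE_DIRECTORY was never actually overridden. An equivalent fix using contextlib.ExitStack, shown only as an alternative sketch to the nested-with version in the commit:

from contextlib import ExitStack, contextmanager
from tempfile import TemporaryDirectory

@contextmanager
def _wrap_in_temp_dir(self):
    # Drop-in replacement for the method above; both contexts are entered
    # and unwound in the right order.
    with ExitStack() as stack:
        directory = stack.enter_context(TemporaryDirectory())
        stack.enter_context(self.settings(ARCHIVE_DIRECTORY=directory))
        yield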
dd248a14a40dea03458985640571bccf9b38b030
|
conftest.py
|
conftest.py
|
import pytest
import compas
import math
import numpy
def pytest_ignore_collect(path):
if "rhino" in str(path):
return True
if "blender" in str(path):
return True
if "ghpython" in str(path):
return True
if "matlab" in str(path):
return True
if "robots" in str(path):
return True
if str(path).endswith('_cli.py'):
return True
@pytest.fixture(autouse=True)
def add_compas(doctest_namespace):
doctest_namespace["compas"] = compas
@pytest.fixture(autouse=True)
def add_math(doctest_namespace):
doctest_namespace["math"] = math
@pytest.fixture(autouse=True)
def add_np(doctest_namespace):
doctest_namespace["np"] = numpy
|
import pytest
import compas
import math
import numpy
def pytest_ignore_collect(path):
if "rhino" in str(path):
return True
if "blender" in str(path):
return True
if "ghpython" in str(path):
return True
if "matlab" in str(path):
return True
if str(path).endswith('_cli.py'):
return True
@pytest.fixture(autouse=True)
def add_compas(doctest_namespace):
doctest_namespace["compas"] = compas
@pytest.fixture(autouse=True)
def add_math(doctest_namespace):
doctest_namespace["math"] = math
@pytest.fixture(autouse=True)
def add_np(doctest_namespace):
doctest_namespace["np"] = numpy
|
Remove robots from pytest path ignore, as requested by @gonzalocasas.
|
Remove robots from pytest path ignore, as requested by @gonzalocasas.
|
Python
|
mit
|
compas-dev/compas
|
import pytest
import compas
import math
import numpy
def pytest_ignore_collect(path):
if "rhino" in str(path):
return True
if "blender" in str(path):
return True
if "ghpython" in str(path):
return True
if "matlab" in str(path):
- return True
-
- if "robots" in str(path):
return True
if str(path).endswith('_cli.py'):
return True
@pytest.fixture(autouse=True)
def add_compas(doctest_namespace):
doctest_namespace["compas"] = compas
@pytest.fixture(autouse=True)
def add_math(doctest_namespace):
doctest_namespace["math"] = math
@pytest.fixture(autouse=True)
def add_np(doctest_namespace):
doctest_namespace["np"] = numpy
|
Remove robots from pytest path ignore, as requested by @gonzalocasas.
|
## Code Before:
import pytest
import compas
import math
import numpy
def pytest_ignore_collect(path):
if "rhino" in str(path):
return True
if "blender" in str(path):
return True
if "ghpython" in str(path):
return True
if "matlab" in str(path):
return True
if "robots" in str(path):
return True
if str(path).endswith('_cli.py'):
return True
@pytest.fixture(autouse=True)
def add_compas(doctest_namespace):
doctest_namespace["compas"] = compas
@pytest.fixture(autouse=True)
def add_math(doctest_namespace):
doctest_namespace["math"] = math
@pytest.fixture(autouse=True)
def add_np(doctest_namespace):
doctest_namespace["np"] = numpy
## Instruction:
Remove robots from pytest path ignore, as requested by @gonzalocasas.
## Code After:
import pytest
import compas
import math
import numpy
def pytest_ignore_collect(path):
if "rhino" in str(path):
return True
if "blender" in str(path):
return True
if "ghpython" in str(path):
return True
if "matlab" in str(path):
return True
if str(path).endswith('_cli.py'):
return True
@pytest.fixture(autouse=True)
def add_compas(doctest_namespace):
doctest_namespace["compas"] = compas
@pytest.fixture(autouse=True)
def add_math(doctest_namespace):
doctest_namespace["math"] = math
@pytest.fixture(autouse=True)
def add_np(doctest_namespace):
doctest_namespace["np"] = numpy
|
import pytest
import compas
import math
import numpy
def pytest_ignore_collect(path):
if "rhino" in str(path):
return True
if "blender" in str(path):
return True
if "ghpython" in str(path):
return True
if "matlab" in str(path):
return True
- if "robots" in str(path):
- return True
-
if str(path).endswith('_cli.py'):
return True
@pytest.fixture(autouse=True)
def add_compas(doctest_namespace):
doctest_namespace["compas"] = compas
@pytest.fixture(autouse=True)
def add_math(doctest_namespace):
doctest_namespace["math"] = math
@pytest.fixture(autouse=True)
def add_np(doctest_namespace):
doctest_namespace["np"] = numpy
|
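The chained if-statements collapse naturally; an illustrative refactor that preserves the hook's return-True-or-None contract (returning an explicit False would claim the decision for this conftest and block other plugins):

IGNORED_KEYWORDS = ('rhino', 'blender', 'ghpython', 'matlab')

def pytest_ignore_collect(path):
    p = str(path)
    if any(keyword in p for keyword in IGNORED_KEYWORDS) or p.endswith('_cli.py'):
        return True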
6da69eb8f13dc56cc19d06a09d74005395de8989
|
fedmsg_meta_umb/tps.py
|
fedmsg_meta_umb/tps.py
|
from fedmsg.meta.base import BaseProcessor
class TPSProcessor(BaseProcessor):
topic_prefix_re = r'/topic/VirtualTopic\.eng'
__name__ = 'tps'
def title(self, msg, **config):
return msg['topic'].split('.', 2)[-1]
def packages(self, msg, **config):
return set([msg['headers']['component'].rsplit('-', 2)[0]])
|
from fedmsg.meta.base import BaseProcessor
class TPSProcessor(BaseProcessor):
topic_prefix_re = r'/topic/VirtualTopic\.eng'
__name__ = 'tps'
__description__ = 'package sanity testing of brew builds'
__obj__ = 'Test Package Sanity'
__docs__ = 'https://mojo.redhat.com/docs/DOC-0000000'
__link__ = 'https://sometpslink.engineering.redhat.com'
def title(self, msg, **config):
return msg['topic'].split('.', 2)[-1]
def packages(self, msg, **config):
return set([msg['headers']['component'].rsplit('-', 2)[0]])
|
Add missing attributes in TPSProcessor.
|
Add missing attributes in TPSProcessor.
Signed-off-by: shanks <[email protected]>
|
Python
|
lgpl-2.1
|
release-engineering/fedmsg_meta_umb
|
from fedmsg.meta.base import BaseProcessor
class TPSProcessor(BaseProcessor):
topic_prefix_re = r'/topic/VirtualTopic\.eng'
__name__ = 'tps'
+ __description__ = 'package sanity testing of brew builds'
+ __obj__ = 'Test Package Sanity'
+ __docs__ = 'https://mojo.redhat.com/docs/DOC-0000000'
+ __link__ = 'https://sometpslink.engineering.redhat.com'
def title(self, msg, **config):
return msg['topic'].split('.', 2)[-1]
def packages(self, msg, **config):
return set([msg['headers']['component'].rsplit('-', 2)[0]])
|
Add missing attributes in TPSProcessor.
|
## Code Before:
from fedmsg.meta.base import BaseProcessor
class TPSProcessor(BaseProcessor):
topic_prefix_re = r'/topic/VirtualTopic\.eng'
__name__ = 'tps'
def title(self, msg, **config):
return msg['topic'].split('.', 2)[-1]
def packages(self, msg, **config):
return set([msg['headers']['component'].rsplit('-', 2)[0]])
## Instruction:
Add missing attributes in TPSProcessor.
## Code After:
from fedmsg.meta.base import BaseProcessor
class TPSProcessor(BaseProcessor):
topic_prefix_re = r'/topic/VirtualTopic\.eng'
__name__ = 'tps'
__description__ = 'package sanity testing of brew builds'
__obj__ = 'Test Package Sanity'
__docs__ = 'https://mojo.redhat.com/docs/DOC-0000000'
__link__ = 'https://sometpslink.engineering.redhat.com'
def title(self, msg, **config):
return msg['topic'].split('.', 2)[-1]
def packages(self, msg, **config):
return set([msg['headers']['component'].rsplit('-', 2)[0]])
|
from fedmsg.meta.base import BaseProcessor
class TPSProcessor(BaseProcessor):
topic_prefix_re = r'/topic/VirtualTopic\.eng'
__name__ = 'tps'
+ __description__ = 'package sanity testing of brew builds'
+ __obj__ = 'Test Package Sanity'
+ __docs__ = 'https://mojo.redhat.com/docs/DOC-0000000'
+ __link__ = 'https://sometpslink.engineering.redhat.com'
def title(self, msg, **config):
return msg['topic'].split('.', 2)[-1]
def packages(self, msg, **config):
return set([msg['headers']['component'].rsplit('-', 2)[0]])
|
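What the two methods extract, shown on an illustrative message (the topic suffix and component NVR are made up):

msg = {
    'topic': '/topic/VirtualTopic.eng.tps.result',
    'headers': {'component': 'httpd-2.4.6-89.el7'},
}
# title() keeps everything after the second dot.
assert msg['topic'].split('.', 2)[-1] == 'tps.result'
# packages() strips version and release from the name-version-release string.
assert msg['headers']['component'].rsplit('-', 2)[0] == 'httpd'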
68c7db19c0ac8c159bc12ff9714dea068a7835e4
|
importlib_resources/__init__.py
|
importlib_resources/__init__.py
|
"""Read resources contained within a package."""
import sys
__all__ = [
'Package',
'Resource',
'ResourceReader',
'contents',
'is_resource',
'open_binary',
'open_text',
'path',
'read_binary',
'read_text',
]
if sys.version_info >= (3,):
from importlib_resources._py3 import (
Package,
Resource,
contents,
is_resource,
open_binary,
open_text,
path,
read_binary,
read_text,
)
from importlib_resources.abc import ResourceReader
else:
from importlib_resources._py2 import (
contents,
is_resource,
open_binary,
open_text,
path,
read_binary,
read_text,
)
del __all__[:3]
__version__ = read_text('importlib_resources', 'version.txt').strip()
|
"""Read resources contained within a package."""
import sys
__all__ = [
'Package',
'Resource',
'ResourceReader',
'contents',
'files',
'is_resource',
'open_binary',
'open_text',
'path',
'read_binary',
'read_text',
]
if sys.version_info >= (3,):
from importlib_resources._py3 import (
Package,
Resource,
contents,
files,
is_resource,
open_binary,
open_text,
path,
read_binary,
read_text,
)
from importlib_resources.abc import ResourceReader
else:
from importlib_resources._py2 import (
contents,
files,
is_resource,
open_binary,
open_text,
path,
read_binary,
read_text,
)
del __all__[:3]
__version__ = read_text('importlib_resources', 'version.txt').strip()
|
Add files to the exported names.
|
Add files to the exported names.
|
Python
|
apache-2.0
|
python/importlib_resources
|
"""Read resources contained within a package."""
import sys
__all__ = [
'Package',
'Resource',
'ResourceReader',
'contents',
+ 'files',
'is_resource',
'open_binary',
'open_text',
'path',
'read_binary',
'read_text',
]
if sys.version_info >= (3,):
from importlib_resources._py3 import (
Package,
Resource,
contents,
+ files,
is_resource,
open_binary,
open_text,
path,
read_binary,
read_text,
)
from importlib_resources.abc import ResourceReader
else:
from importlib_resources._py2 import (
contents,
+ files,
is_resource,
open_binary,
open_text,
path,
read_binary,
read_text,
)
del __all__[:3]
__version__ = read_text('importlib_resources', 'version.txt').strip()
|
Add files to the exported names.
|
## Code Before:
"""Read resources contained within a package."""
import sys
__all__ = [
'Package',
'Resource',
'ResourceReader',
'contents',
'is_resource',
'open_binary',
'open_text',
'path',
'read_binary',
'read_text',
]
if sys.version_info >= (3,):
from importlib_resources._py3 import (
Package,
Resource,
contents,
is_resource,
open_binary,
open_text,
path,
read_binary,
read_text,
)
from importlib_resources.abc import ResourceReader
else:
from importlib_resources._py2 import (
contents,
is_resource,
open_binary,
open_text,
path,
read_binary,
read_text,
)
del __all__[:3]
__version__ = read_text('importlib_resources', 'version.txt').strip()
## Instruction:
Add files to the exported names.
## Code After:
"""Read resources contained within a package."""
import sys
__all__ = [
'Package',
'Resource',
'ResourceReader',
'contents',
'files',
'is_resource',
'open_binary',
'open_text',
'path',
'read_binary',
'read_text',
]
if sys.version_info >= (3,):
from importlib_resources._py3 import (
Package,
Resource,
contents,
files,
is_resource,
open_binary,
open_text,
path,
read_binary,
read_text,
)
from importlib_resources.abc import ResourceReader
else:
from importlib_resources._py2 import (
contents,
files,
is_resource,
open_binary,
open_text,
path,
read_binary,
read_text,
)
del __all__[:3]
__version__ = read_text('importlib_resources', 'version.txt').strip()
|
"""Read resources contained within a package."""
import sys
__all__ = [
'Package',
'Resource',
'ResourceReader',
'contents',
+ 'files',
'is_resource',
'open_binary',
'open_text',
'path',
'read_binary',
'read_text',
]
if sys.version_info >= (3,):
from importlib_resources._py3 import (
Package,
Resource,
contents,
+ files,
is_resource,
open_binary,
open_text,
path,
read_binary,
read_text,
)
from importlib_resources.abc import ResourceReader
else:
from importlib_resources._py2 import (
contents,
+ files,
is_resource,
open_binary,
open_text,
path,
read_binary,
read_text,
)
del __all__[:3]
__version__ = read_text('importlib_resources', 'version.txt').strip()
|
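A minimal use of the newly exported name, assuming an installed package mypkg that contains data.txt:

import importlib_resources

# files() returns a Traversable rooted at the package.
text = importlib_resources.files('mypkg').joinpath('data.txt').read_text()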
1d84a3b58aa752834aed31123dd16e3bfa723609
|
tests/storage_adapter_tests/test_storage_adapter.py
|
tests/storage_adapter_tests/test_storage_adapter.py
|
from unittest import TestCase
from chatterbot.storage import StorageAdapter
class StorageAdapterTestCase(TestCase):
"""
This test case is for the StorageAdapter base class.
Although this class is not intended for direct use,
this test case ensures that exceptions requiring
basic functionality are triggered when needed.
"""
def setUp(self):
super(StorageAdapterTestCase, self).setUp()
self.adapter = StorageAdapter()
def test_count(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.count()
def test_find(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.find('')
def test_filter(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.filter()
def test_remove(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.remove('')
def test_create(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.create()
def test_update(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.update('')
def test_get_random(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_random()
def test_get_response_statements(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_response_statements()
def test_drop(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.drop()
|
from unittest import TestCase
from chatterbot.storage import StorageAdapter
class StorageAdapterTestCase(TestCase):
"""
This test case is for the StorageAdapter base class.
Although this class is not intended for direct use,
this test case ensures that exceptions requiring
basic functionality are triggered when needed.
"""
def setUp(self):
super(StorageAdapterTestCase, self).setUp()
self.adapter = StorageAdapter()
def test_count(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.count()
def test_filter(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.filter()
def test_remove(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.remove('')
def test_create(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.create()
def test_update(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.update('')
def test_get_random(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_random()
def test_drop(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.drop()
|
Remove tests for storage adapter methods being removed.
|
Remove tests for storage adapter methods being removed.
|
Python
|
bsd-3-clause
|
vkosuri/ChatterBot,gunthercox/ChatterBot
|
from unittest import TestCase
from chatterbot.storage import StorageAdapter
class StorageAdapterTestCase(TestCase):
"""
This test case is for the StorageAdapter base class.
Although this class is not intended for direct use,
this test case ensures that exceptions requiring
basic functionality are triggered when needed.
"""
def setUp(self):
super(StorageAdapterTestCase, self).setUp()
self.adapter = StorageAdapter()
def test_count(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.count()
-
- def test_find(self):
- with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
- self.adapter.find('')
def test_filter(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.filter()
def test_remove(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.remove('')
def test_create(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.create()
def test_update(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.update('')
def test_get_random(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_random()
- def test_get_response_statements(self):
- with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
- self.adapter.get_response_statements()
-
def test_drop(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.drop()
|
Remove tests for storage adapter methods being removed.
|
## Code Before:
from unittest import TestCase
from chatterbot.storage import StorageAdapter
class StorageAdapterTestCase(TestCase):
"""
This test case is for the StorageAdapter base class.
Although this class is not intended for direct use,
this test case ensures that exceptions requiring
basic functionality are triggered when needed.
"""
def setUp(self):
super(StorageAdapterTestCase, self).setUp()
self.adapter = StorageAdapter()
def test_count(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.count()
def test_find(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.find('')
def test_filter(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.filter()
def test_remove(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.remove('')
def test_create(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.create()
def test_update(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.update('')
def test_get_random(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_random()
def test_get_response_statements(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_response_statements()
def test_drop(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.drop()
## Instruction:
Remove tests for storage adapter methods being removed.
## Code After:
from unittest import TestCase
from chatterbot.storage import StorageAdapter
class StorageAdapterTestCase(TestCase):
"""
This test case is for the StorageAdapter base class.
Although this class is not intended for direct use,
this test case ensures that exceptions requiring
basic functionality are triggered when needed.
"""
def setUp(self):
super(StorageAdapterTestCase, self).setUp()
self.adapter = StorageAdapter()
def test_count(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.count()
def test_filter(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.filter()
def test_remove(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.remove('')
def test_create(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.create()
def test_update(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.update('')
def test_get_random(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_random()
def test_drop(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.drop()
|
from unittest import TestCase
from chatterbot.storage import StorageAdapter
class StorageAdapterTestCase(TestCase):
"""
This test case is for the StorageAdapter base class.
Although this class is not intended for direct use,
this test case ensures that exceptions requiring
basic functionality are triggered when needed.
"""
def setUp(self):
super(StorageAdapterTestCase, self).setUp()
self.adapter = StorageAdapter()
def test_count(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.count()
-
- def test_find(self):
- with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
- self.adapter.find('')
def test_filter(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.filter()
def test_remove(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.remove('')
def test_create(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.create()
def test_update(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.update('')
def test_get_random(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_random()
- def test_get_response_statements(self):
- with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
- self.adapter.get_response_statements()
-
def test_drop(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.drop()
|
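A sketch of the contract the remaining tests pin down: every unimplemented base-class method raises the adapter's own exception type.

from chatterbot.storage import StorageAdapter

adapter = StorageAdapter()
try:
    adapter.filter()
except StorageAdapter.AdapterMethodNotImplementedError as error:
    print('filter() is abstract on the base adapter:', error)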
412d84fd08f55e20a23314cb09a8e49751df38c2
|
setup.py
|
setup.py
|
from distutils.core import Extension, setup
try:
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = [
Extension('mathix.vector', ['mathix/vector.pyx']),
]
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
|
from distutils.core import Extension, setup
try:
from Cython.Build import cythonize
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = cythonize([
Extension('mathix.vector', ['mathix/vector.pyx']),
])
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
|
Use "cythonize" if Cython is installed.
|
Use "cythonize" if Cython is installed.
|
Python
|
mit
|
PeithVergil/cython-example
|
from distutils.core import Extension, setup
try:
+ from Cython.Build import cythonize
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
- extensions = [
+ extensions = cythonize([
Extension('mathix.vector', ['mathix/vector.pyx']),
- ]
+ ])
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
|
Use "cythonize" if Cython is installed.
|
## Code Before:
from distutils.core import Extension, setup
try:
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = [
Extension('mathix.vector', ['mathix/vector.pyx']),
]
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
## Instruction:
Use "cythonize" if Cython is installed.
## Code After:
from distutils.core import Extension, setup
try:
from Cython.Build import cythonize
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = cythonize([
Extension('mathix.vector', ['mathix/vector.pyx']),
])
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
|
from distutils.core import Extension, setup
try:
+ from Cython.Build import cythonize
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
- extensions = [
+ extensions = cythonize([
? ++++++++++
Extension('mathix.vector', ['mathix/vector.pyx']),
- ]
+ ])
? +
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
|
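The guarded import is a common pattern for packages that ship pre-generated C sources; in isolation it can be reduced to a shim (a sketch, not the commit's code):

try:
    from Cython.Build import cythonize
except ImportError:
    def cythonize(extensions, **kwargs):
        # Without Cython, assume the .c files were generated ahead of time
        # and hand the extensions straight through.
        return extensions

The commit keeps two explicit branches instead, because the extension source also changes (vector.pyx with Cython, vector.c without), which a plain shim would not handle.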
35b5215cd16493fea00c7ebb2106c633ce4c6a9b
|
qutebrowser/config.py
|
qutebrowser/config.py
|
config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
|
config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.fonts.web.family.fantasy = 'Arial'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
|
Use Arial as Fantasy font
|
qutebrowser: Use Arial as Fantasy font
|
Python
|
mit
|
The-Compiler/dotfiles,The-Compiler/dotfiles,The-Compiler/dotfiles
|
config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
+ c.fonts.web.family.fantasy = 'Arial'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
|
Use Arial as Fantasy font
|
## Code Before:
config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
## Instruction:
Use Arial as Fantasy font
## Code After:
config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.fonts.web.family.fantasy = 'Arial'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
|
config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
+ c.fonts.web.family.fantasy = 'Arial'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
|
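The config-cycle-with-f-string idiom generalizes; an illustrative companion binding in the same style (the key and the light-variant path are assumptions):

light = '~/code/solarized-everything-css/css/solarized-all-sites-light.css'
config.bind(',N', f'config-cycle content.user_stylesheets {light} ""')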
6196c1fe13df88c1d9f1fe706120c175ab890a1d
|
gen_tone.py
|
gen_tone.py
|
import math
import numpy
from demodulate.cfg import *
def gen_tone(pattern, WPM):
cycles_per_sample = MORSE_FREQ/SAMPLE_FREQ
radians_per_sample = cycles_per_sample * 2 * math.pi
elements_per_second = WPM * 50.0 / 60.0
samples_per_element = int(SAMPLE_FREQ/elements_per_second)
length = samples_per_element * len(pattern)
# Empty returns array containing random stuff, so we NEED to overwrite it
data = numpy.empty(length, dtype=numpy.float32)
for i in xrange(length):
keyed = pattern[int(i/samples_per_element)]
#keyed = 1
data[i] = 0 if not keyed else (radians_per_sample * i)
data = numpy.sin(data)
return data
|
import math
import numpy
from demodulate.cfg import *
def gen_tone(pattern, WPM):
cycles_per_sample = MORSE_FREQ/SAMPLE_FREQ
radians_per_sample = cycles_per_sample * 2 * math.pi
elements_per_second = WPM * 50.0 / 60.0
samples_per_element = int(SAMPLE_FREQ/elements_per_second)
length = samples_per_element * len(pattern)
# Empty returns array containing random stuff, so we NEED to overwrite it
data = numpy.empty(length, dtype=numpy.float32)
for i in xrange(length):
keyed = pattern[int(i/samples_per_element)]
#keyed = 1
data[i] = 0 if not keyed else (radians_per_sample * i)
data = numpy.sin(data)
data *= 2**16-1
data = numpy.array(data, dtype=numpy.int16)
return data
|
Use 16 bit samples instead of float
|
Use 16 bit samples instead of float
|
Python
|
mit
|
nickodell/morse-code
|
import math
import numpy
from demodulate.cfg import *
def gen_tone(pattern, WPM):
cycles_per_sample = MORSE_FREQ/SAMPLE_FREQ
radians_per_sample = cycles_per_sample * 2 * math.pi
elements_per_second = WPM * 50.0 / 60.0
samples_per_element = int(SAMPLE_FREQ/elements_per_second)
length = samples_per_element * len(pattern)
# Empty returns array containing random stuff, so we NEED to overwrite it
data = numpy.empty(length, dtype=numpy.float32)
for i in xrange(length):
keyed = pattern[int(i/samples_per_element)]
#keyed = 1
data[i] = 0 if not keyed else (radians_per_sample * i)
data = numpy.sin(data)
-
+ data *= 2**16-1
+ data = numpy.array(data, dtype=numpy.int16)
return data
|
Use 16 bit samples instead of float
|
## Code Before:
import math
import numpy
from demodulate.cfg import *
def gen_tone(pattern, WPM):
cycles_per_sample = MORSE_FREQ/SAMPLE_FREQ
radians_per_sample = cycles_per_sample * 2 * math.pi
elements_per_second = WPM * 50.0 / 60.0
samples_per_element = int(SAMPLE_FREQ/elements_per_second)
length = samples_per_element * len(pattern)
# Empty returns array containing random stuff, so we NEED to overwrite it
data = numpy.empty(length, dtype=numpy.float32)
for i in xrange(length):
keyed = pattern[int(i/samples_per_element)]
#keyed = 1
data[i] = 0 if not keyed else (radians_per_sample * i)
data = numpy.sin(data)
return data
## Instruction:
Use 16 bit samples instead of float
## Code After:
import math
import numpy
from demodulate.cfg import *
def gen_tone(pattern, WPM):
cycles_per_sample = MORSE_FREQ/SAMPLE_FREQ
radians_per_sample = cycles_per_sample * 2 * math.pi
elements_per_second = WPM * 50.0 / 60.0
samples_per_element = int(SAMPLE_FREQ/elements_per_second)
length = samples_per_element * len(pattern)
# Empty returns array containing random stuff, so we NEED to overwrite it
data = numpy.empty(length, dtype=numpy.float32)
for i in xrange(length):
keyed = pattern[int(i/samples_per_element)]
#keyed = 1
data[i] = 0 if not keyed else (radians_per_sample * i)
data = numpy.sin(data)
data *= 2**16-1
data = numpy.array(data, dtype=numpy.int16)
return data
|
import math
import numpy
from demodulate.cfg import *
def gen_tone(pattern, WPM):
cycles_per_sample = MORSE_FREQ/SAMPLE_FREQ
radians_per_sample = cycles_per_sample * 2 * math.pi
elements_per_second = WPM * 50.0 / 60.0
samples_per_element = int(SAMPLE_FREQ/elements_per_second)
length = samples_per_element * len(pattern)
# Empty returns array containing random stuff, so we NEED to overwrite it
data = numpy.empty(length, dtype=numpy.float32)
for i in xrange(length):
keyed = pattern[int(i/samples_per_element)]
#keyed = 1
data[i] = 0 if not keyed else (radians_per_sample * i)
data = numpy.sin(data)
-
+ data *= 2**16-1
+ data = numpy.array(data, dtype=numpy.int16)
return data
|
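A caution on the conversion above, not part of the commit: numpy.sin yields values in [-1, 1], so scaling by 2**16-1 (65535) exceeds the int16 range [-32768, 32767] and the cast wraps in practice; 2**15-1 is the usual full-scale factor for signed 16-bit audio. A minimal sketch (Python 2, matching the xrange in the surrounding code):

import numpy
x = numpy.sin(numpy.linspace(0, 2 * numpy.pi, 8))
print(numpy.array(x * (2**16 - 1), dtype=numpy.int16))  # wraps near the peaks
print(numpy.array(x * (2**15 - 1), dtype=numpy.int16))  # stays in range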
0ce14be170e09530b225f2f7526ad68ee1758095
|
peering/migrations/0027_auto_20190105_1600.py
|
peering/migrations/0027_auto_20190105_1600.py
|
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"peering",
"0026_autonomoussystem_potential_internet_exchange_peering_sessions",
)
]
operations = [
migrations.AlterField(
model_name="autonomoussystem",
name="potential_internet_exchange_peering_sessions",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.GenericIPAddressField(),
blank=True,
default=list,
size=None,
),
)
]
|
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"peering",
"0026_autonomoussystem_potential_internet_exchange_peering_sessions",
)
]
def forwards_func(apps, schema_editor):
AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
db_alias = schema_editor.connection.alias
AutonomousSystem.objects.using(db_alias).filter(
potential_internet_exchange_peering_sessions=None
).update(potential_internet_exchange_peering_sessions=[])
def reverse_func(apps, schema_editor):
AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
db_alias = schema_editor.connection.alias
AutonomousSystem.objects.using(db_alias).filter(
potential_internet_exchange_peering_sessions=[]
).update(potential_internet_exchange_peering_sessions=None)
operations = [
migrations.AlterField(
model_name="autonomoussystem",
name="potential_internet_exchange_peering_sessions",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.GenericIPAddressField(),
blank=True,
default=list,
size=None,
),
),
migrations.RunPython(forwards_func, reverse_func),
]
|
Fix issue with recently introduced migrations.
|
Fix issue with recently introduced migrations.
|
Python
|
apache-2.0
|
respawner/peering-manager,respawner/peering-manager,respawner/peering-manager,respawner/peering-manager
|
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"peering",
"0026_autonomoussystem_potential_internet_exchange_peering_sessions",
)
]
+ def forwards_func(apps, schema_editor):
+ AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
+ db_alias = schema_editor.connection.alias
+ AutonomousSystem.objects.using(db_alias).filter(
+ potential_internet_exchange_peering_sessions=None
+ ).update(potential_internet_exchange_peering_sessions=[])
+
+ def reverse_func(apps, schema_editor):
+ AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
+ db_alias = schema_editor.connection.alias
+ AutonomousSystem.objects.using(db_alias).filter(
+ potential_internet_exchange_peering_sessions=[]
+ ).update(potential_internet_exchange_peering_sessions=None)
+
operations = [
migrations.AlterField(
model_name="autonomoussystem",
name="potential_internet_exchange_peering_sessions",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.GenericIPAddressField(),
blank=True,
default=list,
size=None,
),
- )
+ ),
+ migrations.RunPython(forwards_func, reverse_func),
]
|
Fix issue with migrations introduced lately.
|
## Code Before:
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"peering",
"0026_autonomoussystem_potential_internet_exchange_peering_sessions",
)
]
operations = [
migrations.AlterField(
model_name="autonomoussystem",
name="potential_internet_exchange_peering_sessions",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.GenericIPAddressField(),
blank=True,
default=list,
size=None,
),
)
]
## Instruction:
Fix issue with recently introduced migrations.
## Code After:
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"peering",
"0026_autonomoussystem_potential_internet_exchange_peering_sessions",
)
]
def forwards_func(apps, schema_editor):
AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
db_alias = schema_editor.connection.alias
AutonomousSystem.objects.using(db_alias).filter(
potential_internet_exchange_peering_sessions=None
).update(potential_internet_exchange_peering_sessions=[])
def reverse_func(apps, schema_editor):
AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
db_alias = schema_editor.connection.alias
AutonomousSystem.objects.using(db_alias).filter(
potential_internet_exchange_peering_sessions=[]
).update(potential_internet_exchange_peering_sessions=None)
operations = [
migrations.AlterField(
model_name="autonomoussystem",
name="potential_internet_exchange_peering_sessions",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.GenericIPAddressField(),
blank=True,
default=list,
size=None,
),
),
migrations.RunPython(forwards_func, reverse_func),
]
|
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"peering",
"0026_autonomoussystem_potential_internet_exchange_peering_sessions",
)
]
+ def forwards_func(apps, schema_editor):
+ AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
+ db_alias = schema_editor.connection.alias
+ AutonomousSystem.objects.using(db_alias).filter(
+ potential_internet_exchange_peering_sessions=None
+ ).update(potential_internet_exchange_peering_sessions=[])
+
+ def reverse_func(apps, schema_editor):
+ AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
+ db_alias = schema_editor.connection.alias
+ AutonomousSystem.objects.using(db_alias).filter(
+ potential_internet_exchange_peering_sessions=[]
+ ).update(potential_internet_exchange_peering_sessions=None)
+
operations = [
migrations.AlterField(
model_name="autonomoussystem",
name="potential_internet_exchange_peering_sessions",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.GenericIPAddressField(),
blank=True,
default=list,
size=None,
),
- )
+ ),
? +
+ migrations.RunPython(forwards_func, reverse_func),
]
|
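A note on the pattern above, not part of the commit: the data migration correctly uses the historical model via apps.get_model instead of a direct import, and when a data migration has no sensible reverse, Django ships a no-op callable. A minimal sketch of that variant, reusing the names from the record (this would sit inside a Migration class as above):

from django.db import migrations

def forwards(apps, schema_editor):
    AutonomousSystem = apps.get_model("peering", "AutonomousSystem")  # historical model
    AutonomousSystem.objects.filter(
        potential_internet_exchange_peering_sessions=None
    ).update(potential_internet_exchange_peering_sessions=[])

operations = [
    migrations.RunPython(forwards, migrations.RunPython.noop),  # no-op reverse keeps it reversible
]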
d95eda2f88a8b493e40cd6628c7e532a1f510610
|
src/dashboard/src/main/urls.py
|
src/dashboard/src/main/urls.py
|
from django.conf.urls.defaults import *
from django.conf import settings
from django.views.generic.simple import direct_to_template, redirect_to
UUID_REGEX = '[\w]{8}(-[\w]{4}){3}-[\w]{12}'
urlpatterns = patterns('dashboard.main.views',
# Ingest
url(r'ingest/$', direct_to_template, {'template': 'main/ingest.html', 'extra_context': {'polling_interval': settings.POLLING_INTERVAL, 'microservices_help': settings.MICROSERVICES_HELP}}, 'ingest'),
(r'ingest/go/$', 'ingest'),
(r'ingest/go/(?P<uuid>' + UUID_REGEX + ')$', 'ingest'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/explore/$', 'explore'),
(r'tasks/(?P<uuid>' + UUID_REGEX + ')/$', 'tasks'),
    # Preservation planning
(r'preservation-planning/$', 'preservation_planning'),
# Index
(r'', redirect_to, {'url': '/ingest/'}),
)
|
from django.conf.urls.defaults import *
from django.conf import settings
from django.views.generic.simple import direct_to_template, redirect_to
UUID_REGEX = '[\w]{8}(-[\w]{4}){3}-[\w]{12}'
urlpatterns = patterns('dashboard.main.views',
# Index
(r'^$', redirect_to, {'url': '/ingest/'}),
# Ingest
url(r'ingest/$', direct_to_template, {'template': 'main/ingest.html', 'extra_context': {'polling_interval': settings.POLLING_INTERVAL, 'microservices_help': settings.MICROSERVICES_HELP}}, 'ingest'),
(r'ingest/go/$', 'ingest'),
(r'ingest/go/(?P<uuid>' + UUID_REGEX + ')$', 'ingest'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/explore/$', 'explore'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/list-objects/$', 'list_objects'),
(r'tasks/(?P<uuid>' + UUID_REGEX + ')/$', 'tasks'),
    # Preservation planning
(r'preservation-planning/$', 'preservation_planning'),
)
|
Remove default route because it is not the desired behavior.
|
Remove default route because it is not the desired behavior.
Autoconverted from SVN (revision:1409)
|
Python
|
agpl-3.0
|
artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history
|
from django.conf.urls.defaults import *
from django.conf import settings
from django.views.generic.simple import direct_to_template, redirect_to
UUID_REGEX = '[\w]{8}(-[\w]{4}){3}-[\w]{12}'
urlpatterns = patterns('dashboard.main.views',
+ # Index
+ (r'^$', redirect_to, {'url': '/ingest/'}),
+
# Ingest
url(r'ingest/$', direct_to_template, {'template': 'main/ingest.html', 'extra_context': {'polling_interval': settings.POLLING_INTERVAL, 'microservices_help': settings.MICROSERVICES_HELP}}, 'ingest'),
(r'ingest/go/$', 'ingest'),
(r'ingest/go/(?P<uuid>' + UUID_REGEX + ')$', 'ingest'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/explore/$', 'explore'),
+ (r'jobs/(?P<uuid>' + UUID_REGEX + ')/list-objects/$', 'list_objects'),
(r'tasks/(?P<uuid>' + UUID_REGEX + ')/$', 'tasks'),
    # Preservation planning
(r'preservation-planning/$', 'preservation_planning'),
- # Index
- (r'', redirect_to, {'url': '/ingest/'}),
-
)
|
Remove default route because it is not the desired behavior.
|
## Code Before:
from django.conf.urls.defaults import *
from django.conf import settings
from django.views.generic.simple import direct_to_template, redirect_to
UUID_REGEX = '[\w]{8}(-[\w]{4}){3}-[\w]{12}'
urlpatterns = patterns('dashboard.main.views',
# Ingest
url(r'ingest/$', direct_to_template, {'template': 'main/ingest.html', 'extra_context': {'polling_interval': settings.POLLING_INTERVAL, 'microservices_help': settings.MICROSERVICES_HELP}}, 'ingest'),
(r'ingest/go/$', 'ingest'),
(r'ingest/go/(?P<uuid>' + UUID_REGEX + ')$', 'ingest'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/explore/$', 'explore'),
(r'tasks/(?P<uuid>' + UUID_REGEX + ')/$', 'tasks'),
    # Preservation planning
(r'preservation-planning/$', 'preservation_planning'),
# Index
(r'', redirect_to, {'url': '/ingest/'}),
)
## Instruction:
Remove default route because it is not the desired behavior.
## Code After:
from django.conf.urls.defaults import *
from django.conf import settings
from django.views.generic.simple import direct_to_template, redirect_to
UUID_REGEX = '[\w]{8}(-[\w]{4}){3}-[\w]{12}'
urlpatterns = patterns('dashboard.main.views',
# Index
(r'^$', redirect_to, {'url': '/ingest/'}),
# Ingest
url(r'ingest/$', direct_to_template, {'template': 'main/ingest.html', 'extra_context': {'polling_interval': settings.POLLING_INTERVAL, 'microservices_help': settings.MICROSERVICES_HELP}}, 'ingest'),
(r'ingest/go/$', 'ingest'),
(r'ingest/go/(?P<uuid>' + UUID_REGEX + ')$', 'ingest'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/explore/$', 'explore'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/list-objects/$', 'list_objects'),
(r'tasks/(?P<uuid>' + UUID_REGEX + ')/$', 'tasks'),
    # Preservation planning
(r'preservation-planning/$', 'preservation_planning'),
)
|
from django.conf.urls.defaults import *
from django.conf import settings
from django.views.generic.simple import direct_to_template, redirect_to
UUID_REGEX = '[\w]{8}(-[\w]{4}){3}-[\w]{12}'
urlpatterns = patterns('dashboard.main.views',
+ # Index
+ (r'^$', redirect_to, {'url': '/ingest/'}),
+
# Ingest
url(r'ingest/$', direct_to_template, {'template': 'main/ingest.html', 'extra_context': {'polling_interval': settings.POLLING_INTERVAL, 'microservices_help': settings.MICROSERVICES_HELP}}, 'ingest'),
(r'ingest/go/$', 'ingest'),
(r'ingest/go/(?P<uuid>' + UUID_REGEX + ')$', 'ingest'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/explore/$', 'explore'),
+ (r'jobs/(?P<uuid>' + UUID_REGEX + ')/list-objects/$', 'list_objects'),
(r'tasks/(?P<uuid>' + UUID_REGEX + ')/$', 'tasks'),
    # Preservation planning
(r'preservation-planning/$', 'preservation_planning'),
- # Index
- (r'', redirect_to, {'url': '/ingest/'}),
-
)
|
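Why the old catch-all had to go, sketched here rather than in the commit: old-style Django URL resolution runs an unanchored regex search, so the empty pattern r'' matches every path, while the anchored r'^$' matches only the site root.

import re
for path in ('ingest/go/', 'no/such/page', ''):
    print(bool(re.search(r'', path)), bool(re.search(r'^$', path)))
# r''   -> True for all three paths (hence the unwanted default redirect)
# r'^$' -> True only for the empty path, i.e. the site root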
b6dcb4029d3bf4b402a6874c942c9e4a105f2a62
|
tracker_project/tracker_project/urls.py
|
tracker_project/tracker_project/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout'),
url(r'^', 'tracker_project.views.home', name='home')
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.core.urlresolvers import reverse_lazy
urlpatterns = patterns(
'',
url(r'^$', 'tracker_project.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(
r'^logout/$',
'django.contrib.auth.views.logout',
{'next_page': reverse_lazy('home')},
name='logout'
),
url(r'^tracker/', include('tracker.urls', 'tracker')),
)
|
Fix login and logout URLs
|
Fix login and logout URLs
|
Python
|
mit
|
abarto/tracker_project,abarto/tracker_project,abarto/tracker_project,vivek8943/tracker_project,vivek8943/tracker_project,vivek8943/tracker_project
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
+ from django.core.urlresolvers import reverse_lazy
urlpatterns = patterns(
'',
+ url(r'^$', 'tracker_project.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('django.contrib.auth.urls')),
- url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout'),
- url(r'^', 'tracker_project.views.home', name='home')
+ url(
+ r'^logout/$',
+ 'django.contrib.auth.views.logout',
+ {'next_page': reverse_lazy('home')},
+ name='logout'
+ ),
+ url(r'^tracker/', include('tracker.urls', 'tracker')),
)
|
Fix login and logout URLs
|
## Code Before:
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout'),
url(r'^', 'tracker_project.views.home', name='home')
)
## Instruction:
Fix login and logout URLs
## Code After:
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.core.urlresolvers import reverse_lazy
urlpatterns = patterns(
'',
url(r'^$', 'tracker_project.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(
r'^logout/$',
'django.contrib.auth.views.logout',
{'next_page': reverse_lazy('home')},
name='logout'
),
url(r'^tracker/', include('tracker.urls', 'tracker')),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
+ from django.core.urlresolvers import reverse_lazy
urlpatterns = patterns(
'',
+ url(r'^$', 'tracker_project.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('django.contrib.auth.urls')),
- url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout'),
- url(r'^', 'tracker_project.views.home', name='home')
+ url(
+ r'^logout/$',
+ 'django.contrib.auth.views.logout',
+ {'next_page': reverse_lazy('home')},
+ name='logout'
+ ),
+ url(r'^tracker/', include('tracker.urls', 'tracker')),
)
|
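A note on reverse_lazy, not part of the commit: a plain reverse('home') evaluated at import time can fail because the URLconf that defines 'home' is still being built by this very module; the lazy variant defers resolution until the value is first coerced to a string. A minimal sketch:

from django.core.urlresolvers import reverse_lazy

# Safe at module level: 'home' is resolved on first use, not at import time.
LOGOUT_NEXT = reverse_lazy('home')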
eaea466e29725c04ccb31a24807668dee1a09a91
|
courses/developingapps/python/devenv/server.py
|
courses/developingapps/python/devenv/server.py
|
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
self.send_header('Content-type','text/plain')
self.end_headers()
self.wfile.write('Hello GCP dev!')
return
def run():
print('Server is starting...')
server_address = ('0.0.0.0', 80)
server = HTTPServer(server_address, SimpleHTTPRequestHandler)
print('Started. Press Ctrl + C to stop')
server.serve_forever()
if __name__ == '__main__':
run()
|
try:
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
except ImportError:
from http.server import BaseHTTPRequestHandler, HTTPServer
class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
self.send_header('Content-type','text/plain')
self.end_headers()
self.wfile.write(b'Hello GCP dev!')
return
def run():
print('Server is starting...')
server_address = ('0.0.0.0', 80)
server = HTTPServer(server_address, SimpleHTTPRequestHandler)
print('Started. Press Ctrl + C to stop')
server.serve_forever()
if __name__ == '__main__':
run()
|
Fix ImportError and use bytes in outstream
|
Fix ImportError and use bytes in outstream
|
Python
|
apache-2.0
|
turbomanage/training-data-analyst,GoogleCloudPlatform/training-data-analyst,GoogleCloudPlatform/training-data-analyst,turbomanage/training-data-analyst,GoogleCloudPlatform/training-data-analyst,GoogleCloudPlatform/training-data-analyst,GoogleCloudPlatform/training-data-analyst,turbomanage/training-data-analyst,turbomanage/training-data-analyst,GoogleCloudPlatform/training-data-analyst,turbomanage/training-data-analyst,turbomanage/training-data-analyst,GoogleCloudPlatform/training-data-analyst,turbomanage/training-data-analyst,GoogleCloudPlatform/training-data-analyst,GoogleCloudPlatform/training-data-analyst,GoogleCloudPlatform/training-data-analyst,turbomanage/training-data-analyst,GoogleCloudPlatform/training-data-analyst
|
+ try:
- from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+ from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+ except ImportError:
+ from http.server import BaseHTTPRequestHandler, HTTPServer
class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
self.send_header('Content-type','text/plain')
self.end_headers()
- self.wfile.write('Hello GCP dev!')
+ self.wfile.write(b'Hello GCP dev!')
return
def run():
print('Server is starting...')
server_address = ('0.0.0.0', 80)
server = HTTPServer(server_address, SimpleHTTPRequestHandler)
print('Started. Press Ctrl + C to stop')
server.serve_forever()
if __name__ == '__main__':
run()
+
|
Fix ImportError and use bytes in outstream
|
## Code Before:
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
self.send_header('Content-type','text/plain')
self.end_headers()
self.wfile.write('Hello GCP dev!')
return
def run():
print('Server is starting...')
server_address = ('0.0.0.0', 80)
server = HTTPServer(server_address, SimpleHTTPRequestHandler)
print('Started. Press Ctrl + C to stop')
server.serve_forever()
if __name__ == '__main__':
run()
## Instruction:
Fix ImportError and use bytes in outstream
## Code After:
try:
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
except ImportError:
from http.server import BaseHTTPRequestHandler, HTTPServer
class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
self.send_header('Content-type','text/plain')
self.end_headers()
self.wfile.write(b'Hello GCP dev!')
return
def run():
print('Server is starting...')
server_address = ('0.0.0.0', 80)
server = HTTPServer(server_address, SimpleHTTPRequestHandler)
print('Started. Press Ctrl + C to stop')
server.serve_forever()
if __name__ == '__main__':
run()
|
+ try:
- from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+ from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
? ++
+ except ImportError:
+ from http.server import BaseHTTPRequestHandler, HTTPServer
class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
self.send_header('Content-type','text/plain')
self.end_headers()
- self.wfile.write('Hello GCP dev!')
+ self.wfile.write(b'Hello GCP dev!')
? +
return
def run():
print('Server is starting...')
server_address = ('0.0.0.0', 80)
server = HTTPServer(server_address, SimpleHTTPRequestHandler)
print('Started. Press Ctrl + C to stop')
server.serve_forever()
if __name__ == '__main__':
run()
|
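A follow-up sketch, not part of the commit: the b'' literal only covers constant payloads; strings built at runtime must be encoded before they hit the socket. Assuming the same handler class:

    def do_GET(self):
        self.send_response(200)
        self.send_header('Content-type', 'text/plain')
        self.end_headers()
        body = 'Hello %s!' % 'GCP dev'          # runtime string
        self.wfile.write(body.encode('utf-8'))  # bytes on Python 2 and 3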
135a97a58a95c04d2635fff68d2c080413f1d804
|
tests/test_conditions.py
|
tests/test_conditions.py
|
import json
import unittest
import awacs.aws as aws
import awacs.s3 as s3
class TestConditions(unittest.TestCase):
def test_for_all_values(self):
c = aws.Condition(
aws.ForAllValuesStringLike(
"dynamodb:requestedAttributes", ["PostDateTime", "Message", "Tags"]
)
)
pd = aws.PolicyDocument(
Statement=[
aws.Statement(
Action=[s3.ListBucket],
Effect=aws.Allow,
Resource=[s3.ARN("myBucket")],
Condition=c,
)
]
)
self.assertEqual(
{
u"Statement": [
{
u"Action": [u"s3:ListBucket"],
u"Condition": {
u"ForAllValues:StringLike": {
u"dynamodb:requestedAttributes": [
u"PostDateTime",
u"Message",
u"Tags",
]
}
},
u"Effect": u"Allow",
u"Resource": [u"arn:aws:s3:::myBucket"],
}
]
},
json.loads(pd.to_json()),
)
|
import json
import unittest
import awacs.aws as aws
import awacs.s3 as s3
class TestConditions(unittest.TestCase):
def test_for_all_values(self):
c = aws.Condition(
aws.ForAllValuesStringLike(
"dynamodb:requestedAttributes", ["PostDateTime", "Message", "Tags"]
)
)
pd = aws.PolicyDocument(
Statement=[
aws.Statement(
Action=[s3.ListBucket],
Effect=aws.Allow,
Resource=[s3.ARN("myBucket")],
Condition=c,
)
]
)
self.assertEqual(
{
"Statement": [
{
"Action": ["s3:ListBucket"],
"Condition": {
"ForAllValues:StringLike": {
"dynamodb:requestedAttributes": [
"PostDateTime",
"Message",
"Tags",
]
}
},
"Effect": "Allow",
"Resource": ["arn:aws:s3:::myBucket"],
}
]
},
json.loads(pd.to_json()),
)
|
Remove 'u' prefix from strings
|
Remove 'u' prefix from strings
|
Python
|
bsd-2-clause
|
cloudtools/awacs
|
import json
import unittest
import awacs.aws as aws
import awacs.s3 as s3
class TestConditions(unittest.TestCase):
def test_for_all_values(self):
c = aws.Condition(
aws.ForAllValuesStringLike(
"dynamodb:requestedAttributes", ["PostDateTime", "Message", "Tags"]
)
)
pd = aws.PolicyDocument(
Statement=[
aws.Statement(
Action=[s3.ListBucket],
Effect=aws.Allow,
Resource=[s3.ARN("myBucket")],
Condition=c,
)
]
)
self.assertEqual(
{
- u"Statement": [
+ "Statement": [
{
- u"Action": [u"s3:ListBucket"],
+ "Action": ["s3:ListBucket"],
- u"Condition": {
+ "Condition": {
- u"ForAllValues:StringLike": {
+ "ForAllValues:StringLike": {
- u"dynamodb:requestedAttributes": [
+ "dynamodb:requestedAttributes": [
- u"PostDateTime",
+ "PostDateTime",
- u"Message",
+ "Message",
- u"Tags",
+ "Tags",
]
}
},
- u"Effect": u"Allow",
+ "Effect": "Allow",
- u"Resource": [u"arn:aws:s3:::myBucket"],
+ "Resource": ["arn:aws:s3:::myBucket"],
}
]
},
json.loads(pd.to_json()),
)
|
Remove 'u' prefix from strings
|
## Code Before:
import json
import unittest
import awacs.aws as aws
import awacs.s3 as s3
class TestConditions(unittest.TestCase):
def test_for_all_values(self):
c = aws.Condition(
aws.ForAllValuesStringLike(
"dynamodb:requestedAttributes", ["PostDateTime", "Message", "Tags"]
)
)
pd = aws.PolicyDocument(
Statement=[
aws.Statement(
Action=[s3.ListBucket],
Effect=aws.Allow,
Resource=[s3.ARN("myBucket")],
Condition=c,
)
]
)
self.assertEqual(
{
u"Statement": [
{
u"Action": [u"s3:ListBucket"],
u"Condition": {
u"ForAllValues:StringLike": {
u"dynamodb:requestedAttributes": [
u"PostDateTime",
u"Message",
u"Tags",
]
}
},
u"Effect": u"Allow",
u"Resource": [u"arn:aws:s3:::myBucket"],
}
]
},
json.loads(pd.to_json()),
)
## Instruction:
Remove 'u' prefix from strings
## Code After:
import json
import unittest
import awacs.aws as aws
import awacs.s3 as s3
class TestConditions(unittest.TestCase):
def test_for_all_values(self):
c = aws.Condition(
aws.ForAllValuesStringLike(
"dynamodb:requestedAttributes", ["PostDateTime", "Message", "Tags"]
)
)
pd = aws.PolicyDocument(
Statement=[
aws.Statement(
Action=[s3.ListBucket],
Effect=aws.Allow,
Resource=[s3.ARN("myBucket")],
Condition=c,
)
]
)
self.assertEqual(
{
"Statement": [
{
"Action": ["s3:ListBucket"],
"Condition": {
"ForAllValues:StringLike": {
"dynamodb:requestedAttributes": [
"PostDateTime",
"Message",
"Tags",
]
}
},
"Effect": "Allow",
"Resource": ["arn:aws:s3:::myBucket"],
}
]
},
json.loads(pd.to_json()),
)
|
import json
import unittest
import awacs.aws as aws
import awacs.s3 as s3
class TestConditions(unittest.TestCase):
def test_for_all_values(self):
c = aws.Condition(
aws.ForAllValuesStringLike(
"dynamodb:requestedAttributes", ["PostDateTime", "Message", "Tags"]
)
)
pd = aws.PolicyDocument(
Statement=[
aws.Statement(
Action=[s3.ListBucket],
Effect=aws.Allow,
Resource=[s3.ARN("myBucket")],
Condition=c,
)
]
)
self.assertEqual(
{
- u"Statement": [
? -
+ "Statement": [
{
- u"Action": [u"s3:ListBucket"],
? - -
+ "Action": ["s3:ListBucket"],
- u"Condition": {
? -
+ "Condition": {
- u"ForAllValues:StringLike": {
? -
+ "ForAllValues:StringLike": {
- u"dynamodb:requestedAttributes": [
? -
+ "dynamodb:requestedAttributes": [
- u"PostDateTime",
? -
+ "PostDateTime",
- u"Message",
? -
+ "Message",
- u"Tags",
? -
+ "Tags",
]
}
},
- u"Effect": u"Allow",
? - -
+ "Effect": "Allow",
- u"Resource": [u"arn:aws:s3:::myBucket"],
? - -
+ "Resource": ["arn:aws:s3:::myBucket"],
}
]
},
json.loads(pd.to_json()),
)
|
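A one-line footnote on the change above: in Python 3 the u prefix is accepted but redundant, so the rewrite is purely cosmetic.

assert u"s3:ListBucket" == "s3:ListBucket"  # same type and value in Python 3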
a510d20cebe2aff86a6bf842d063b5df8937a7ec
|
raven/contrib/pylons/__init__.py
|
raven/contrib/pylons/__init__.py
|
from raven.middleware import Sentry as Middleware
from raven.base import Client
class Sentry(Middleware):
def __init__(self, app, config):
if not config.get('sentry.servers'):
raise TypeError('The sentry.servers config variable is required')
client = Client(
servers=config['sentry.servers'].split(),
name=config.get('sentry.name'),
key=config.get('sentry.key'),
public_key=config.get('sentry.public_key'),
secret_key=config.get('sentry.secret_key'),
project=config.get('sentry.site_project'),
site=config.get('sentry.site_name'),
include_paths=config.get(
'sentry.include_paths', '').split() or None,
exclude_paths=config.get(
'sentry.exclude_paths', '').split() or None,
)
super(Sentry, self).__init__(app, client)
|
from raven.middleware import Sentry as Middleware
from raven.base import Client
def list_from_setting(config, setting):
value = config.get(setting)
if not value:
return None
return value.split()
class Sentry(Middleware):
def __init__(self, app, config):
if not config.get('sentry.servers'):
raise TypeError('The sentry.servers config variable is required')
servers = config.get('sentry_servers')
if servers:
servers = servers.split()
client = Client(
dsn=config.get('sentry.dsn'),
servers=list_from_setting(config, 'sentry.servers'),
name=config.get('sentry.name'),
key=config.get('sentry.key'),
public_key=config.get('sentry.public_key'),
secret_key=config.get('sentry.secret_key'),
project=config.get('sentry.project'),
site=config.get('sentry.site'),
include_paths=list_from_setting(config, 'sentry.include_paths'),
exclude_paths=list_from_setting(config, 'sentry.exclude_paths'),
)
super(Sentry, self).__init__(app, client)
|
Update site and project names for pylons integration. Fix behavior of empty lists. Add DSN.
|
Update site and project names for pylons integration. Fix behavior of empty lists. Add DSN.
|
Python
|
bsd-3-clause
|
tarkatronic/opbeat_python,tarkatronic/opbeat_python,inspirehep/raven-python,ticosax/opbeat_python,jbarbuto/raven-python,jmagnusson/raven-python,ronaldevers/raven-python,akalipetis/raven-python,collective/mr.poe,patrys/opbeat_python,arthurlogilab/raven-python,percipient/raven-python,inspirehep/raven-python,ronaldevers/raven-python,icereval/raven-python,jmp0xf/raven-python,Photonomie/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,nikolas/raven-python,patrys/opbeat_python,patrys/opbeat_python,dbravender/raven-python,akheron/raven-python,lopter/raven-python-old,akheron/raven-python,inspirehep/raven-python,akheron/raven-python,smarkets/raven-python,johansteffner/raven-python,arthurlogilab/raven-python,lepture/raven-python,danriti/raven-python,getsentry/raven-python,arthurlogilab/raven-python,johansteffner/raven-python,arthurlogilab/raven-python,openlabs/raven,recht/raven-python,beniwohli/apm-agent-python,ewdurbin/raven-python,smarkets/raven-python,alex/raven,jmagnusson/raven-python,jbarbuto/raven-python,beniwohli/apm-agent-python,someonehan/raven-python,hzy/raven-python,icereval/raven-python,danriti/raven-python,someonehan/raven-python,jmp0xf/raven-python,akalipetis/raven-python,icereval/raven-python,ewdurbin/raven-python,ticosax/opbeat_python,nikolas/raven-python,ewdurbin/raven-python,jmp0xf/raven-python,nikolas/raven-python,patrys/opbeat_python,daikeren/opbeat_python,Photonomie/raven-python,dbravender/raven-python,getsentry/raven-python,beniwohli/apm-agent-python,ronaldevers/raven-python,icereval/raven-python,jbarbuto/raven-python,johansteffner/raven-python,dirtycoder/opbeat_python,recht/raven-python,danriti/raven-python,hzy/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,akalipetis/raven-python,smarkets/raven-python,hzy/raven-python,lepture/raven-python,inspirehep/raven-python,daikeren/opbeat_python,percipient/raven-python,someonehan/raven-python,dbravender/raven-python,recht/raven-python,percipient/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,jbarbuto/raven-python,jmagnusson/raven-python,daikeren/opbeat_python,Goldmund-Wyldebeast-Wunderliebe/raven-python,smarkets/raven-python,dirtycoder/opbeat_python,tarkatronic/opbeat_python,nikolas/raven-python,Photonomie/raven-python,getsentry/raven-python,dirtycoder/opbeat_python,beniwohli/apm-agent-python,ticosax/opbeat_python,lepture/raven-python
|
from raven.middleware import Sentry as Middleware
from raven.base import Client
+
+
+ def list_from_setting(config, setting):
+ value = config.get(setting)
+ if not value:
+ return None
+ return value.split()
class Sentry(Middleware):
def __init__(self, app, config):
if not config.get('sentry.servers'):
raise TypeError('The sentry.servers config variable is required')
+ servers = config.get('sentry_servers')
+ if servers:
+ servers = servers.split()
+
client = Client(
+ dsn=config.get('sentry.dsn'),
- servers=config['sentry.servers'].split(),
+ servers=list_from_setting(config, 'sentry.servers'),
name=config.get('sentry.name'),
key=config.get('sentry.key'),
public_key=config.get('sentry.public_key'),
secret_key=config.get('sentry.secret_key'),
- project=config.get('sentry.site_project'),
+ project=config.get('sentry.project'),
- site=config.get('sentry.site_name'),
+ site=config.get('sentry.site'),
+ include_paths=list_from_setting(config, 'sentry.include_paths'),
+ exclude_paths=list_from_setting(config, 'sentry.exclude_paths'),
- include_paths=config.get(
- 'sentry.include_paths', '').split() or None,
- exclude_paths=config.get(
- 'sentry.exclude_paths', '').split() or None,
)
super(Sentry, self).__init__(app, client)
|
Update site and project names for pylons integration. Fix behavior of empty lists. Add DSN.
|
## Code Before:
from raven.middleware import Sentry as Middleware
from raven.base import Client
class Sentry(Middleware):
def __init__(self, app, config):
if not config.get('sentry.servers'):
raise TypeError('The sentry.servers config variable is required')
client = Client(
servers=config['sentry.servers'].split(),
name=config.get('sentry.name'),
key=config.get('sentry.key'),
public_key=config.get('sentry.public_key'),
secret_key=config.get('sentry.secret_key'),
project=config.get('sentry.site_project'),
site=config.get('sentry.site_name'),
include_paths=config.get(
'sentry.include_paths', '').split() or None,
exclude_paths=config.get(
'sentry.exclude_paths', '').split() or None,
)
super(Sentry, self).__init__(app, client)
## Instruction:
Update site and project names for pylons integration. Fix behavior of empty lists. Add DSN.
## Code After:
from raven.middleware import Sentry as Middleware
from raven.base import Client
def list_from_setting(config, setting):
value = config.get(setting)
if not value:
return None
return value.split()
class Sentry(Middleware):
def __init__(self, app, config):
if not config.get('sentry.servers'):
raise TypeError('The sentry.servers config variable is required')
servers = config.get('sentry_servers')
if servers:
servers = servers.split()
client = Client(
dsn=config.get('sentry.dsn'),
servers=list_from_setting(config, 'sentry.servers'),
name=config.get('sentry.name'),
key=config.get('sentry.key'),
public_key=config.get('sentry.public_key'),
secret_key=config.get('sentry.secret_key'),
project=config.get('sentry.project'),
site=config.get('sentry.site'),
include_paths=list_from_setting(config, 'sentry.include_paths'),
exclude_paths=list_from_setting(config, 'sentry.exclude_paths'),
)
super(Sentry, self).__init__(app, client)
|
from raven.middleware import Sentry as Middleware
from raven.base import Client
+
+
+ def list_from_setting(config, setting):
+ value = config.get(setting)
+ if not value:
+ return None
+ return value.split()
class Sentry(Middleware):
def __init__(self, app, config):
if not config.get('sentry.servers'):
raise TypeError('The sentry.servers config variable is required')
+ servers = config.get('sentry_servers')
+ if servers:
+ servers = servers.split()
+
client = Client(
+ dsn=config.get('sentry.dsn'),
- servers=config['sentry.servers'].split(),
? ^ --------
+ servers=list_from_setting(config, 'sentry.servers'),
? ++++++++++++++++++ ^^
name=config.get('sentry.name'),
key=config.get('sentry.key'),
public_key=config.get('sentry.public_key'),
secret_key=config.get('sentry.secret_key'),
- project=config.get('sentry.site_project'),
? -----
+ project=config.get('sentry.project'),
- site=config.get('sentry.site_name'),
? -----
+ site=config.get('sentry.site'),
+ include_paths=list_from_setting(config, 'sentry.include_paths'),
+ exclude_paths=list_from_setting(config, 'sentry.exclude_paths'),
- include_paths=config.get(
- 'sentry.include_paths', '').split() or None,
- exclude_paths=config.get(
- 'sentry.exclude_paths', '').split() or None,
)
super(Sentry, self).__init__(app, client)
|
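Two hedged observations on the rewrite above, not part of the commit: the servers = config.get('sentry_servers') block looks like a leftover, since it reads an underscore-spelled key and its result is never used (list_from_setting supplies the value). Also, because config only needs a dict-like .get(), a plain dict is enough for a construction sketch; the DSN, server URL, and paths below are made up:

app = object()  # placeholder WSGI app, for construction only
middleware = Sentry(app, {
    'sentry.dsn': 'http://public:secret@sentry.example.com/1',
    'sentry.servers': 'http://sentry.example.com',
    'sentry.include_paths': 'myapp raven',
})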
6f29293e6f447dfd80d10c173b7c5a6cc13a4243
|
main/urls.py
|
main/urls.py
|
from django.conf.urls import url
from django.views import generic
from . import views
app_name = 'main'
urlpatterns = [
url(r'^$', views.AboutView.as_view(), name='about'),
url(r'^chas/$', views.AboutChasView.as_view(), name='chas'),
url(r'^evan/$', views.AboutEvanView.as_view(), name='evan'),
]
|
from django.urls import include, path
from . import views
app_name = 'main'
urlpatterns = [
path('', views.AboutView.as_view(), name='about'),
path('chas/', views.AboutChasView.as_view(), name='chas'),
path('evan/', views.AboutEvanView.as_view(), name='evan'),
]
|
Move some urlpatterns to the Django 2.0 preferred method
|
Move some urlpatterns to the Django 2.0 preferred method
|
Python
|
mit
|
evanepio/dotmanca,evanepio/dotmanca,evanepio/dotmanca
|
+ from django.urls import include, path
- from django.conf.urls import url
- from django.views import generic
from . import views
app_name = 'main'
urlpatterns = [
- url(r'^$', views.AboutView.as_view(), name='about'),
+ path('', views.AboutView.as_view(), name='about'),
- url(r'^chas/$', views.AboutChasView.as_view(), name='chas'),
+ path('chas/', views.AboutChasView.as_view(), name='chas'),
- url(r'^evan/$', views.AboutEvanView.as_view(), name='evan'),
+ path('evan/', views.AboutEvanView.as_view(), name='evan'),
]
|
Move some urlpatterns to the Django 2.0 preferred method
|
## Code Before:
from django.conf.urls import url
from django.views import generic
from . import views
app_name = 'main'
urlpatterns = [
url(r'^$', views.AboutView.as_view(), name='about'),
url(r'^chas/$', views.AboutChasView.as_view(), name='chas'),
url(r'^evan/$', views.AboutEvanView.as_view(), name='evan'),
]
## Instruction:
Move some urlpatterns to the Django 2.0 preferred method
## Code After:
from django.urls import include, path
from . import views
app_name = 'main'
urlpatterns = [
path('', views.AboutView.as_view(), name='about'),
path('chas/', views.AboutChasView.as_view(), name='chas'),
path('evan/', views.AboutEvanView.as_view(), name='evan'),
]
|
+ from django.urls import include, path
- from django.conf.urls import url
- from django.views import generic
from . import views
app_name = 'main'
urlpatterns = [
- url(r'^$', views.AboutView.as_view(), name='about'),
? ^^^ - --
+ path('', views.AboutView.as_view(), name='about'),
? ^^^^
- url(r'^chas/$', views.AboutChasView.as_view(), name='chas'),
? ^^^ - - -
+ path('chas/', views.AboutChasView.as_view(), name='chas'),
? ^^^^
- url(r'^evan/$', views.AboutEvanView.as_view(), name='evan'),
? ^^^ - - -
+ path('evan/', views.AboutEvanView.as_view(), name='evan'),
? ^^^^
]
|
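Two notes on the Django 2.0 form above, not part of the commit: the new import pulls in include without using it, and path() accepts no regular expressions, so routes that still need one go through re_path. A sketch with a hypothetical route:

from django.urls import re_path

urlpatterns += [
    # Hypothetical regex route; path() alone could not express this.
    re_path(r'^evan/(?P<year>[0-9]{4})/$', views.AboutEvanView.as_view()),
]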
f8d94b93427ff92ae6eed58a81058cce4e661cd2
|
solum/tests/common/test_service.py
|
solum/tests/common/test_service.py
|
import testtools
from solum.common import service
from solum import objects
from solum.objects import plan as abstract
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
objects.registry.Plan()
service.prepare_service([])
self.assertTrue(issubclass(objects.registry.Plan, abstract.Plan))
|
import testtools
from solum.common import service
from solum import objects
from solum.objects import component as abstract
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
objects.registry.Component()
service.prepare_service([])
self.assertTrue(issubclass(objects.registry.Component,
abstract.Component))
|
Test service with Component instead of Plan db object
|
Test service with Component instead of Plan db object
Since plan db objects are getting removed in add-plan-in-swift,
we need to test service with another object.
Change-Id: I85537ef17f8c125d3de85ab3625ea91e9487376f
|
Python
|
apache-2.0
|
gilbertpilz/solum,ed-/solum,devdattakulkarni/test-solum,stackforge/solum,gilbertpilz/solum,openstack/solum,ed-/solum,devdattakulkarni/test-solum,ed-/solum,ed-/solum,gilbertpilz/solum,gilbertpilz/solum,openstack/solum,stackforge/solum
|
import testtools
from solum.common import service
from solum import objects
- from solum.objects import plan as abstract
+ from solum.objects import component as abstract
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
- objects.registry.Plan()
+ objects.registry.Component()
service.prepare_service([])
- self.assertTrue(issubclass(objects.registry.Plan, abstract.Plan))
+ self.assertTrue(issubclass(objects.registry.Component,
+ abstract.Component))
|
Test service with Component instead of Plan db object
|
## Code Before:
import testtools
from solum.common import service
from solum import objects
from solum.objects import plan as abstract
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
objects.registry.Plan()
service.prepare_service([])
self.assertTrue(issubclass(objects.registry.Plan, abstract.Plan))
## Instruction:
Test service with Component instead of Plan db object
## Code After:
import testtools
from solum.common import service
from solum import objects
from solum.objects import component as abstract
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
objects.registry.Component()
service.prepare_service([])
self.assertTrue(issubclass(objects.registry.Component,
abstract.Component))
|
import testtools
from solum.common import service
from solum import objects
- from solum.objects import plan as abstract
? ^^
+ from solum.objects import component as abstract
? +++ ^ +++
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
- objects.registry.Plan()
? ^^^
+ objects.registry.Component()
? ^^^^^ +++
service.prepare_service([])
- self.assertTrue(issubclass(objects.registry.Plan, abstract.Plan))
? ^^^ ----------------
+ self.assertTrue(issubclass(objects.registry.Component,
? ^^^^^ +++
+ abstract.Component))
|
dc65920f52ca584608633cc511590b41b590f79e
|
billjobs/permissions.py
|
billjobs/permissions.py
|
from rest_framework import permissions
class CustomUserAPIPermission(permissions.BasePermission):
"""
Set custom permission for UserAPI
* GET : only accessible by admin
* POST : is public, everyone can create a user
"""
def has_permission(self, request, view):
"""
Define permission based on request method
"""
if request.method == 'GET':
# admin only
return request.user and request.user.is_staff
elif request.method == 'POST':
# is public
return True
# all other methods are accepted to allow 405 response
return True
|
from rest_framework import permissions
from rest_framework.compat import is_authenticated
class CustomUserAPIPermission(permissions.BasePermission):
"""
Set custom permission for UserAPI
* GET : only accessible by admin
* POST : is public, everyone can create a user
"""
def has_permission(self, request, view):
"""
Define permission based on request method
"""
if request.method == 'GET':
# admin only
return request.user and request.user.is_staff
elif request.method == 'POST':
# is public
return True
# all other methods are accepted to allow 405 response
return True
class CustomUserDetailAPIPermission(permissions.BasePermission):
"""
Set custom permission for user detail API
* GET, PUT, DELETE :
* admin can access all users instance
* current user only his instance
* public is forbidden
"""
def has_permission(self, request, view):
"""
Give permission for admin or user to access API
"""
return (
request.user and
request.user.is_staff or
is_authenticated(request.user)
)
def has_object_permission(self, request, view, obj):
"""
        Check that the User instance in the request equals the User instance in obj
"""
return request.user.is_staff or obj == request.user
|
Create permission so admin and user can access GET, PUT and DELETE methods; a user can access only his own instance
|
Create permission so admin and user can access GET, PUT and DELETE methods; a user can access only his own instance
|
Python
|
mit
|
ioO/billjobs
|
from rest_framework import permissions
+ from rest_framework.compat import is_authenticated
class CustomUserAPIPermission(permissions.BasePermission):
"""
Set custom permission for UserAPI
* GET : only accessible by admin
* POST : is public, everyone can create a user
"""
def has_permission(self, request, view):
"""
Define permission based on request method
"""
if request.method == 'GET':
# admin only
return request.user and request.user.is_staff
elif request.method == 'POST':
# is public
return True
# all other methods are accepted to allow 405 response
return True
+ class CustomUserDetailAPIPermission(permissions.BasePermission):
+ """
+ Set custom permission for user detail API
+
+ * GET, PUT, DELETE :
+ * admin can access all users instance
+ * current user only his instance
+ * public is forbidden
+ """
+ def has_permission(self, request, view):
+ """
+ Give permission for admin or user to access API
+ """
+ return (
+ request.user and
+ request.user.is_staff or
+ is_authenticated(request.user)
+ )
+
+ def has_object_permission(self, request, view, obj):
+ """
+         Check that the User instance in the request equals the User instance in obj
+ """
+ return request.user.is_staff or obj == request.user
+
|
Create permission so admin and user can access GET, PUT and DELETE methods; a user can access only his own instance
|
## Code Before:
from rest_framework import permissions
class CustomUserAPIPermission(permissions.BasePermission):
"""
Set custom permission for UserAPI
* GET : only accessible by admin
* POST : is public, everyone can create a user
"""
def has_permission(self, request, view):
"""
Define permission based on request method
"""
if request.method == 'GET':
# admin only
return request.user and request.user.is_staff
elif request.method == 'POST':
# is public
return True
# all other methods are accepted to allow 405 response
return True
## Instruction:
Create permission so admin and user can access GET, PUT and DELETE methods; a user can access only his own instance
## Code After:
from rest_framework import permissions
from rest_framework.compat import is_authenticated
class CustomUserAPIPermission(permissions.BasePermission):
"""
Set custom permission for UserAPI
* GET : only accessible by admin
* POST : is public, everyone can create a user
"""
def has_permission(self, request, view):
"""
Define permission based on request method
"""
if request.method == 'GET':
# admin only
return request.user and request.user.is_staff
elif request.method == 'POST':
# is public
return True
# all other methods are accepted to allow 405 response
return True
class CustomUserDetailAPIPermission(permissions.BasePermission):
"""
Set custom permission for user detail API
* GET, PUT, DELETE :
* admin can access all users instance
* current user only his instance
* public is forbidden
"""
def has_permission(self, request, view):
"""
Give permission for admin or user to access API
"""
return (
request.user and
request.user.is_staff or
is_authenticated(request.user)
)
def has_object_permission(self, request, view, obj):
"""
        Check that the User instance in the request equals the User instance in obj
"""
return request.user.is_staff or obj == request.user
|
from rest_framework import permissions
+ from rest_framework.compat import is_authenticated
class CustomUserAPIPermission(permissions.BasePermission):
"""
Set custom permission for UserAPI
* GET : only accessible by admin
* POST : is public, everyone can create a user
"""
def has_permission(self, request, view):
"""
Define permission based on request method
"""
if request.method == 'GET':
# admin only
return request.user and request.user.is_staff
elif request.method == 'POST':
# is public
return True
# all other methods are accepted to allow 405 response
return True
+
+ class CustomUserDetailAPIPermission(permissions.BasePermission):
+ """
+ Set custom permission for user detail API
+
+ * GET, PUT, DELETE :
+ * admin can access all users instance
+ * current user only his instance
+ * public is forbidden
+ """
+ def has_permission(self, request, view):
+ """
+ Give permission for admin or user to access API
+ """
+ return (
+ request.user and
+ request.user.is_staff or
+ is_authenticated(request.user)
+ )
+
+ def has_object_permission(self, request, view, obj):
+ """
+         Check that the User instance in the request equals the User instance in obj
+ """
+ return request.user.is_staff or obj == request.user
|
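A precedence note on has_permission above, not part of the commit: and binds tighter than or, so the return value is (request.user and request.user.is_staff) or is_authenticated(request.user). A sketch with the grouping made explicit; on Django >= 1.10 is_authenticated is a plain property, which makes the compat helper unnecessary:

    def has_permission(self, request, view):
        # Equivalent in practice, since request.user is always set by DRF.
        return bool(request.user) and (
            request.user.is_staff or request.user.is_authenticated
        )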
deb87fefcc7fa76de3ae29ae58e816e49184d100
|
openfisca_core/model_api.py
|
openfisca_core/model_api.py
|
from datetime import date # noqa analysis:ignore
from numpy import maximum as max_, minimum as min_, logical_not as not_, where, select # noqa analysis:ignore
from .columns import ( # noqa analysis:ignore
AgeCol,
BoolCol,
DateCol,
EnumCol,
FixedStrCol,
FloatCol,
IntCol,
PeriodSizeIndependentIntCol,
StrCol,
)
from .enumerations import Enum # noqa analysis:ignore
from .formulas import ( # noqa analysis:ignore
ADD,
calculate_output_add,
calculate_output_divide,
dated_function,
DIVIDE,
set_input_dispatch_by_period,
set_input_divide_by_period,
missing_value
)
from .base_functions import ( # noqa analysis:ignore
requested_period_added_value,
requested_period_default_value,
requested_period_last_or_next_value,
requested_period_last_value,
)
from .variables import DatedVariable, Variable # noqa analysis:ignore
from .formula_helpers import apply_thresholds, switch # noqa analysis:ignore
from .periods import MONTH, YEAR, ETERNITY # noqa analysis:ignore
from .reforms import Reform # noqa analysis:ignore
|
from datetime import date # noqa analysis:ignore
from numpy import ( # noqa analysis:ignore
logical_not as not_,
maximum as max_,
minimum as min_,
round as round_,
select,
where,
)
from .columns import ( # noqa analysis:ignore
AgeCol,
BoolCol,
DateCol,
EnumCol,
FixedStrCol,
FloatCol,
IntCol,
PeriodSizeIndependentIntCol,
StrCol,
)
from .enumerations import Enum # noqa analysis:ignore
from .formulas import ( # noqa analysis:ignore
ADD,
calculate_output_add,
calculate_output_divide,
dated_function,
DIVIDE,
set_input_dispatch_by_period,
set_input_divide_by_period,
missing_value
)
from .base_functions import ( # noqa analysis:ignore
requested_period_added_value,
requested_period_default_value,
requested_period_last_or_next_value,
requested_period_last_value,
)
from .variables import DatedVariable, Variable # noqa analysis:ignore
from .formula_helpers import apply_thresholds, switch # noqa analysis:ignore
from .periods import MONTH, YEAR, ETERNITY # noqa analysis:ignore
from .reforms import Reform # noqa analysis:ignore
|
Add numpy.round to model api
|
Add numpy.round to model api
|
Python
|
agpl-3.0
|
openfisca/openfisca-core,openfisca/openfisca-core
|
from datetime import date # noqa analysis:ignore
- from numpy import maximum as max_, minimum as min_, logical_not as not_, where, select # noqa analysis:ignore
+ from numpy import ( # noqa analysis:ignore
+ logical_not as not_,
+ maximum as max_,
+ minimum as min_,
+ round as round_,
+ select,
+ where,
+ )
from .columns import ( # noqa analysis:ignore
AgeCol,
BoolCol,
DateCol,
EnumCol,
FixedStrCol,
FloatCol,
IntCol,
PeriodSizeIndependentIntCol,
StrCol,
)
from .enumerations import Enum # noqa analysis:ignore
from .formulas import ( # noqa analysis:ignore
ADD,
calculate_output_add,
calculate_output_divide,
dated_function,
DIVIDE,
set_input_dispatch_by_period,
set_input_divide_by_period,
missing_value
)
from .base_functions import ( # noqa analysis:ignore
requested_period_added_value,
requested_period_default_value,
requested_period_last_or_next_value,
requested_period_last_value,
)
from .variables import DatedVariable, Variable # noqa analysis:ignore
from .formula_helpers import apply_thresholds, switch # noqa analysis:ignore
from .periods import MONTH, YEAR, ETERNITY # noqa analysis:ignore
from .reforms import Reform # noqa analysis:ignore
|
Add numpy.round to model api
|
## Code Before:
from datetime import date # noqa analysis:ignore
from numpy import maximum as max_, minimum as min_, logical_not as not_, where, select # noqa analysis:ignore
from .columns import ( # noqa analysis:ignore
AgeCol,
BoolCol,
DateCol,
EnumCol,
FixedStrCol,
FloatCol,
IntCol,
PeriodSizeIndependentIntCol,
StrCol,
)
from .enumerations import Enum # noqa analysis:ignore
from .formulas import ( # noqa analysis:ignore
ADD,
calculate_output_add,
calculate_output_divide,
dated_function,
DIVIDE,
set_input_dispatch_by_period,
set_input_divide_by_period,
missing_value
)
from .base_functions import ( # noqa analysis:ignore
requested_period_added_value,
requested_period_default_value,
requested_period_last_or_next_value,
requested_period_last_value,
)
from .variables import DatedVariable, Variable # noqa analysis:ignore
from .formula_helpers import apply_thresholds, switch # noqa analysis:ignore
from .periods import MONTH, YEAR, ETERNITY # noqa analysis:ignore
from .reforms import Reform # noqa analysis:ignore
## Instruction:
Add numpy.round to model api
## Code After:
from datetime import date # noqa analysis:ignore
from numpy import ( # noqa analysis:ignore
logical_not as not_,
maximum as max_,
minimum as min_,
round as round_,
select,
where,
)
from .columns import ( # noqa analysis:ignore
AgeCol,
BoolCol,
DateCol,
EnumCol,
FixedStrCol,
FloatCol,
IntCol,
PeriodSizeIndependentIntCol,
StrCol,
)
from .enumerations import Enum # noqa analysis:ignore
from .formulas import ( # noqa analysis:ignore
ADD,
calculate_output_add,
calculate_output_divide,
dated_function,
DIVIDE,
set_input_dispatch_by_period,
set_input_divide_by_period,
missing_value
)
from .base_functions import ( # noqa analysis:ignore
requested_period_added_value,
requested_period_default_value,
requested_period_last_or_next_value,
requested_period_last_value,
)
from .variables import DatedVariable, Variable # noqa analysis:ignore
from .formula_helpers import apply_thresholds, switch # noqa analysis:ignore
from .periods import MONTH, YEAR, ETERNITY # noqa analysis:ignore
from .reforms import Reform # noqa analysis:ignore
|
from datetime import date # noqa analysis:ignore
- from numpy import maximum as max_, minimum as min_, logical_not as not_, where, select # noqa analysis:ignore
+ from numpy import ( # noqa analysis:ignore
+ logical_not as not_,
+ maximum as max_,
+ minimum as min_,
+ round as round_,
+ select,
+ where,
+ )
from .columns import ( # noqa analysis:ignore
AgeCol,
BoolCol,
DateCol,
EnumCol,
FixedStrCol,
FloatCol,
IntCol,
PeriodSizeIndependentIntCol,
StrCol,
)
from .enumerations import Enum # noqa analysis:ignore
from .formulas import ( # noqa analysis:ignore
ADD,
calculate_output_add,
calculate_output_divide,
dated_function,
DIVIDE,
set_input_dispatch_by_period,
set_input_divide_by_period,
missing_value
)
from .base_functions import ( # noqa analysis:ignore
requested_period_added_value,
requested_period_default_value,
requested_period_last_or_next_value,
requested_period_last_value,
)
from .variables import DatedVariable, Variable # noqa analysis:ignore
from .formula_helpers import apply_thresholds, switch # noqa analysis:ignore
from .periods import MONTH, YEAR, ETERNITY # noqa analysis:ignore
from .reforms import Reform # noqa analysis:ignore
|
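A small usage sketch of the newly exported alias, not part of the commit; numpy.round's second argument is the number of decimals (default 0):

from numpy import array, round as round_
print(round_(array([1234.567, 89.012]), 1))  # -> [1234.6   89. ]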
eacc1f88f7e34e26c3a4d29ec009b4984c10a345
|
SimPEG/Mesh/__init__.py
|
SimPEG/Mesh/__init__.py
|
from TensorMesh import TensorMesh
from CylMesh import CylMesh
from Cyl1DMesh import Cyl1DMesh
from LogicallyRectMesh import LogicallyRectMesh
from TreeMesh import TreeMesh
from BaseMesh import BaseMesh
|
from TensorMesh import TensorMesh
from CylMesh import CylMesh
from LogicallyRectMesh import LogicallyRectMesh
from TreeMesh import TreeMesh
from BaseMesh import BaseMesh
|
Remove Cyl1DMesh from init file...
|
Remove Cyl1DMesh from init file...
|
Python
|
mit
|
simpeg/discretize,simpeg/simpeg,simpeg/discretize,simpeg/discretize
|
from TensorMesh import TensorMesh
from CylMesh import CylMesh
- from Cyl1DMesh import Cyl1DMesh
from LogicallyRectMesh import LogicallyRectMesh
from TreeMesh import TreeMesh
from BaseMesh import BaseMesh
|
Remove Cyl1DMesh from init file...
|
## Code Before:
from TensorMesh import TensorMesh
from CylMesh import CylMesh
from Cyl1DMesh import Cyl1DMesh
from LogicallyRectMesh import LogicallyRectMesh
from TreeMesh import TreeMesh
from BaseMesh import BaseMesh
## Instruction:
Remove Cyl1DMesh from init file...
## Code After:
from TensorMesh import TensorMesh
from CylMesh import CylMesh
from LogicallyRectMesh import LogicallyRectMesh
from TreeMesh import TreeMesh
from BaseMesh import BaseMesh
|
from TensorMesh import TensorMesh
from CylMesh import CylMesh
- from Cyl1DMesh import Cyl1DMesh
from LogicallyRectMesh import LogicallyRectMesh
from TreeMesh import TreeMesh
from BaseMesh import BaseMesh
|
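As a hedged aside, the imports above use Python 2's implicit relative form; the Python 3 equivalent for the same package (assuming the module layout is unchanged) would be:

# Explicit relative imports, the Python 3 form of the lines above.
from .TensorMesh import TensorMesh
from .CylMesh import CylMesh
from .LogicallyRectMesh import LogicallyRectMesh
from .TreeMesh import TreeMesh
from .BaseMesh import BaseMesh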
a6a59cc0fded7bd2f6dc1d0d01e68836f33726aa
|
mdotdevs/tests.py
|
mdotdevs/tests.py
|
from django.test import TestCase
# Create your tests here.
|
from django.test import TestCase
from django.test import Client
from django.core.urlresolvers import resolve
class MdotdevTest(TestCase):
def setUp(self):
self.client = Client()
pass
def test_url_home(self):
resolver = resolve('/developers/')
self.assertEqual('home', resolver.view_name)
def test_url_guidelines(self):
resolver = resolve('/developers/guidelines/')
self.assertEqual('guidelines', resolver.view_name)
def test_url_process(self):
resolver = resolve('/developers/process/')
self.assertEqual('process', resolver.view_name)
def test_url_review(self):
resolver = resolve('/developers/review/')
self.assertEqual('review', resolver.view_name)
def test_view_home(self):
response = self.client.get('/developers/')
self.assertEqual(response.status_code, 200)
def test_view_guidelines(self):
response = self.client.get('/developers/guidelines/')
self.assertEqual(response.status_code, 200)
def test_view_process(self):
response = self.client.get('/developers/process/')
self.assertEqual(response.status_code, 200)
def test_view_review(self):
response = self.client.get('/developers/review/')
self.assertEqual(response.status_code, 200)
def tearDown(self):
pass
|
Test the urls.py and views.py.
|
Test the urls.py and views.py.
|
Python
|
apache-2.0
|
uw-it-aca/mdot-developers,uw-it-aca/mdot-developers
|
from django.test import TestCase
+ from django.test import Client
+ from django.core.urlresolvers import resolve
- # Create your tests here.
+ class MdotdevTest(TestCase):
+ def setUp(self):
+ self.client = Client()
+ pass
+
+ def test_url_home(self):
+ resolver = resolve('/developers/')
+ self.assertEqual('home', resolver.view_name)
+
+ def test_url_guidelines(self):
+ resolver = resolve('/developers/guidelines/')
+ self.assertEqual('guidelines', resolver.view_name)
+
+ def test_url_process(self):
+ resolver = resolve('/developers/process/')
+ self.assertEqual('process', resolver.view_name)
+
+ def test_url_review(self):
+ resolver = resolve('/developers/review/')
+ self.assertEqual('review', resolver.view_name)
+
+ def test_view_home(self):
+ response = self.client.get('/developers/')
+ self.assertEqual(response.status_code, 200)
+
+ def test_view_guidelines(self):
+ response = self.client.get('/developers/guidelines/')
+ self.assertEqual(response.status_code, 200)
+
+ def test_view_process(self):
+ response = self.client.get('/developers/process/')
+ self.assertEqual(response.status_code, 200)
+
+ def test_view_review(self):
+        response = self.client.get('/developers/review/')
+ self.assertEqual(response.status_code, 200)
+
+ def tearDown(self):
+ pass
+
|
Test the urls.py and views.py.
|
## Code Before:
from django.test import TestCase
# Create your tests here.
## Instruction:
Test the urls.py and views.py.
## Code After:
from django.test import TestCase
from django.test import Client
from django.core.urlresolvers import resolve
class MdotdevTest(TestCase):
def setUp(self):
self.client = Client()
pass
def test_url_home(self):
resolver = resolve('/developers/')
self.assertEqual('home', resolver.view_name)
def test_url_guidelines(self):
resolver = resolve('/developers/guidelines/')
self.assertEqual('guidelines', resolver.view_name)
def test_url_process(self):
resolver = resolve('/developers/process/')
self.assertEqual('process', resolver.view_name)
def test_url_review(self):
resolver = resolve('/developers/review/')
self.assertEqual('review', resolver.view_name)
def test_view_home(self):
response = self.client.get('/developers/')
self.assertEqual(response.status_code, 200)
def test_view_guidelines(self):
response = self.client.get('/developers/guidelines/')
self.assertEqual(response.status_code, 200)
def test_view_process(self):
response = self.client.get('/developers/process/')
self.assertEqual(response.status_code, 200)
def test_view_review(self):
response = self.client.get('/developers/review/')
self.assertEqual(response.status_code, 200)
def tearDown(self):
pass
|
from django.test import TestCase
+ from django.test import Client
+ from django.core.urlresolvers import resolve
- # Create your tests here.
+ class MdotdevTest(TestCase):
+
+ def setUp(self):
+ self.client = Client()
+ pass
+
+ def test_url_home(self):
+ resolver = resolve('/developers/')
+ self.assertEqual('home', resolver.view_name)
+
+ def test_url_guidelines(self):
+ resolver = resolve('/developers/guidelines/')
+ self.assertEqual('guidelines', resolver.view_name)
+
+ def test_url_process(self):
+ resolver = resolve('/developers/process/')
+ self.assertEqual('process', resolver.view_name)
+
+ def test_url_review(self):
+ resolver = resolve('/developers/review/')
+ self.assertEqual('review', resolver.view_name)
+
+ def test_view_home(self):
+ response = self.client.get('/developers/')
+ self.assertEqual(response.status_code, 200)
+
+ def test_view_guidelines(self):
+ response = self.client.get('/developers/guidelines/')
+ self.assertEqual(response.status_code, 200)
+
+ def test_view_process(self):
+ response = self.client.get('/developers/process/')
+ self.assertEqual(response.status_code, 200)
+
+ def test_view_review(self):
+        response = self.client.get('/developers/review/')
+ self.assertEqual(response.status_code, 200)
+
+ def tearDown(self):
+ pass
|
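One caveat worth noting as an aside: django.core.urlresolvers was removed in Django 2.0, so on a newer Django the same checks would read as follows (a sketch, assuming no other changes to the project):

# Equivalent imports and a sample test on Django >= 2.0.
from django.test import Client, TestCase
from django.urls import resolve

class MdotdevTest(TestCase):
    def test_url_home(self):
        self.assertEqual('home', resolve('/developers/').view_name)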
25e5070a575de1ae7e20d6ede71297ab424cea87
|
bluegreen-example/app.py
|
bluegreen-example/app.py
|
import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello 0-downtime %s World!" % os.environ.get('BLUEGREEN', 'bland')
|
import os
from flask import Flask, send_from_directory
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello 0-downtime %s World!" % os.environ.get('BLUEGREEN', 'bland')
@app.route("/parrots/<path:path>")
def parrot(path):
return send_from_directory(os.path.join('parrots', 'parrots'), path)
|
Add a route to send parrot images
|
Add a route to send parrot images
|
Python
|
mit
|
dbravender/gitric
|
import os
- from flask import Flask
+ from flask import Flask, send_from_directory
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello 0-downtime %s World!" % os.environ.get('BLUEGREEN', 'bland')
+
+ @app.route("/parrots/<path:path>")
+ def parrot(path):
+ return send_from_directory(os.path.join('parrots', 'parrots'), path)
+
|
Add a route to send parrot images
|
## Code Before:
import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello 0-downtime %s World!" % os.environ.get('BLUEGREEN', 'bland')
## Instruction:
Add a route to send parrot images
## Code After:
import os
from flask import Flask, send_from_directory
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello 0-downtime %s World!" % os.environ.get('BLUEGREEN', 'bland')
@app.route("/parrots/<path:path>")
def parrot(path):
return send_from_directory(os.path.join('parrots', 'parrots'), path)
|
import os
- from flask import Flask
+ from flask import Flask, send_from_directory
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello 0-downtime %s World!" % os.environ.get('BLUEGREEN', 'bland')
+
+
+ @app.route("/parrots/<path:path>")
+ def parrot(path):
+ return send_from_directory(os.path.join('parrots', 'parrots'), path)
|
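A quick smoke test for the new route using Flask's built-in test client — the image file name below is an assumption, not part of the commit:

# Assumes a file parrots/parrots/partyparrot.gif exists next to app.py.
from app import app

with app.test_client() as client:
    resp = client.get('/parrots/partyparrot.gif')
    print(resp.status_code)  # 200 if the file is present, 404 otherwise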
ac9c8fe7519ff76b4f4002ae8c50e0185fa4bb88
|
tools/test_filter.py
|
tools/test_filter.py
|
{
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
'bslalg_constructorproxy': [ {'OS': 'AIX', 'library': 'shared_library'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
|
{
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
|
Remove exception for bslalg_constructorproxy test driver on AIX shared library builds.
|
Remove exception for bslalg_constructorproxy test driver on AIX shared library builds.
|
Python
|
apache-2.0
|
abeels/bde,che2/bde,minhlongdo/bde,bloomberg/bde-allocator-benchmarks,bowlofstew/bde,bloomberg/bde-allocator-benchmarks,jmptrader/bde,abeels/bde,dharesign/bde,frutiger/bde,che2/bde,apaprocki/bde,RMGiroux/bde-allocator-benchmarks,idispatch/bde,gbleaney/Allocator-Benchmarks,bloomberg/bde-allocator-benchmarks,idispatch/bde,che2/bde,gbleaney/Allocator-Benchmarks,osubboo/bde,RMGiroux/bde-allocator-benchmarks,bowlofstew/bde,apaprocki/bde,frutiger/bde,mversche/bde,dbremner/bde,jmptrader/bde,minhlongdo/bde,mversche/bde,idispatch/bde,saxena84/bde,frutiger/bde,bloomberg/bde,abeels/bde,frutiger/bde,che2/bde,apaprocki/bde,gbleaney/Allocator-Benchmarks,bloomberg/bde,minhlongdo/bde,jmptrader/bde,dbremner/bde,bloomberg/bde,minhlongdo/bde,RMGiroux/bde-allocator-benchmarks,saxena84/bde,apaprocki/bde,bloomberg/bde-allocator-benchmarks,osubboo/bde,RMGiroux/bde-allocator-benchmarks,bowlofstew/bde,bloomberg/bde,dbremner/bde,bloomberg/bde,abeels/bde,saxena84/bde,mversche/bde,mversche/bde,dharesign/bde,osubboo/bde,dbremner/bde,gbleaney/Allocator-Benchmarks,bloomberg/bde-allocator-benchmarks,dharesign/bde,osubboo/bde,abeels/bde,jmptrader/bde,idispatch/bde,dharesign/bde,RMGiroux/bde-allocator-benchmarks,apaprocki/bde,saxena84/bde,abeels/bde,bowlofstew/bde
|
{
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
- 'bslalg_constructorproxy': [ {'OS': 'AIX', 'library': 'shared_library'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
|
Remove exception for bslalg_constructorproxy test driver on AIX shared library builds.
|
## Code Before:
{
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
'bslalg_constructorproxy': [ {'OS': 'AIX', 'library': 'shared_library'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
## Instruction:
Remove exception for bslalg_constructorproxy test driver on AIX shared library builds.
## Code After:
{
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
|
{
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
- 'bslalg_constructorproxy': [ {'OS': 'AIX', 'library': 'shared_library'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
|
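Since the file is a plain dict literal, a hypothetical consumer (the real test harness is not shown in this record) could evaluate and query it like so:

# Hypothetical reader of tools/test_filter.py; ast.literal_eval is safe
# here because the file contains only a literal expression.
import ast

with open('tools/test_filter.py') as f:
    filters = ast.literal_eval(f.read())

def matches(component, env):
    return any(all(env.get(k) == v for k, v in rule.items() if k != 'policy')
               for rule in filters.get(component, []))

print(matches('bsls_stopwatch', {'case': 6, 'HOST': 'VM'}))  # True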
4029da285fff38524cd30212475868ccda457df6
|
pylibscrypt/__init__.py
|
pylibscrypt/__init__.py
|
'Scrypt for Python'
__version__ = '1.1.0'
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, try the scrypt module
if not _done:
try:
from pyscrypt import *
except ImportError:
pass
else:
_done = True
# Unless we are on pypy, we want to try libsodium as well
if not _done:
import platform
if platform.python_implementation() != 'PyPy':
try:
from pylibsodium import *
except ImportError:
pass
else:
_done = True
# If that didn't work either, the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
__all__ = ['scrypt', 'scrypt_mcf', 'scrypt_mcf_check']
# Clean up pydoc output
del __path__
del consts
|
"""Scrypt for Python"""
__version__ = '1.2.0-git'
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, try the scrypt module
if not _done:
try:
from pyscrypt import *
except ImportError:
pass
else:
_done = True
# Unless we are on pypy, we want to try libsodium as well
if not _done:
import platform
if platform.python_implementation() != 'PyPy':
try:
from pylibsodium import *
except ImportError:
pass
else:
_done = True
# If that didn't work either, the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
__all__ = ['scrypt', 'scrypt_mcf', 'scrypt_mcf_check']
# Clean up pydoc output
del __path__
del consts
|
Increment version number for git master
|
Increment version number for git master
|
Python
|
isc
|
jvarho/pylibscrypt,jvarho/pylibscrypt
|
- 'Scrypt for Python'
+ """Scrypt for Python"""
- __version__ = '1.1.0'
+ __version__ = '1.2.0-git'
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, try the scrypt module
if not _done:
try:
from pyscrypt import *
except ImportError:
pass
else:
_done = True
# Unless we are on pypy, we want to try libsodium as well
if not _done:
import platform
if platform.python_implementation() != 'PyPy':
try:
from pylibsodium import *
except ImportError:
pass
else:
_done = True
# If that didn't work either, the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
__all__ = ['scrypt', 'scrypt_mcf', 'scrypt_mcf_check']
# Clean up pydoc output
del __path__
del consts
|
Increment version number for git master
|
## Code Before:
'Scrypt for Python'
__version__ = '1.1.0'
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, try the scrypt module
if not _done:
try:
from pyscrypt import *
except ImportError:
pass
else:
_done = True
# Unless we are on pypy, we want to try libsodium as well
if not _done:
import platform
if platform.python_implementation() != 'PyPy':
try:
from pylibsodium import *
except ImportError:
pass
else:
_done = True
# If that didn't work either, the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
__all__ = ['scrypt', 'scrypt_mcf', 'scrypt_mcf_check']
# Clean up pydoc output
del __path__
del consts
## Instruction:
Increment version number for git master
## Code After:
"""Scrypt for Python"""
__version__ = '1.2.0-git'
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, try the scrypt module
if not _done:
try:
from pyscrypt import *
except ImportError:
pass
else:
_done = True
# Unless we are on pypy, we want to try libsodium as well
if not _done:
import platform
if platform.python_implementation() != 'PyPy':
try:
from pylibsodium import *
except ImportError:
pass
else:
_done = True
# If that didn't work either, the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
__all__ = ['scrypt', 'scrypt_mcf', 'scrypt_mcf_check']
# Clean up pydoc output
del __path__
del consts
|
- 'Scrypt for Python'
? ^ ^
+ """Scrypt for Python"""
? ^^^ ^^^
- __version__ = '1.1.0'
? ^
+ __version__ = '1.2.0-git'
? ^ ++++
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, try the scrypt module
if not _done:
try:
from pyscrypt import *
except ImportError:
pass
else:
_done = True
# Unless we are on pypy, we want to try libsodium as well
if not _done:
import platform
if platform.python_implementation() != 'PyPy':
try:
from pylibsodium import *
except ImportError:
pass
else:
_done = True
# If that didn't work either, the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
__all__ = ['scrypt', 'scrypt_mcf', 'scrypt_mcf_check']
# Clean up pydoc output
del __path__
del consts
|
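For context, a brief usage sketch of the three exported names (the keyword defaults are assumptions):

# scrypt_mcf generates a salt when none is given; scrypt_mcf_check verifies.
import pylibscrypt

mcf = pylibscrypt.scrypt_mcf(b'hunter2')
print(pylibscrypt.scrypt_mcf_check(mcf, b'hunter2'))  # True
print(pylibscrypt.scrypt_mcf_check(mcf, b'wrong'))    # False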
f7852806c3198d58162b66e18bfd9998ef33b63c
|
lexos/receivers/stats_receiver.py
|
lexos/receivers/stats_receiver.py
|
from lexos.receivers.base_receiver import BaseReceiver
class StatsReceiver(BaseReceiver):
def __init__(self):
"""So far there is no frontend option for statistics analysis"""
super().__init__()
def options_from_front_end(self):
"""So far there is no frontend option for statistics analysis"""
pass
|
from lexos.receivers.base_receiver import BaseReceiver
class StatsReceiver(BaseReceiver):
def __init__(self):
"""So far there is no frontend option for statistics analysis"""
super().__init__()
def options_from_front_end(self):
"""So far there is no frontend option for statistics analysis"""
raise NotImplementedError
|
Modify receiver to prevent use in the future
|
Modify receiver to prevent use in the future
|
Python
|
mit
|
WheatonCS/Lexos,WheatonCS/Lexos,WheatonCS/Lexos
|
from lexos.receivers.base_receiver import BaseReceiver
class StatsReceiver(BaseReceiver):
def __init__(self):
"""So far there is no frontend option for statistics analysis"""
super().__init__()
def options_from_front_end(self):
"""So far there is no frontend option for statistics analysis"""
- pass
+ raise NotImplementedError
|
Modify receiver to prevent use in the future
|
## Code Before:
from lexos.receivers.base_receiver import BaseReceiver
class StatsReceiver(BaseReceiver):
def __init__(self):
"""So far there is no frontend option for statistics analysis"""
super().__init__()
def options_from_front_end(self):
"""So far there is no frontend option for statistics analysis"""
pass
## Instruction:
Modify receiver to prevent use in the future
## Code After:
from lexos.receivers.base_receiver import BaseReceiver
class StatsReceiver(BaseReceiver):
def __init__(self):
"""So far there is no frontend option for statistics analysis"""
super().__init__()
def options_from_front_end(self):
"""So far there is no frontend option for statistics analysis"""
raise NotImplementedError
|
from lexos.receivers.base_receiver import BaseReceiver
class StatsReceiver(BaseReceiver):
def __init__(self):
"""So far there is no frontend option for statistics analysis"""
super().__init__()
def options_from_front_end(self):
"""So far there is no frontend option for statistics analysis"""
- pass
+ raise NotImplementedError
|
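The practical effect of the change, illustrated with generic classes (not from the repo):

# `pass` returns None silently; `raise NotImplementedError` fails loudly,
# which is what blocks accidental use of the unfinished receiver.
class Silent:
    def options_from_front_end(self):
        pass

class Loud:
    def options_from_front_end(self):
        raise NotImplementedError

Silent().options_from_front_end()  # no error; silently returns None
Loud().options_from_front_end()    # raises NotImplementedError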
caab96114964a1c9154df67d97c66c701cede8d9
|
waterbutler/core/__init__.py
|
waterbutler/core/__init__.py
|
from waterbutler.core.utils import async_retry
from waterbutler.core.utils import backgrounded
__all__ = [
'backgrounded',
'async_retry'
]
|
from waterbutler.core.utils import async_retry
from waterbutler.core.utils import make_provider
__all__ = [
'async_retry',
'make_provider',
]
|
Allow make_provider to be imported from waterbutler core
|
Allow make_provider to be imported from waterbutler core
|
Python
|
apache-2.0
|
chrisseto/waterbutler,Johnetordoff/waterbutler,rafaeldelucena/waterbutler,cosenal/waterbutler,CenterForOpenScience/waterbutler,rdhyee/waterbutler,Ghalko/waterbutler,kwierman/waterbutler,icereval/waterbutler,felliott/waterbutler,hmoco/waterbutler,TomBaxter/waterbutler,RCOSDP/waterbutler
|
from waterbutler.core.utils import async_retry
- from waterbutler.core.utils import backgrounded
+ from waterbutler.core.utils import make_provider
__all__ = [
- 'backgrounded',
- 'async_retry'
+ 'async_retry',
+ 'make_provider',
]
|
Allow make_provider to be imported from waterbutler core
|
## Code Before:
from waterbutler.core.utils import async_retry
from waterbutler.core.utils import backgrounded
__all__ = [
'backgrounded',
'async_retry'
]
## Instruction:
Allow make_provider to be imported from waterbutler core
## Code After:
from waterbutler.core.utils import async_retry
from waterbutler.core.utils import make_provider
__all__ = [
'async_retry',
'make_provider',
]
|
from waterbutler.core.utils import async_retry
- from waterbutler.core.utils import backgrounded
? ^ - ^ ^^ ^
+ from waterbutler.core.utils import make_provider
? ^ ^^^ ^^ ^
__all__ = [
- 'backgrounded',
- 'async_retry'
+ 'async_retry',
? +
+ 'make_provider',
]
|
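A sketch of the call site this re-export enables; the provider name and argument shapes below are assumptions, not confirmed by the record:

# Hypothetical usage: resolve a storage provider from the package root.
from waterbutler.core import make_provider

provider = make_provider('osfstorage', auth={}, credentials={}, settings={})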
760bafe686a6937c60cf9ee162c7e59ba673a5c3
|
wagtail/embeds/migrations/0008_allow_long_urls.py
|
wagtail/embeds/migrations/0008_allow_long_urls.py
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
max_length=255,
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
|
Add missing max_length on temporary thumbnail_url migration
|
Add missing max_length on temporary thumbnail_url migration
Fixes #7323
|
Python
|
bsd-3-clause
|
gasman/wagtail,mixxorz/wagtail,rsalmaso/wagtail,rsalmaso/wagtail,jnns/wagtail,zerolab/wagtail,jnns/wagtail,thenewguy/wagtail,rsalmaso/wagtail,gasman/wagtail,torchbox/wagtail,gasman/wagtail,jnns/wagtail,rsalmaso/wagtail,zerolab/wagtail,thenewguy/wagtail,thenewguy/wagtail,wagtail/wagtail,zerolab/wagtail,mixxorz/wagtail,wagtail/wagtail,mixxorz/wagtail,torchbox/wagtail,zerolab/wagtail,torchbox/wagtail,mixxorz/wagtail,mixxorz/wagtail,torchbox/wagtail,zerolab/wagtail,wagtail/wagtail,wagtail/wagtail,gasman/wagtail,wagtail/wagtail,thenewguy/wagtail,gasman/wagtail,thenewguy/wagtail,rsalmaso/wagtail,jnns/wagtail
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
+ max_length=255,
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
|
Add missing max_length on temporary thumbnail_url migration
|
## Code Before:
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
## Instruction:
Add missing max_length on temporary thumbnail_url migration
## Code After:
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
max_length=255,
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
+ max_length=255,
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
|
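An illustration of why the explicit max_length matters here — Django's URLField defaults to 200, so the temporary column would otherwise not match the 255 the field presumably had before:

# The two-stage alter only works if the intermediate URLField keeps the
# original column width; URLField() alone would default to 200.
from django.db import models

print(models.URLField().max_length)                # 200 (Django default)
print(models.URLField(max_length=255).max_length)  # 255 (matches original)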