commit
stringlengths 40
40
| old_file
stringlengths 4
106
| new_file
stringlengths 4
106
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
2.95k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
43k
| ndiff
stringlengths 52
3.31k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| diff
stringlengths 49
3.61k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3bd116a301ce8de9d3ea1b0dd4c0a969c278455a
|
wsgi.py
|
wsgi.py
|
from shale import app
if __name__ == '__main__':
app.run(
host='127.0.0.1',
)
|
from shale import app
if __name__ == '__main__':
app.run()
|
Revert "bind flask to 127.0.0.1"
|
Revert "bind flask to 127.0.0.1"
This reverts commit 097b126e511d3d7bf5f431cc6df552843fad4477.
I guess I was way wrong about that.
|
Python
|
mit
|
mhluongo/shale,mhluongo/shale,cardforcoin/shale,cardforcoin/shale
|
from shale import app
if __name__ == '__main__':
- app.run(
+ app.run()
- host='127.0.0.1',
- )
|
Revert "bind flask to 127.0.0.1"
|
## Code Before:
from shale import app
if __name__ == '__main__':
app.run(
host='127.0.0.1',
)
## Instruction:
Revert "bind flask to 127.0.0.1"
## Code After:
from shale import app
if __name__ == '__main__':
app.run()
|
from shale import app
if __name__ == '__main__':
- app.run(
+ app.run()
? +
- host='127.0.0.1',
- )
|
98ba687e67c8d5a17560bed59f42dbe8e3fb0cf6
|
amaascore/books/enums.py
|
amaascore/books/enums.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Individual', 'Management', 'Trading', 'Wash'}
|
from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Management', 'Trading', 'Wash'}
|
Remove Individual as a book_type - it doesn’t really add anything. AMAAS-639.
|
Remove Individual as a book_type - it doesn’t really add anything. AMAAS-639.
|
Python
|
apache-2.0
|
amaas-fintech/amaas-core-sdk-python,nedlowe/amaas-core-sdk-python,paul-rs/amaas-core-sdk-python,amaas-fintech/amaas-core-sdk-python,nedlowe/amaas-core-sdk-python,paul-rs/amaas-core-sdk-python
|
from __future__ import absolute_import, division, print_function, unicode_literals
- BOOK_TYPES = {'Counterparty', 'Individual', 'Management', 'Trading', 'Wash'}
+ BOOK_TYPES = {'Counterparty', 'Management', 'Trading', 'Wash'}
|
Remove Individual as a book_type - it doesn’t really add anything. AMAAS-639.
|
## Code Before:
from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Individual', 'Management', 'Trading', 'Wash'}
## Instruction:
Remove Individual as a book_type - it doesn’t really add anything. AMAAS-639.
## Code After:
from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Management', 'Trading', 'Wash'}
|
from __future__ import absolute_import, division, print_function, unicode_literals
- BOOK_TYPES = {'Counterparty', 'Individual', 'Management', 'Trading', 'Wash'}
? --------------
+ BOOK_TYPES = {'Counterparty', 'Management', 'Trading', 'Wash'}
|
b6ec3ba9efae7b6b291391b0333e80f2e9fc6fa0
|
src/waldur_mastermind/invoices/migrations/0053_invoiceitem_uuid.py
|
src/waldur_mastermind/invoices/migrations/0053_invoiceitem_uuid.py
|
import uuid
from django.db import migrations
import waldur_core.core.fields
def gen_uuid(apps, schema_editor):
InvoiceItem = apps.get_model('invoices', 'InvoiceItem')
for row in InvoiceItem.objects.all():
row.uuid = uuid.uuid4().hex
row.save(update_fields=['uuid'])
class Migration(migrations.Migration):
dependencies = [
('invoices', '0052_delete_servicedowntime'),
]
operations = [
migrations.AddField(
model_name='invoiceitem',
name='uuid',
field=waldur_core.core.fields.UUIDField(null=True),
),
migrations.RunPython(gen_uuid, elidable=True),
migrations.AlterField(
model_name='invoiceitem',
name='uuid',
field=waldur_core.core.fields.UUIDField(),
),
]
|
import uuid
from django.db import migrations, models
import waldur_core.core.fields
def gen_uuid(apps, schema_editor):
InvoiceItem = apps.get_model('invoices', 'InvoiceItem')
for row in InvoiceItem.objects.all():
row.uuid = uuid.uuid4().hex
row.save(update_fields=['uuid'])
class Migration(migrations.Migration):
dependencies = [
('invoices', '0052_delete_servicedowntime'),
]
operations = [
migrations.AddField(
model_name='invoiceitem', name='uuid', field=models.UUIDField(null=True),
),
migrations.RunPython(gen_uuid, elidable=True),
migrations.AlterField(
model_name='invoiceitem',
name='uuid',
field=waldur_core.core.fields.UUIDField(),
),
]
|
Fix database migration script for UUID field in invoice item model.
|
Fix database migration script for UUID field in invoice item model.
|
Python
|
mit
|
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind
|
import uuid
- from django.db import migrations
+ from django.db import migrations, models
import waldur_core.core.fields
def gen_uuid(apps, schema_editor):
InvoiceItem = apps.get_model('invoices', 'InvoiceItem')
for row in InvoiceItem.objects.all():
row.uuid = uuid.uuid4().hex
row.save(update_fields=['uuid'])
class Migration(migrations.Migration):
dependencies = [
('invoices', '0052_delete_servicedowntime'),
]
operations = [
migrations.AddField(
+ model_name='invoiceitem', name='uuid', field=models.UUIDField(null=True),
- model_name='invoiceitem',
- name='uuid',
- field=waldur_core.core.fields.UUIDField(null=True),
),
migrations.RunPython(gen_uuid, elidable=True),
migrations.AlterField(
model_name='invoiceitem',
name='uuid',
field=waldur_core.core.fields.UUIDField(),
),
]
|
Fix database migration script for UUID field in invoice item model.
|
## Code Before:
import uuid
from django.db import migrations
import waldur_core.core.fields
def gen_uuid(apps, schema_editor):
InvoiceItem = apps.get_model('invoices', 'InvoiceItem')
for row in InvoiceItem.objects.all():
row.uuid = uuid.uuid4().hex
row.save(update_fields=['uuid'])
class Migration(migrations.Migration):
dependencies = [
('invoices', '0052_delete_servicedowntime'),
]
operations = [
migrations.AddField(
model_name='invoiceitem',
name='uuid',
field=waldur_core.core.fields.UUIDField(null=True),
),
migrations.RunPython(gen_uuid, elidable=True),
migrations.AlterField(
model_name='invoiceitem',
name='uuid',
field=waldur_core.core.fields.UUIDField(),
),
]
## Instruction:
Fix database migration script for UUID field in invoice item model.
## Code After:
import uuid
from django.db import migrations, models
import waldur_core.core.fields
def gen_uuid(apps, schema_editor):
InvoiceItem = apps.get_model('invoices', 'InvoiceItem')
for row in InvoiceItem.objects.all():
row.uuid = uuid.uuid4().hex
row.save(update_fields=['uuid'])
class Migration(migrations.Migration):
dependencies = [
('invoices', '0052_delete_servicedowntime'),
]
operations = [
migrations.AddField(
model_name='invoiceitem', name='uuid', field=models.UUIDField(null=True),
),
migrations.RunPython(gen_uuid, elidable=True),
migrations.AlterField(
model_name='invoiceitem',
name='uuid',
field=waldur_core.core.fields.UUIDField(),
),
]
|
import uuid
- from django.db import migrations
+ from django.db import migrations, models
? ++++++++
import waldur_core.core.fields
def gen_uuid(apps, schema_editor):
InvoiceItem = apps.get_model('invoices', 'InvoiceItem')
for row in InvoiceItem.objects.all():
row.uuid = uuid.uuid4().hex
row.save(update_fields=['uuid'])
class Migration(migrations.Migration):
dependencies = [
('invoices', '0052_delete_servicedowntime'),
]
operations = [
migrations.AddField(
+ model_name='invoiceitem', name='uuid', field=models.UUIDField(null=True),
- model_name='invoiceitem',
- name='uuid',
- field=waldur_core.core.fields.UUIDField(null=True),
),
migrations.RunPython(gen_uuid, elidable=True),
migrations.AlterField(
model_name='invoiceitem',
name='uuid',
field=waldur_core.core.fields.UUIDField(),
),
]
|
78c13173fadbdc3d261ab3690ffb9c37d8f8a72d
|
bootstrap.py
|
bootstrap.py
|
from __future__ import print_function
from getpass import getpass
import readline
import sys
import annotator
from annotator.model import Consumer, User
if __name__ == '__main__':
r = raw_input("This program will perform initial setup of the annotation \n"
"store, and create the required admin accounts. Proceed? [Y/n] ")
if r and r[0] in ['n', 'N']:
sys.exit(1)
print("\nCreating SQLite database and ElasticSearch indices... ", end="")
annotator.create_app()
annotator.create_all()
print("done.\n")
username = raw_input("Admin username [admin]: ").strip()
if not username:
username = 'admin'
email = ''
while not email:
email = raw_input("Admin email: ").strip()
password = ''
while not password:
password = getpass("Admin password: ")
ckey = raw_input("Primary consumer key [annotateit]: ").strip()
if not ckey:
ckey = 'annotateit'
with annotator.app.test_request_context():
print("\nCreating admin user... ", end="")
u = User(username, email, password)
annotator.db.session.add(u)
annotator.db.session.commit()
print("done.")
print("Creating primary consumer... ", end="")
c = Consumer(ckey)
c.user_id = u.id
annotator.db.session.add(c)
annotator.db.session.commit()
print("done.\n")
print("Primary consumer secret: %s" % c.secret)
|
from __future__ import print_function
from getpass import getpass
import readline
import sys
import annotator
from annotator.model import Consumer, User
if __name__ == '__main__':
r = raw_input("This program will perform initial setup of the annotation \n"
"store, and create the required admin accounts. Proceed? [Y/n] ")
if r and r[0] in ['n', 'N']:
sys.exit(1)
print("\nCreating SQLite database and ElasticSearch indices... ", end="")
app = annotator.create_app()
annotator.create_all(app)
print("done.\n")
username = raw_input("Admin username [admin]: ").strip()
if not username:
username = 'admin'
email = ''
while not email:
email = raw_input("Admin email: ").strip()
password = ''
while not password:
password = getpass("Admin password: ")
ckey = raw_input("Primary consumer key [annotateit]: ").strip()
if not ckey:
ckey = 'annotateit'
with app.test_request_context():
db = app.extensions['sqlalchemy'].db
print("\nCreating admin user... ", end="")
u = User(username, email, password)
db.session.add(u)
db.session.commit()
print("done.")
print("Creating primary consumer... ", end="")
c = Consumer(ckey)
c.user_id = u.id
db.session.add(c)
db.session.commit()
print("done.\n")
print("Primary consumer secret: %s" % c.secret)
|
Update to reflect new create_app signature
|
Update to reflect new create_app signature
|
Python
|
mit
|
openannotation/annotator-store,nobita-isc/annotator-store,nobita-isc/annotator-store,ningyifan/annotator-store,nobita-isc/annotator-store,nobita-isc/annotator-store,happybelly/annotator-store
|
from __future__ import print_function
from getpass import getpass
import readline
import sys
import annotator
from annotator.model import Consumer, User
if __name__ == '__main__':
r = raw_input("This program will perform initial setup of the annotation \n"
"store, and create the required admin accounts. Proceed? [Y/n] ")
if r and r[0] in ['n', 'N']:
sys.exit(1)
print("\nCreating SQLite database and ElasticSearch indices... ", end="")
- annotator.create_app()
+ app = annotator.create_app()
- annotator.create_all()
+ annotator.create_all(app)
print("done.\n")
username = raw_input("Admin username [admin]: ").strip()
if not username:
username = 'admin'
email = ''
while not email:
email = raw_input("Admin email: ").strip()
password = ''
while not password:
password = getpass("Admin password: ")
ckey = raw_input("Primary consumer key [annotateit]: ").strip()
if not ckey:
ckey = 'annotateit'
- with annotator.app.test_request_context():
+ with app.test_request_context():
+ db = app.extensions['sqlalchemy'].db
print("\nCreating admin user... ", end="")
u = User(username, email, password)
- annotator.db.session.add(u)
+ db.session.add(u)
- annotator.db.session.commit()
+ db.session.commit()
print("done.")
print("Creating primary consumer... ", end="")
c = Consumer(ckey)
c.user_id = u.id
- annotator.db.session.add(c)
+ db.session.add(c)
- annotator.db.session.commit()
+ db.session.commit()
print("done.\n")
print("Primary consumer secret: %s" % c.secret)
|
Update to reflect new create_app signature
|
## Code Before:
from __future__ import print_function
from getpass import getpass
import readline
import sys
import annotator
from annotator.model import Consumer, User
if __name__ == '__main__':
r = raw_input("This program will perform initial setup of the annotation \n"
"store, and create the required admin accounts. Proceed? [Y/n] ")
if r and r[0] in ['n', 'N']:
sys.exit(1)
print("\nCreating SQLite database and ElasticSearch indices... ", end="")
annotator.create_app()
annotator.create_all()
print("done.\n")
username = raw_input("Admin username [admin]: ").strip()
if not username:
username = 'admin'
email = ''
while not email:
email = raw_input("Admin email: ").strip()
password = ''
while not password:
password = getpass("Admin password: ")
ckey = raw_input("Primary consumer key [annotateit]: ").strip()
if not ckey:
ckey = 'annotateit'
with annotator.app.test_request_context():
print("\nCreating admin user... ", end="")
u = User(username, email, password)
annotator.db.session.add(u)
annotator.db.session.commit()
print("done.")
print("Creating primary consumer... ", end="")
c = Consumer(ckey)
c.user_id = u.id
annotator.db.session.add(c)
annotator.db.session.commit()
print("done.\n")
print("Primary consumer secret: %s" % c.secret)
## Instruction:
Update to reflect new create_app signature
## Code After:
from __future__ import print_function
from getpass import getpass
import readline
import sys
import annotator
from annotator.model import Consumer, User
if __name__ == '__main__':
r = raw_input("This program will perform initial setup of the annotation \n"
"store, and create the required admin accounts. Proceed? [Y/n] ")
if r and r[0] in ['n', 'N']:
sys.exit(1)
print("\nCreating SQLite database and ElasticSearch indices... ", end="")
app = annotator.create_app()
annotator.create_all(app)
print("done.\n")
username = raw_input("Admin username [admin]: ").strip()
if not username:
username = 'admin'
email = ''
while not email:
email = raw_input("Admin email: ").strip()
password = ''
while not password:
password = getpass("Admin password: ")
ckey = raw_input("Primary consumer key [annotateit]: ").strip()
if not ckey:
ckey = 'annotateit'
with app.test_request_context():
db = app.extensions['sqlalchemy'].db
print("\nCreating admin user... ", end="")
u = User(username, email, password)
db.session.add(u)
db.session.commit()
print("done.")
print("Creating primary consumer... ", end="")
c = Consumer(ckey)
c.user_id = u.id
db.session.add(c)
db.session.commit()
print("done.\n")
print("Primary consumer secret: %s" % c.secret)
|
from __future__ import print_function
from getpass import getpass
import readline
import sys
import annotator
from annotator.model import Consumer, User
if __name__ == '__main__':
r = raw_input("This program will perform initial setup of the annotation \n"
"store, and create the required admin accounts. Proceed? [Y/n] ")
if r and r[0] in ['n', 'N']:
sys.exit(1)
print("\nCreating SQLite database and ElasticSearch indices... ", end="")
- annotator.create_app()
+ app = annotator.create_app()
? ++++++
- annotator.create_all()
+ annotator.create_all(app)
? +++
print("done.\n")
username = raw_input("Admin username [admin]: ").strip()
if not username:
username = 'admin'
email = ''
while not email:
email = raw_input("Admin email: ").strip()
password = ''
while not password:
password = getpass("Admin password: ")
ckey = raw_input("Primary consumer key [annotateit]: ").strip()
if not ckey:
ckey = 'annotateit'
- with annotator.app.test_request_context():
? ----------
+ with app.test_request_context():
+ db = app.extensions['sqlalchemy'].db
print("\nCreating admin user... ", end="")
u = User(username, email, password)
- annotator.db.session.add(u)
? ----------
+ db.session.add(u)
- annotator.db.session.commit()
? ----------
+ db.session.commit()
print("done.")
print("Creating primary consumer... ", end="")
c = Consumer(ckey)
c.user_id = u.id
- annotator.db.session.add(c)
? ----------
+ db.session.add(c)
- annotator.db.session.commit()
? ----------
+ db.session.commit()
print("done.\n")
print("Primary consumer secret: %s" % c.secret)
|
c961fbf4be3152efc10d2d67d2f62fdae047ccab
|
datapipe/targets/filesystem.py
|
datapipe/targets/filesystem.py
|
import os
from ..target import Target
class LocalFile(Target):
def __init__(self, path):
self._path = path
super(LocalFile, self).__init__()
self._timestamp = 0
def identifier(self):
return self._path
def exists(self):
return os.path.exists(self._path)
def path(self):
return self._path
def open(self, *args, **kwargs):
return open(self._path, *args, **kwargs)
def store(self, batch=None):
if self.exists():
self._memory['timestamp'] = os.path.getmtime(self._path)
else:
self._memory['timestamp'] = 0
super(LocalFile, self).store(batch)
def is_damaged(self):
stored = self.stored()
if stored is None:
return True
if self.exists():
return os.path.getmtime(self._path) > stored['timestamp']
else:
return True
|
import os
from ..target import Target
class LocalFile(Target):
def __init__(self, path):
self._path = path
super(LocalFile, self).__init__()
if self.exists():
self._memory['timestamp'] = os.path.getmtime(self._path)
else:
self._memory['timestamp'] = 0
def identifier(self):
return self._path
def exists(self):
return os.path.exists(self._path)
def path(self):
return self._path
def store(self, batch=None):
if self.exists():
self._memory['timestamp'] = os.path.getmtime(self._path)
else:
self._memory['timestamp'] = 0
super(LocalFile, self).store(batch)
def open(self, *args, **kwargs):
return open(self._path, *args, **kwargs)
def is_damaged(self):
mem = self.stored()
if mem is None or not 'timestamp' in mem:
return True
return self._memory['timestamp'] > mem['timestamp']
|
Fix unnecessary recomputation of file targets
|
Fix unnecessary recomputation of file targets
|
Python
|
mit
|
ibab/datapipe
|
import os
from ..target import Target
class LocalFile(Target):
def __init__(self, path):
self._path = path
super(LocalFile, self).__init__()
+ if self.exists():
+ self._memory['timestamp'] = os.path.getmtime(self._path)
+ else:
- self._timestamp = 0
+ self._memory['timestamp'] = 0
def identifier(self):
return self._path
def exists(self):
return os.path.exists(self._path)
def path(self):
return self._path
- def open(self, *args, **kwargs):
- return open(self._path, *args, **kwargs)
-
def store(self, batch=None):
if self.exists():
self._memory['timestamp'] = os.path.getmtime(self._path)
else:
self._memory['timestamp'] = 0
super(LocalFile, self).store(batch)
+ def open(self, *args, **kwargs):
+ return open(self._path, *args, **kwargs)
+
def is_damaged(self):
- stored = self.stored()
+ mem = self.stored()
+ if mem is None or not 'timestamp' in mem:
- if stored is None:
- return True
- if self.exists():
- return os.path.getmtime(self._path) > stored['timestamp']
- else:
return True
+ return self._memory['timestamp'] > mem['timestamp']
+
|
Fix unnecessary recomputation of file targets
|
## Code Before:
import os
from ..target import Target
class LocalFile(Target):
def __init__(self, path):
self._path = path
super(LocalFile, self).__init__()
self._timestamp = 0
def identifier(self):
return self._path
def exists(self):
return os.path.exists(self._path)
def path(self):
return self._path
def open(self, *args, **kwargs):
return open(self._path, *args, **kwargs)
def store(self, batch=None):
if self.exists():
self._memory['timestamp'] = os.path.getmtime(self._path)
else:
self._memory['timestamp'] = 0
super(LocalFile, self).store(batch)
def is_damaged(self):
stored = self.stored()
if stored is None:
return True
if self.exists():
return os.path.getmtime(self._path) > stored['timestamp']
else:
return True
## Instruction:
Fix unnecessary recomputation of file targets
## Code After:
import os
from ..target import Target
class LocalFile(Target):
def __init__(self, path):
self._path = path
super(LocalFile, self).__init__()
if self.exists():
self._memory['timestamp'] = os.path.getmtime(self._path)
else:
self._memory['timestamp'] = 0
def identifier(self):
return self._path
def exists(self):
return os.path.exists(self._path)
def path(self):
return self._path
def store(self, batch=None):
if self.exists():
self._memory['timestamp'] = os.path.getmtime(self._path)
else:
self._memory['timestamp'] = 0
super(LocalFile, self).store(batch)
def open(self, *args, **kwargs):
return open(self._path, *args, **kwargs)
def is_damaged(self):
mem = self.stored()
if mem is None or not 'timestamp' in mem:
return True
return self._memory['timestamp'] > mem['timestamp']
|
import os
from ..target import Target
class LocalFile(Target):
def __init__(self, path):
self._path = path
super(LocalFile, self).__init__()
+ if self.exists():
+ self._memory['timestamp'] = os.path.getmtime(self._path)
+ else:
- self._timestamp = 0
+ self._memory['timestamp'] = 0
? ++++ ++++++++ ++
def identifier(self):
return self._path
def exists(self):
return os.path.exists(self._path)
def path(self):
return self._path
- def open(self, *args, **kwargs):
- return open(self._path, *args, **kwargs)
-
def store(self, batch=None):
if self.exists():
self._memory['timestamp'] = os.path.getmtime(self._path)
else:
self._memory['timestamp'] = 0
super(LocalFile, self).store(batch)
+ def open(self, *args, **kwargs):
+ return open(self._path, *args, **kwargs)
+
def is_damaged(self):
- stored = self.stored()
? ^^^^ ^
+ mem = self.stored()
? ^ ^
+ if mem is None or not 'timestamp' in mem:
- if stored is None:
- return True
- if self.exists():
- return os.path.getmtime(self._path) > stored['timestamp']
- else:
return True
+ return self._memory['timestamp'] > mem['timestamp']
+
|
02668b8dfda3c00f4ae74846d7c14c5dde64e17c
|
asciitree/__init__.py
|
asciitree/__init__.py
|
from legacy import draw_tree, _draw_tree
def ascii_tree(node,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return _draw_tree(get_root(node), '', get_node_children, get_node_text)
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
|
from legacy import draw_tree, _draw_tree
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
|
Remove now obsolete ascii_tree function.
|
Remove now obsolete ascii_tree function.
|
Python
|
mit
|
mbr/asciitree
|
from legacy import draw_tree, _draw_tree
-
-
- def ascii_tree(node,
- get_node_children=lambda t: t[1].items(),
- get_node_text=lambda t: t[0],
- get_root=lambda d: d.items()[0]):
- return _draw_tree(get_root(node), '', get_node_children, get_node_text)
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
|
Remove now obsolete ascii_tree function.
|
## Code Before:
from legacy import draw_tree, _draw_tree
def ascii_tree(node,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return _draw_tree(get_root(node), '', get_node_children, get_node_text)
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
## Instruction:
Remove now obsolete ascii_tree function.
## Code After:
from legacy import draw_tree, _draw_tree
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
|
from legacy import draw_tree, _draw_tree
-
-
- def ascii_tree(node,
- get_node_children=lambda t: t[1].items(),
- get_node_text=lambda t: t[0],
- get_root=lambda d: d.items()[0]):
- return _draw_tree(get_root(node), '', get_node_children, get_node_text)
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
|
be52bd5e578c54a816f8b786da5d8cf22fcc3ca8
|
paver_ext/python_requirements.py
|
paver_ext/python_requirements.py
|
from paver.easy import error
import os.path
# ----------------------------------------------------------------------------
# UTILS:
# ----------------------------------------------------------------------------
def read_requirements(*filenames):
"""
Read PIP "requirements*.txt" files.
These files contains python package requirements.
:param filenames: List of requirement files to read.
:returns: List of packages/package requirements (list-of-strings).
"""
package_requirements = []
for filename in filenames:
if not os.path.exists(filename):
error("REQUIREMENT-FILE %s not found" % filename)
continue
# -- NORMAL CASE:
requirements_file = open(filename, "r")
for line in requirements_file.readlines():
line = line.strip()
if not line or line.startswith("#"):
continue #< SKIP: EMPTY-LINE or COMMENT-LINE
package_requirements.append(line)
requirements_file.close()
return package_requirements
|
from paver.easy import error
import os.path
import pkg_resources
# ----------------------------------------------------------------------------
# UTILS:
# ----------------------------------------------------------------------------
def read_requirements(*filenames):
"""
Read PIP "requirements*.txt" files.
These files contains python package requirements.
:param filenames: List of requirement files to read.
:returns: List of packages/package requirements (list-of-strings).
"""
package_requirements = []
for filename in filenames:
if not os.path.exists(filename):
error("REQUIREMENT-FILE %s not found" % filename)
continue
# -- NORMAL CASE:
with open(filename, "r") as f:
requirements = pkg_resources.parse_requirements(f.read())
package_requirements.extend(requirements)
# # -- NORMAL CASE:
# requirements_file = open(filename, "r")
# for line in requirements_file.readlines():
# line = line.strip()
# if not line or line.startswith("#"):
# continue #< SKIP: EMPTY-LINE or COMMENT-LINE
# package_requirements.append(line)
# requirements_file.close()
return package_requirements
|
Use pkg_resources.parse_requirements() to simplify parsing.
|
Use pkg_resources.parse_requirements() to simplify parsing.
|
Python
|
bsd-2-clause
|
jenisys/behave,jenisys/behave
|
from paver.easy import error
import os.path
+ import pkg_resources
# ----------------------------------------------------------------------------
# UTILS:
# ----------------------------------------------------------------------------
def read_requirements(*filenames):
"""
Read PIP "requirements*.txt" files.
These files contains python package requirements.
:param filenames: List of requirement files to read.
:returns: List of packages/package requirements (list-of-strings).
"""
package_requirements = []
for filename in filenames:
if not os.path.exists(filename):
error("REQUIREMENT-FILE %s not found" % filename)
continue
# -- NORMAL CASE:
+ with open(filename, "r") as f:
+ requirements = pkg_resources.parse_requirements(f.read())
+ package_requirements.extend(requirements)
+ # # -- NORMAL CASE:
- requirements_file = open(filename, "r")
+ # requirements_file = open(filename, "r")
- for line in requirements_file.readlines():
+ # for line in requirements_file.readlines():
- line = line.strip()
+ # line = line.strip()
- if not line or line.startswith("#"):
+ # if not line or line.startswith("#"):
- continue #< SKIP: EMPTY-LINE or COMMENT-LINE
+ # continue #< SKIP: EMPTY-LINE or COMMENT-LINE
- package_requirements.append(line)
+ # package_requirements.append(line)
- requirements_file.close()
+ # requirements_file.close()
return package_requirements
|
Use pkg_resources.parse_requirements() to simplify parsing.
|
## Code Before:
from paver.easy import error
import os.path
# ----------------------------------------------------------------------------
# UTILS:
# ----------------------------------------------------------------------------
def read_requirements(*filenames):
"""
Read PIP "requirements*.txt" files.
These files contains python package requirements.
:param filenames: List of requirement files to read.
:returns: List of packages/package requirements (list-of-strings).
"""
package_requirements = []
for filename in filenames:
if not os.path.exists(filename):
error("REQUIREMENT-FILE %s not found" % filename)
continue
# -- NORMAL CASE:
requirements_file = open(filename, "r")
for line in requirements_file.readlines():
line = line.strip()
if not line or line.startswith("#"):
continue #< SKIP: EMPTY-LINE or COMMENT-LINE
package_requirements.append(line)
requirements_file.close()
return package_requirements
## Instruction:
Use pkg_resources.parse_requirements() to simplify parsing.
## Code After:
from paver.easy import error
import os.path
import pkg_resources
# ----------------------------------------------------------------------------
# UTILS:
# ----------------------------------------------------------------------------
def read_requirements(*filenames):
"""
Read PIP "requirements*.txt" files.
These files contains python package requirements.
:param filenames: List of requirement files to read.
:returns: List of packages/package requirements (list-of-strings).
"""
package_requirements = []
for filename in filenames:
if not os.path.exists(filename):
error("REQUIREMENT-FILE %s not found" % filename)
continue
# -- NORMAL CASE:
with open(filename, "r") as f:
requirements = pkg_resources.parse_requirements(f.read())
package_requirements.extend(requirements)
# # -- NORMAL CASE:
# requirements_file = open(filename, "r")
# for line in requirements_file.readlines():
# line = line.strip()
# if not line or line.startswith("#"):
# continue #< SKIP: EMPTY-LINE or COMMENT-LINE
# package_requirements.append(line)
# requirements_file.close()
return package_requirements
|
from paver.easy import error
import os.path
+ import pkg_resources
# ----------------------------------------------------------------------------
# UTILS:
# ----------------------------------------------------------------------------
def read_requirements(*filenames):
"""
Read PIP "requirements*.txt" files.
These files contains python package requirements.
:param filenames: List of requirement files to read.
:returns: List of packages/package requirements (list-of-strings).
"""
package_requirements = []
for filename in filenames:
if not os.path.exists(filename):
error("REQUIREMENT-FILE %s not found" % filename)
continue
# -- NORMAL CASE:
+ with open(filename, "r") as f:
+ requirements = pkg_resources.parse_requirements(f.read())
+ package_requirements.extend(requirements)
+ # # -- NORMAL CASE:
- requirements_file = open(filename, "r")
+ # requirements_file = open(filename, "r")
? +
- for line in requirements_file.readlines():
+ # for line in requirements_file.readlines():
? +
- line = line.strip()
+ # line = line.strip()
? +
- if not line or line.startswith("#"):
+ # if not line or line.startswith("#"):
? +
- continue #< SKIP: EMPTY-LINE or COMMENT-LINE
+ # continue #< SKIP: EMPTY-LINE or COMMENT-LINE
? +
- package_requirements.append(line)
+ # package_requirements.append(line)
? +
- requirements_file.close()
+ # requirements_file.close()
? +
return package_requirements
|
5225392a305e8e83a5a0fae91d3c2090914f2e5c
|
resolwe/flow/executors/docker.py
|
resolwe/flow/executors/docker.py
|
"""Local workflow executor"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import shlex
import subprocess
from django.conf import settings
from .local import FlowExecutor as LocalFlowExecutor
class FlowExecutor(LocalFlowExecutor):
def start(self):
container_image = settings.FLOW_EXECUTOR['CONTAINER_IMAGE']
self.proc = subprocess.Popen(
shlex.split('docker run --rm --interactive --name={} {} /bin/bash'.format(self.data_id, container_image)),
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
self.stdout = self.proc.stdout
def run_script(self, script):
self.proc.stdin.write(os.linesep.join(['set -x', 'set +B', script]) + os.linesep)
self.proc.stdin.close()
def end(self):
self.proc.wait()
return self.proc.returncode
def terminate(self, data_id):
subprocess.call(shlex.split('docker rm -f {}').format(str(data_id)))
|
"""Local workflow executor"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import random
import shlex
import subprocess
from django.conf import settings
from .local import FlowExecutor as LocalFlowExecutor
class FlowExecutor(LocalFlowExecutor):
def start(self):
container_image = settings.FLOW_EXECUTOR['CONTAINER_IMAGE']
if self.data_id != 'no_data_id':
container_name = 'resolwe_{}'.format(self.data_id)
else:
# set random container name for tests
rand_int = random.randint(1000, 9999)
container_name = 'resolwe_test_{}'.format(rand_int)
self.proc = subprocess.Popen(
shlex.split(
'docker run --rm --interactive --name={} {} /bin/bash'.format(container_name, container_image)),
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
self.stdout = self.proc.stdout
def run_script(self, script):
self.proc.stdin.write(os.linesep.join(['set -x', 'set +B', script]) + os.linesep)
self.proc.stdin.close()
def end(self):
self.proc.wait()
return self.proc.returncode
def terminate(self, data_id):
subprocess.call(shlex.split('docker rm -f {}').format(str(data_id)))
|
Set random container name for tests
|
Set random container name for tests
|
Python
|
apache-2.0
|
jberci/resolwe,jberci/resolwe,genialis/resolwe,genialis/resolwe
|
"""Local workflow executor"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
+ import random
import shlex
import subprocess
from django.conf import settings
from .local import FlowExecutor as LocalFlowExecutor
class FlowExecutor(LocalFlowExecutor):
def start(self):
container_image = settings.FLOW_EXECUTOR['CONTAINER_IMAGE']
+
+ if self.data_id != 'no_data_id':
+ container_name = 'resolwe_{}'.format(self.data_id)
+ else:
+ # set random container name for tests
+ rand_int = random.randint(1000, 9999)
+ container_name = 'resolwe_test_{}'.format(rand_int)
+
self.proc = subprocess.Popen(
+ shlex.split(
- shlex.split('docker run --rm --interactive --name={} {} /bin/bash'.format(self.data_id, container_image)),
+ 'docker run --rm --interactive --name={} {} /bin/bash'.format(container_name, container_image)),
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
self.stdout = self.proc.stdout
def run_script(self, script):
self.proc.stdin.write(os.linesep.join(['set -x', 'set +B', script]) + os.linesep)
self.proc.stdin.close()
def end(self):
self.proc.wait()
return self.proc.returncode
def terminate(self, data_id):
subprocess.call(shlex.split('docker rm -f {}').format(str(data_id)))
|
Set random container name for tests
|
## Code Before:
"""Local workflow executor"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import shlex
import subprocess
from django.conf import settings
from .local import FlowExecutor as LocalFlowExecutor
class FlowExecutor(LocalFlowExecutor):
def start(self):
container_image = settings.FLOW_EXECUTOR['CONTAINER_IMAGE']
self.proc = subprocess.Popen(
shlex.split('docker run --rm --interactive --name={} {} /bin/bash'.format(self.data_id, container_image)),
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
self.stdout = self.proc.stdout
def run_script(self, script):
self.proc.stdin.write(os.linesep.join(['set -x', 'set +B', script]) + os.linesep)
self.proc.stdin.close()
def end(self):
self.proc.wait()
return self.proc.returncode
def terminate(self, data_id):
subprocess.call(shlex.split('docker rm -f {}').format(str(data_id)))
## Instruction:
Set random container name for tests
## Code After:
"""Local workflow executor"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import random
import shlex
import subprocess
from django.conf import settings
from .local import FlowExecutor as LocalFlowExecutor
class FlowExecutor(LocalFlowExecutor):
def start(self):
container_image = settings.FLOW_EXECUTOR['CONTAINER_IMAGE']
if self.data_id != 'no_data_id':
container_name = 'resolwe_{}'.format(self.data_id)
else:
# set random container name for tests
rand_int = random.randint(1000, 9999)
container_name = 'resolwe_test_{}'.format(rand_int)
self.proc = subprocess.Popen(
shlex.split(
'docker run --rm --interactive --name={} {} /bin/bash'.format(container_name, container_image)),
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
self.stdout = self.proc.stdout
def run_script(self, script):
self.proc.stdin.write(os.linesep.join(['set -x', 'set +B', script]) + os.linesep)
self.proc.stdin.close()
def end(self):
self.proc.wait()
return self.proc.returncode
def terminate(self, data_id):
subprocess.call(shlex.split('docker rm -f {}').format(str(data_id)))
|
"""Local workflow executor"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
+ import random
import shlex
import subprocess
from django.conf import settings
from .local import FlowExecutor as LocalFlowExecutor
class FlowExecutor(LocalFlowExecutor):
def start(self):
container_image = settings.FLOW_EXECUTOR['CONTAINER_IMAGE']
+
+ if self.data_id != 'no_data_id':
+ container_name = 'resolwe_{}'.format(self.data_id)
+ else:
+ # set random container name for tests
+ rand_int = random.randint(1000, 9999)
+ container_name = 'resolwe_test_{}'.format(rand_int)
+
self.proc = subprocess.Popen(
+ shlex.split(
- shlex.split('docker run --rm --interactive --name={} {} /bin/bash'.format(self.data_id, container_image)),
? ^^^^^^^^^^^^ ^^^^^^^ ^^
+ 'docker run --rm --interactive --name={} {} /bin/bash'.format(container_name, container_image)),
? ^^^^ ^^^ ++++ ^^^^
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
self.stdout = self.proc.stdout
def run_script(self, script):
self.proc.stdin.write(os.linesep.join(['set -x', 'set +B', script]) + os.linesep)
self.proc.stdin.close()
def end(self):
self.proc.wait()
return self.proc.returncode
def terminate(self, data_id):
subprocess.call(shlex.split('docker rm -f {}').format(str(data_id)))
|
a6f8e42d3e297776a19c8e76dd7f1cfded32a266
|
pycon/tutorials/tests/test_utils.py
|
pycon/tutorials/tests/test_utils.py
|
"""Test for the tutorials.utils package"""
import datetime
import unittest
from mock import patch
from django.template import Template
from pycon.bulkemail.models import BulkEmail
from ..utils import queue_email_message
today = datetime.date.today()
class TestSendEmailMessage(unittest.TestCase):
@patch('django.core.mail.message.EmailMessage.send')
@patch('pycon.tutorials.utils.get_template')
def test_send_email_message(self, get_template, send_mail):
# queue_email_message comes up with the expected template names
# and calls send_mail with the expected arguments
test_template = Template("test template")
get_template.return_value = test_template
context = {'a': 1, 'b': 2}
queue_email_message("TESTNAME", "from_address", ["1", "2"], [], context)
args, kwargs = get_template.call_args_list[0]
expected_template_name = "tutorials/email/TESTNAME/subject.txt"
self.assertEqual(expected_template_name, args[0])
args, kwargs = get_template.call_args_list[1]
expected_template_name = "tutorials/email/TESTNAME/body.txt"
self.assertEqual(expected_template_name, args[0])
# Creates a BulkEmail object
self.assertEqual(1, BulkEmail.objects.count())
|
"""Test for the tutorials.utils package"""
import datetime
from mock import patch
from django.template import Template
from django.test import TestCase
from pycon.bulkemail.models import BulkEmail
from ..utils import queue_email_message
today = datetime.date.today()
class TestSendEmailMessage(TestCase):
@patch('django.core.mail.message.EmailMessage.send')
@patch('pycon.tutorials.utils.get_template')
def test_send_email_message(self, get_template, send_mail):
# queue_email_message comes up with the expected template names
# and calls send_mail with the expected arguments
test_template = Template("test template")
get_template.return_value = test_template
context = {'a': 1, 'b': 2}
queue_email_message("TESTNAME", "from_address", ["1", "2"], [], context)
args, kwargs = get_template.call_args_list[0]
expected_template_name = "tutorials/email/TESTNAME/subject.txt"
self.assertEqual(expected_template_name, args[0])
args, kwargs = get_template.call_args_list[1]
expected_template_name = "tutorials/email/TESTNAME/body.txt"
self.assertEqual(expected_template_name, args[0])
# Creates a BulkEmail object
self.assertEqual(1, BulkEmail.objects.count())
|
Use django TestCase in tutorial send email test
|
Use django TestCase in tutorial send email test
It was using regular Python unittest.TestCase for some
reason, resulting in leaving old BulkEmail objects in
the database that other tests weren't expecting.
|
Python
|
bsd-3-clause
|
PyCon/pycon,PyCon/pycon,PyCon/pycon,njl/pycon,PyCon/pycon,njl/pycon,njl/pycon,njl/pycon
|
"""Test for the tutorials.utils package"""
import datetime
- import unittest
from mock import patch
from django.template import Template
+ from django.test import TestCase
from pycon.bulkemail.models import BulkEmail
from ..utils import queue_email_message
today = datetime.date.today()
- class TestSendEmailMessage(unittest.TestCase):
+ class TestSendEmailMessage(TestCase):
@patch('django.core.mail.message.EmailMessage.send')
@patch('pycon.tutorials.utils.get_template')
def test_send_email_message(self, get_template, send_mail):
# queue_email_message comes up with the expected template names
# and calls send_mail with the expected arguments
test_template = Template("test template")
get_template.return_value = test_template
context = {'a': 1, 'b': 2}
queue_email_message("TESTNAME", "from_address", ["1", "2"], [], context)
args, kwargs = get_template.call_args_list[0]
expected_template_name = "tutorials/email/TESTNAME/subject.txt"
self.assertEqual(expected_template_name, args[0])
args, kwargs = get_template.call_args_list[1]
expected_template_name = "tutorials/email/TESTNAME/body.txt"
self.assertEqual(expected_template_name, args[0])
# Creates a BulkEmail object
self.assertEqual(1, BulkEmail.objects.count())
|
Use django TestCase in tutorial send email test
|
## Code Before:
"""Test for the tutorials.utils package"""
import datetime
import unittest
from mock import patch
from django.template import Template
from pycon.bulkemail.models import BulkEmail
from ..utils import queue_email_message
today = datetime.date.today()
class TestSendEmailMessage(unittest.TestCase):
@patch('django.core.mail.message.EmailMessage.send')
@patch('pycon.tutorials.utils.get_template')
def test_send_email_message(self, get_template, send_mail):
# queue_email_message comes up with the expected template names
# and calls send_mail with the expected arguments
test_template = Template("test template")
get_template.return_value = test_template
context = {'a': 1, 'b': 2}
queue_email_message("TESTNAME", "from_address", ["1", "2"], [], context)
args, kwargs = get_template.call_args_list[0]
expected_template_name = "tutorials/email/TESTNAME/subject.txt"
self.assertEqual(expected_template_name, args[0])
args, kwargs = get_template.call_args_list[1]
expected_template_name = "tutorials/email/TESTNAME/body.txt"
self.assertEqual(expected_template_name, args[0])
# Creates a BulkEmail object
self.assertEqual(1, BulkEmail.objects.count())
## Instruction:
Use django TestCase in tutorial send email test
## Code After:
"""Test for the tutorials.utils package"""
import datetime
from mock import patch
from django.template import Template
from django.test import TestCase
from pycon.bulkemail.models import BulkEmail
from ..utils import queue_email_message
today = datetime.date.today()
class TestSendEmailMessage(TestCase):
@patch('django.core.mail.message.EmailMessage.send')
@patch('pycon.tutorials.utils.get_template')
def test_send_email_message(self, get_template, send_mail):
# queue_email_message comes up with the expected template names
# and calls send_mail with the expected arguments
test_template = Template("test template")
get_template.return_value = test_template
context = {'a': 1, 'b': 2}
queue_email_message("TESTNAME", "from_address", ["1", "2"], [], context)
args, kwargs = get_template.call_args_list[0]
expected_template_name = "tutorials/email/TESTNAME/subject.txt"
self.assertEqual(expected_template_name, args[0])
args, kwargs = get_template.call_args_list[1]
expected_template_name = "tutorials/email/TESTNAME/body.txt"
self.assertEqual(expected_template_name, args[0])
# Creates a BulkEmail object
self.assertEqual(1, BulkEmail.objects.count())
|
"""Test for the tutorials.utils package"""
import datetime
- import unittest
from mock import patch
from django.template import Template
+ from django.test import TestCase
from pycon.bulkemail.models import BulkEmail
from ..utils import queue_email_message
today = datetime.date.today()
- class TestSendEmailMessage(unittest.TestCase):
? ---------
+ class TestSendEmailMessage(TestCase):
@patch('django.core.mail.message.EmailMessage.send')
@patch('pycon.tutorials.utils.get_template')
def test_send_email_message(self, get_template, send_mail):
# queue_email_message comes up with the expected template names
# and calls send_mail with the expected arguments
test_template = Template("test template")
get_template.return_value = test_template
context = {'a': 1, 'b': 2}
queue_email_message("TESTNAME", "from_address", ["1", "2"], [], context)
args, kwargs = get_template.call_args_list[0]
expected_template_name = "tutorials/email/TESTNAME/subject.txt"
self.assertEqual(expected_template_name, args[0])
args, kwargs = get_template.call_args_list[1]
expected_template_name = "tutorials/email/TESTNAME/body.txt"
self.assertEqual(expected_template_name, args[0])
# Creates a BulkEmail object
self.assertEqual(1, BulkEmail.objects.count())
|
de8b35e5c0a3c5f1427bbdcf3b60bd3e915cf0ad
|
xorcise/__init__.py
|
xorcise/__init__.py
|
import curses
from .character import Character
from .console import Console
from .line import Line
from .attribute import RenditionAttribute, ColorAttribute
from .misc import ESCAPE_CHARS, DELETE_CHARS, BACKSPACE_CHARS, \
is_printable_char, char_with_control_key
__console = None
def turn_on_console(asciize=False, spaces_per_tab=4):
Line.ASCIIZE = asciize
Line.SPACES_PER_TAB = spaces_per_tab
window = curses.initscr()
curses.noecho()
curses.cbreak()
curses.start_color()
curses.use_default_colors()
ColorAttribute.initialize()
global __console
__console = Console(window)
return __console
def turn_off_console():
curses.nocbreak()
curses.echo()
curses.endwin()
|
import curses
from .character import Character
from .console import Console
from .line import Line
from .attribute import RenditionAttribute, ColorAttribute
from .misc import ESCAPE_CHARS, DELETE_CHARS, BACKSPACE_CHARS, \
is_printable_char, char_with_control_key
def turn_on_console(asciize=False, spaces_per_tab=4):
Line.ASCIIZE = asciize
Line.SPACES_PER_TAB = spaces_per_tab
window = curses.initscr()
curses.noecho()
curses.cbreak()
curses.start_color()
curses.use_default_colors()
ColorAttribute.initialize()
return Console(window)
def turn_off_console():
curses.nocbreak()
curses.echo()
curses.endwin()
|
Remove an extra global variable in xorcise package
|
Remove an extra global variable in xorcise package
|
Python
|
unlicense
|
raviqqe/shakyo
|
import curses
from .character import Character
from .console import Console
from .line import Line
from .attribute import RenditionAttribute, ColorAttribute
from .misc import ESCAPE_CHARS, DELETE_CHARS, BACKSPACE_CHARS, \
is_printable_char, char_with_control_key
-
-
- __console = None
def turn_on_console(asciize=False, spaces_per_tab=4):
Line.ASCIIZE = asciize
Line.SPACES_PER_TAB = spaces_per_tab
window = curses.initscr()
curses.noecho()
curses.cbreak()
curses.start_color()
curses.use_default_colors()
ColorAttribute.initialize()
+ return Console(window)
- global __console
- __console = Console(window)
-
- return __console
def turn_off_console():
curses.nocbreak()
curses.echo()
curses.endwin()
|
Remove an extra global variable in xorcise package
|
## Code Before:
import curses
from .character import Character
from .console import Console
from .line import Line
from .attribute import RenditionAttribute, ColorAttribute
from .misc import ESCAPE_CHARS, DELETE_CHARS, BACKSPACE_CHARS, \
is_printable_char, char_with_control_key
__console = None
def turn_on_console(asciize=False, spaces_per_tab=4):
Line.ASCIIZE = asciize
Line.SPACES_PER_TAB = spaces_per_tab
window = curses.initscr()
curses.noecho()
curses.cbreak()
curses.start_color()
curses.use_default_colors()
ColorAttribute.initialize()
global __console
__console = Console(window)
return __console
def turn_off_console():
curses.nocbreak()
curses.echo()
curses.endwin()
## Instruction:
Remove an extra global variable in xorcise package
## Code After:
import curses
from .character import Character
from .console import Console
from .line import Line
from .attribute import RenditionAttribute, ColorAttribute
from .misc import ESCAPE_CHARS, DELETE_CHARS, BACKSPACE_CHARS, \
is_printable_char, char_with_control_key
def turn_on_console(asciize=False, spaces_per_tab=4):
Line.ASCIIZE = asciize
Line.SPACES_PER_TAB = spaces_per_tab
window = curses.initscr()
curses.noecho()
curses.cbreak()
curses.start_color()
curses.use_default_colors()
ColorAttribute.initialize()
return Console(window)
def turn_off_console():
curses.nocbreak()
curses.echo()
curses.endwin()
|
import curses
from .character import Character
from .console import Console
from .line import Line
from .attribute import RenditionAttribute, ColorAttribute
from .misc import ESCAPE_CHARS, DELETE_CHARS, BACKSPACE_CHARS, \
is_printable_char, char_with_control_key
-
-
- __console = None
def turn_on_console(asciize=False, spaces_per_tab=4):
Line.ASCIIZE = asciize
Line.SPACES_PER_TAB = spaces_per_tab
window = curses.initscr()
curses.noecho()
curses.cbreak()
curses.start_color()
curses.use_default_colors()
ColorAttribute.initialize()
+ return Console(window)
- global __console
- __console = Console(window)
-
- return __console
def turn_off_console():
curses.nocbreak()
curses.echo()
curses.endwin()
|
d3a9657b7318327a59c3eee08a25f1e5c4ba4edf
|
django_casscache.py
|
django_casscache.py
|
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
|
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
def noop_make_key(key, *args, **kwargs):
"""
For use with KEY_FUNCTION, to not alter the key name at all.
"""
return key
|
Add a method to noop the make_key in Django
|
Add a method to noop the make_key in Django
|
Python
|
bsd-3-clause
|
mattrobenolt/django-casscache
|
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
+
+ def noop_make_key(key, *args, **kwargs):
+ """
+ For use with KEY_FUNCTION, to not alter the key name at all.
+ """
+ return key
+
|
Add a method to noop the make_key in Django
|
## Code Before:
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
## Instruction:
Add a method to noop the make_key in Django
## Code After:
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
def noop_make_key(key, *args, **kwargs):
"""
For use with KEY_FUNCTION, to not alter the key name at all.
"""
return key
|
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
+
+
+ def noop_make_key(key, *args, **kwargs):
+ """
+ For use with KEY_FUNCTION, to not alter the key name at all.
+ """
+ return key
|
d7087cb309c028bdd56cf4c605d7c60eac3d4c0c
|
utils/custom_context.py
|
utils/custom_context.py
|
import discord
from discord.ext import commands
class CustomContext(commands.Context):
async def error(self, title: str, description: str = None):
em = discord.Embed(
title=f":no_entry_sign: {title}",
color=discord.Color.dark_red(),
description=description,
)
await self.send(embed=em)
async def success(self, title: str, description: str = None):
em = discord.Embed(
title=f":white_check_mark: {title}",
color=discord.Color.dark_green(),
description=description,
)
await self.send(embed=em)
|
import discord
from discord.ext import commands
class CustomContext(commands.Context):
async def error(self, title: str, description: str = None):
em = discord.Embed(
title=f":no_entry_sign: {title}",
color=discord.Color.dark_red(),
description=description or '',
)
await self.send(embed=em)
async def success(self, title: str, description: str = None):
em = discord.Embed(
title=f":white_check_mark: {title}",
color=discord.Color.dark_green(),
description=description or '',
)
await self.send(embed=em)
|
Remove 'None' in embed description
|
Remove 'None' in embed description
|
Python
|
mit
|
Naught0/qtbot
|
import discord
from discord.ext import commands
class CustomContext(commands.Context):
async def error(self, title: str, description: str = None):
em = discord.Embed(
title=f":no_entry_sign: {title}",
color=discord.Color.dark_red(),
- description=description,
+ description=description or '',
)
await self.send(embed=em)
async def success(self, title: str, description: str = None):
em = discord.Embed(
title=f":white_check_mark: {title}",
color=discord.Color.dark_green(),
- description=description,
+ description=description or '',
)
await self.send(embed=em)
|
Remove 'None' in embed description
|
## Code Before:
import discord
from discord.ext import commands
class CustomContext(commands.Context):
async def error(self, title: str, description: str = None):
em = discord.Embed(
title=f":no_entry_sign: {title}",
color=discord.Color.dark_red(),
description=description,
)
await self.send(embed=em)
async def success(self, title: str, description: str = None):
em = discord.Embed(
title=f":white_check_mark: {title}",
color=discord.Color.dark_green(),
description=description,
)
await self.send(embed=em)
## Instruction:
Remove 'None' in embed description
## Code After:
import discord
from discord.ext import commands
class CustomContext(commands.Context):
async def error(self, title: str, description: str = None):
em = discord.Embed(
title=f":no_entry_sign: {title}",
color=discord.Color.dark_red(),
description=description or '',
)
await self.send(embed=em)
async def success(self, title: str, description: str = None):
em = discord.Embed(
title=f":white_check_mark: {title}",
color=discord.Color.dark_green(),
description=description or '',
)
await self.send(embed=em)
|
import discord
from discord.ext import commands
class CustomContext(commands.Context):
async def error(self, title: str, description: str = None):
em = discord.Embed(
title=f":no_entry_sign: {title}",
color=discord.Color.dark_red(),
- description=description,
+ description=description or '',
? ++++++
)
await self.send(embed=em)
async def success(self, title: str, description: str = None):
em = discord.Embed(
title=f":white_check_mark: {title}",
color=discord.Color.dark_green(),
- description=description,
+ description=description or '',
? ++++++
)
await self.send(embed=em)
|
d33d791a5e90ab1a389d85b5e93df7f07167eb5b
|
tests/integration/test_home_page.py
|
tests/integration/test_home_page.py
|
from flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
'Flash - Flask Dashboard'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
|
from flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
'Flask + Dashboard = Flash'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
|
Update tests to reflect title change
|
Update tests to reflect title change
|
Python
|
isc
|
textbook/flash,textbook/flash,textbook/flash
|
from flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
- 'Flash - Flask Dashboard'
+ 'Flask + Dashboard = Flash'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
|
Update tests to reflect title change
|
## Code Before:
from flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
'Flash - Flask Dashboard'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
## Instruction:
Update tests to reflect title change
## Code After:
from flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
'Flask + Dashboard = Flash'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
|
from flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
- 'Flash - Flask Dashboard'
+ 'Flask + Dashboard = Flash'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
|
2f60d4665a960578ab97bdaf313893ec366c24f1
|
kdb/default_config.py
|
kdb/default_config.py
|
CONFIG = {
"server": {
"host": "irc.freenode.net",
"port": 6667
},
"bot": {
"nick": "kdb",
"ident": "kdb",
"name": "Knowledge Database Bot",
"channels": "#circuits",
},
"plugins": {
"broadcast.*": "enabled",
"channels.*": "enabled",
"core.*": "enabled",
"ctcp.*": "enabled",
"dnstools.*": "enabled",
"eval.*": "enabled",
"google.*": "enabled",
"greeting.*": "enabled",
"help.*": "enabled",
"irc.*": "enabled",
"stats.*": "enabled",
"swatch.*": "enabled",
"timers.*": "enabled",
},
}
|
CONFIG = {
"server": {
"host": "irc.freenode.net",
"port": 6667
},
"bot": {
"nick": "kdb",
"ident": "kdb",
"name": "Knowledge Database Bot",
"channels": "#circuits",
},
"plugins": {
"broadcast.*": "enabled",
"channels.*": "enabled",
"core.*": "enabled",
"ctcp.*": "enabled",
"dnstools.*": "enabled",
"eval.*": "enabled",
"google.*": "enabled",
"greeting.*": "enabled",
"help.*": "enabled",
"irc.*": "enabled",
"remote.*": "enabled",
"rmessage.*": "enabled",
"rnotify.*": "enabled",
"stats.*": "enabled",
"swatch.*": "enabled",
"timers.*": "enabled",
},
}
|
Enable remote, rmessage and rnotify plugins by default
|
Enable remote, rmessage and rnotify plugins by default
|
Python
|
mit
|
prologic/kdb,prologic/kdb,prologic/kdb
|
CONFIG = {
"server": {
"host": "irc.freenode.net",
"port": 6667
},
"bot": {
"nick": "kdb",
"ident": "kdb",
"name": "Knowledge Database Bot",
"channels": "#circuits",
},
"plugins": {
"broadcast.*": "enabled",
"channels.*": "enabled",
"core.*": "enabled",
"ctcp.*": "enabled",
"dnstools.*": "enabled",
"eval.*": "enabled",
"google.*": "enabled",
"greeting.*": "enabled",
"help.*": "enabled",
"irc.*": "enabled",
+ "remote.*": "enabled",
+ "rmessage.*": "enabled",
+ "rnotify.*": "enabled",
"stats.*": "enabled",
"swatch.*": "enabled",
"timers.*": "enabled",
},
}
|
Enable remote, rmessage and rnotify plugins by default
|
## Code Before:
CONFIG = {
"server": {
"host": "irc.freenode.net",
"port": 6667
},
"bot": {
"nick": "kdb",
"ident": "kdb",
"name": "Knowledge Database Bot",
"channels": "#circuits",
},
"plugins": {
"broadcast.*": "enabled",
"channels.*": "enabled",
"core.*": "enabled",
"ctcp.*": "enabled",
"dnstools.*": "enabled",
"eval.*": "enabled",
"google.*": "enabled",
"greeting.*": "enabled",
"help.*": "enabled",
"irc.*": "enabled",
"stats.*": "enabled",
"swatch.*": "enabled",
"timers.*": "enabled",
},
}
## Instruction:
Enable remote, rmessage and rnotify plugins by default
## Code After:
CONFIG = {
"server": {
"host": "irc.freenode.net",
"port": 6667
},
"bot": {
"nick": "kdb",
"ident": "kdb",
"name": "Knowledge Database Bot",
"channels": "#circuits",
},
"plugins": {
"broadcast.*": "enabled",
"channels.*": "enabled",
"core.*": "enabled",
"ctcp.*": "enabled",
"dnstools.*": "enabled",
"eval.*": "enabled",
"google.*": "enabled",
"greeting.*": "enabled",
"help.*": "enabled",
"irc.*": "enabled",
"remote.*": "enabled",
"rmessage.*": "enabled",
"rnotify.*": "enabled",
"stats.*": "enabled",
"swatch.*": "enabled",
"timers.*": "enabled",
},
}
|
CONFIG = {
"server": {
"host": "irc.freenode.net",
"port": 6667
},
"bot": {
"nick": "kdb",
"ident": "kdb",
"name": "Knowledge Database Bot",
"channels": "#circuits",
},
"plugins": {
"broadcast.*": "enabled",
"channels.*": "enabled",
"core.*": "enabled",
"ctcp.*": "enabled",
"dnstools.*": "enabled",
"eval.*": "enabled",
"google.*": "enabled",
"greeting.*": "enabled",
"help.*": "enabled",
"irc.*": "enabled",
+ "remote.*": "enabled",
+ "rmessage.*": "enabled",
+ "rnotify.*": "enabled",
"stats.*": "enabled",
"swatch.*": "enabled",
"timers.*": "enabled",
},
}
|
bbd8b027eecc48266dfeee12419a6bcd807bdf65
|
tests/__init__.py
|
tests/__init__.py
|
import os
import unittest
import pytest
class ScraperTest(unittest.TestCase):
online = False
test_file_name = None
def setUp(self):
os.environ[
"RECIPE_SCRAPERS_SETTINGS"
] = "tests.test_data.test_settings_module.test_settings"
test_file_name = (
self.test_file_name
if self.test_file_name
else self.scraper_class.__name__.lower()
)
with open(
"tests/test_data/{}.testhtml".format(test_file_name), encoding="utf-8"
) as testfile:
self.harvester_class = self.scraper_class(testfile)
canonical_url = self.harvester_class.canonical_url()
if self.online:
if not canonical_url:
pytest.skip(
f"could not find canonical url for online test of scraper '{self.scraper_class.__name__}'"
)
self.harvester_class = self.scraper_class(url=canonical_url)
|
import os
import unittest
import pytest
class ScraperTest(unittest.TestCase):
maxDiff = None
online = False
test_file_name = None
def setUp(self):
os.environ[
"RECIPE_SCRAPERS_SETTINGS"
] = "tests.test_data.test_settings_module.test_settings"
test_file_name = (
self.test_file_name
if self.test_file_name
else self.scraper_class.__name__.lower()
)
with open(
"tests/test_data/{}.testhtml".format(test_file_name), encoding="utf-8"
) as testfile:
self.harvester_class = self.scraper_class(testfile)
canonical_url = self.harvester_class.canonical_url()
if self.online:
if not canonical_url:
pytest.skip(
f"could not find canonical url for online test of scraper '{self.scraper_class.__name__}'"
)
self.harvester_class = self.scraper_class(url=canonical_url)
|
Set maxDiff to 'None' on the base ScraperTest class
|
Set maxDiff to 'None' on the base ScraperTest class
|
Python
|
mit
|
hhursev/recipe-scraper
|
import os
import unittest
import pytest
class ScraperTest(unittest.TestCase):
+ maxDiff = None
online = False
test_file_name = None
def setUp(self):
os.environ[
"RECIPE_SCRAPERS_SETTINGS"
] = "tests.test_data.test_settings_module.test_settings"
test_file_name = (
self.test_file_name
if self.test_file_name
else self.scraper_class.__name__.lower()
)
with open(
"tests/test_data/{}.testhtml".format(test_file_name), encoding="utf-8"
) as testfile:
self.harvester_class = self.scraper_class(testfile)
canonical_url = self.harvester_class.canonical_url()
if self.online:
if not canonical_url:
pytest.skip(
f"could not find canonical url for online test of scraper '{self.scraper_class.__name__}'"
)
self.harvester_class = self.scraper_class(url=canonical_url)
|
Set maxDiff to 'None' on the base ScraperTest class
|
## Code Before:
import os
import unittest
import pytest
class ScraperTest(unittest.TestCase):
online = False
test_file_name = None
def setUp(self):
os.environ[
"RECIPE_SCRAPERS_SETTINGS"
] = "tests.test_data.test_settings_module.test_settings"
test_file_name = (
self.test_file_name
if self.test_file_name
else self.scraper_class.__name__.lower()
)
with open(
"tests/test_data/{}.testhtml".format(test_file_name), encoding="utf-8"
) as testfile:
self.harvester_class = self.scraper_class(testfile)
canonical_url = self.harvester_class.canonical_url()
if self.online:
if not canonical_url:
pytest.skip(
f"could not find canonical url for online test of scraper '{self.scraper_class.__name__}'"
)
self.harvester_class = self.scraper_class(url=canonical_url)
## Instruction:
Set maxDiff to 'None' on the base ScraperTest class
## Code After:
import os
import unittest
import pytest
class ScraperTest(unittest.TestCase):
maxDiff = None
online = False
test_file_name = None
def setUp(self):
os.environ[
"RECIPE_SCRAPERS_SETTINGS"
] = "tests.test_data.test_settings_module.test_settings"
test_file_name = (
self.test_file_name
if self.test_file_name
else self.scraper_class.__name__.lower()
)
with open(
"tests/test_data/{}.testhtml".format(test_file_name), encoding="utf-8"
) as testfile:
self.harvester_class = self.scraper_class(testfile)
canonical_url = self.harvester_class.canonical_url()
if self.online:
if not canonical_url:
pytest.skip(
f"could not find canonical url for online test of scraper '{self.scraper_class.__name__}'"
)
self.harvester_class = self.scraper_class(url=canonical_url)
|
import os
import unittest
import pytest
class ScraperTest(unittest.TestCase):
+ maxDiff = None
online = False
test_file_name = None
def setUp(self):
os.environ[
"RECIPE_SCRAPERS_SETTINGS"
] = "tests.test_data.test_settings_module.test_settings"
test_file_name = (
self.test_file_name
if self.test_file_name
else self.scraper_class.__name__.lower()
)
with open(
"tests/test_data/{}.testhtml".format(test_file_name), encoding="utf-8"
) as testfile:
self.harvester_class = self.scraper_class(testfile)
canonical_url = self.harvester_class.canonical_url()
if self.online:
if not canonical_url:
pytest.skip(
f"could not find canonical url for online test of scraper '{self.scraper_class.__name__}'"
)
self.harvester_class = self.scraper_class(url=canonical_url)
|
65ed7106126effc922df2bf7252a3c840d9bc768
|
hasjob/__init__.py
|
hasjob/__init__.py
|
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
from uploads import configure
from search import configure
uploads.configure()
search.configure()
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
|
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
# Imported here to prevent circular imports
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
|
Remove duplicate code, leave comment about circular imports
|
Remove duplicate code, leave comment about circular imports
|
Python
|
agpl-3.0
|
hasgeek/hasjob,ashwin01/hasjob,nhannv/hasjob,qitianchan/hasjob,ashwin01/hasjob,hasgeek/hasjob,qitianchan/hasjob,sindhus/hasjob,sindhus/hasjob,hasgeek/hasjob,sindhus/hasjob,ashwin01/hasjob,ashwin01/hasjob,sindhus/hasjob,qitianchan/hasjob,nhannv/hasjob,qitianchan/hasjob,sindhus/hasjob,qitianchan/hasjob,nhannv/hasjob,hasgeek/hasjob,nhannv/hasjob,ashwin01/hasjob,nhannv/hasjob
|
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
+ # Imported here to prevent circular imports
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
- from uploads import configure
- from search import configure
- uploads.configure()
- search.configure()
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
|
Remove duplicate code, leave comment about circular imports
|
## Code Before:
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
from uploads import configure
from search import configure
uploads.configure()
search.configure()
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
## Instruction:
Remove duplicate code, leave comment about circular imports
## Code After:
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
# Imported here to prevent circular imports
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
|
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
+ # Imported here to prevent circular imports
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
- from uploads import configure
- from search import configure
- uploads.configure()
- search.configure()
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
|
c5049b9bf465aee93d4c87b9cd62608d338ede7f
|
robokassa/migrations/0003_load_source_type.py
|
robokassa/migrations/0003_load_source_type.py
|
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['robokassa']
symmetrical = True
|
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
depends_on = (
('payment', '0003_auto__chg_field_sourcetype_code__add_unique_sourcetype_code'),
)
def forwards(self, orm):
orm['payment.SourceType'].objects.get_or_create(code='robokassa', defaults=dict(name=u'Робокасса'))
def backwards(self, orm):
pass
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'payment.sourcetype': {
'Meta': {'object_name': 'SourceType'},
'code': ('oscar.models.fields.autoslugfield.AutoSlugField', [],
{'allow_duplicates': 'False', 'max_length': '128', 'separator': "u'-'", 'blank': 'True',
'unique': 'True', 'populate_from': "'name'", 'overwrite': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
}
}
complete_apps = ['robokassa']
symmetrical = True
|
Add the code that correctly adds the new payment source.
|
Add the code that correctly adds the new payment source.
|
Python
|
mit
|
a-iv/django-oscar-robokassa
|
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
+ depends_on = (
+ ('payment', '0003_auto__chg_field_sourcetype_code__add_unique_sourcetype_code'),
+ )
+
def forwards(self, orm):
- pass
+ orm['payment.SourceType'].objects.get_or_create(code='robokassa', defaults=dict(name=u'Робокасса'))
def backwards(self, orm):
pass
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
+ },
+ u'payment.sourcetype': {
+ 'Meta': {'object_name': 'SourceType'},
+ 'code': ('oscar.models.fields.autoslugfield.AutoSlugField', [],
+ {'allow_duplicates': 'False', 'max_length': '128', 'separator': "u'-'", 'blank': 'True',
+ 'unique': 'True', 'populate_from': "'name'", 'overwrite': 'False'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
}
}
complete_apps = ['robokassa']
symmetrical = True
|
Add the code that correctly adds the new payment source.
|
## Code Before:
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['robokassa']
symmetrical = True
## Instruction:
Add the code that correctly adds the new payment source.
## Code After:
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
depends_on = (
('payment', '0003_auto__chg_field_sourcetype_code__add_unique_sourcetype_code'),
)
def forwards(self, orm):
orm['payment.SourceType'].objects.get_or_create(code='robokassa', defaults=dict(name=u'Робокасса'))
def backwards(self, orm):
pass
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'payment.sourcetype': {
'Meta': {'object_name': 'SourceType'},
'code': ('oscar.models.fields.autoslugfield.AutoSlugField', [],
{'allow_duplicates': 'False', 'max_length': '128', 'separator': "u'-'", 'blank': 'True',
'unique': 'True', 'populate_from': "'name'", 'overwrite': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
}
}
complete_apps = ['robokassa']
symmetrical = True
|
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
+ depends_on = (
+ ('payment', '0003_auto__chg_field_sourcetype_code__add_unique_sourcetype_code'),
+ )
+
def forwards(self, orm):
- pass
+ orm['payment.SourceType'].objects.get_or_create(code='robokassa', defaults=dict(name=u'Робокасса'))
def backwards(self, orm):
pass
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
+ },
+ u'payment.sourcetype': {
+ 'Meta': {'object_name': 'SourceType'},
+ 'code': ('oscar.models.fields.autoslugfield.AutoSlugField', [],
+ {'allow_duplicates': 'False', 'max_length': '128', 'separator': "u'-'", 'blank': 'True',
+ 'unique': 'True', 'populate_from': "'name'", 'overwrite': 'False'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
}
}
complete_apps = ['robokassa']
symmetrical = True
|
d4398d068d4fdf6364869cd01237f53438e2674c
|
blinkylib/blinkytape.py
|
blinkylib/blinkytape.py
|
import blinkycolor
import serial
class BlinkyTape(object):
def __init__(self, port, baud_rate = 115200, pixel_count = 60):
self._serial = serial.Serial(port, baud_rate)
self._pixel_count = pixel_count
self._pixels = [blinkycolor.BLACK] * self._pixel_count
@property
def pixel_count(self):
return self._pixel_count
def set_pixel(self, index, color):
if index >= self._pixel_count: raise IndexError
self._pixels[index] = color
def set_pixels(self, pixels):
if len(pixels) != self._pixel_count: raise ValueError
self._pixels = pixels
def update(self):
UPDATE_VALUE = [0, 0, 255]
for pixel in self._pixels:
self._serial.write(pixel.raw)
self._serial.write(UPDATE_VALUE)
self._serial.flush()
|
import blinkycolor
import serial
class BlinkyTape(object):
def __init__(self, port, baud_rate = 115200, pixel_count = 60):
self._serial = serial.Serial(port, baud_rate)
self._pixel_count = pixel_count
self._pixels = [blinkycolor.BLACK] * self._pixel_count
@property
def pixel_count(self):
return self._pixel_count
def set_pixel(self, index, color):
if index >= self._pixel_count: raise IndexError
self._pixels[index] = color
def set_pixels(self, pixels):
if len(pixels) != self._pixel_count: raise ValueError
self._pixels = pixels
def update(self):
UPDATE_VALUE = [0, 0, 255]
for pixel in self._pixels:
self._serial.write(pixel.raw)
self._serial.write(UPDATE_VALUE)
self._serial.flush()
self._serial.flushInput()
|
Fix the slow-mo gradient bug by flushing BlinkyTape response on updates
|
Fix the slow-mo gradient bug by flushing BlinkyTape response on updates
|
Python
|
mit
|
jonspeicher/blinkyfun
|
import blinkycolor
import serial
class BlinkyTape(object):
def __init__(self, port, baud_rate = 115200, pixel_count = 60):
self._serial = serial.Serial(port, baud_rate)
self._pixel_count = pixel_count
self._pixels = [blinkycolor.BLACK] * self._pixel_count
@property
def pixel_count(self):
return self._pixel_count
def set_pixel(self, index, color):
if index >= self._pixel_count: raise IndexError
self._pixels[index] = color
def set_pixels(self, pixels):
if len(pixels) != self._pixel_count: raise ValueError
self._pixels = pixels
def update(self):
UPDATE_VALUE = [0, 0, 255]
for pixel in self._pixels:
self._serial.write(pixel.raw)
self._serial.write(UPDATE_VALUE)
self._serial.flush()
+ self._serial.flushInput()
|
Fix the slow-mo gradient bug by flushing BlinkyTape response on updates
|
## Code Before:
import blinkycolor
import serial
class BlinkyTape(object):
def __init__(self, port, baud_rate = 115200, pixel_count = 60):
self._serial = serial.Serial(port, baud_rate)
self._pixel_count = pixel_count
self._pixels = [blinkycolor.BLACK] * self._pixel_count
@property
def pixel_count(self):
return self._pixel_count
def set_pixel(self, index, color):
if index >= self._pixel_count: raise IndexError
self._pixels[index] = color
def set_pixels(self, pixels):
if len(pixels) != self._pixel_count: raise ValueError
self._pixels = pixels
def update(self):
UPDATE_VALUE = [0, 0, 255]
for pixel in self._pixels:
self._serial.write(pixel.raw)
self._serial.write(UPDATE_VALUE)
self._serial.flush()
## Instruction:
Fix the slow-mo gradient bug by flushing BlinkyTape response on updates
## Code After:
import blinkycolor
import serial
class BlinkyTape(object):
def __init__(self, port, baud_rate = 115200, pixel_count = 60):
self._serial = serial.Serial(port, baud_rate)
self._pixel_count = pixel_count
self._pixels = [blinkycolor.BLACK] * self._pixel_count
@property
def pixel_count(self):
return self._pixel_count
def set_pixel(self, index, color):
if index >= self._pixel_count: raise IndexError
self._pixels[index] = color
def set_pixels(self, pixels):
if len(pixels) != self._pixel_count: raise ValueError
self._pixels = pixels
def update(self):
UPDATE_VALUE = [0, 0, 255]
for pixel in self._pixels:
self._serial.write(pixel.raw)
self._serial.write(UPDATE_VALUE)
self._serial.flush()
self._serial.flushInput()
|
import blinkycolor
import serial
class BlinkyTape(object):
def __init__(self, port, baud_rate = 115200, pixel_count = 60):
self._serial = serial.Serial(port, baud_rate)
self._pixel_count = pixel_count
self._pixels = [blinkycolor.BLACK] * self._pixel_count
@property
def pixel_count(self):
return self._pixel_count
def set_pixel(self, index, color):
if index >= self._pixel_count: raise IndexError
self._pixels[index] = color
def set_pixels(self, pixels):
if len(pixels) != self._pixel_count: raise ValueError
self._pixels = pixels
def update(self):
UPDATE_VALUE = [0, 0, 255]
for pixel in self._pixels:
self._serial.write(pixel.raw)
self._serial.write(UPDATE_VALUE)
self._serial.flush()
+ self._serial.flushInput()
|
6d23f3bd1ccd45a6e739264e8d041282e6baaf0b
|
hassio/dock/util.py
|
hassio/dock/util.py
|
"""HassIO docker utilitys."""
import re
from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64
RESIN_BASE_IMAGE = {
ARCH_ARMHF: "resin/armhf-alpine:3.5",
ARCH_AARCH64: "resin/aarch64-alpine:3.5",
ARCH_I386: "resin/i386-alpine:3.5",
ARCH_AMD64: "resin/amd64-alpine:3.5",
}
TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%")
def dockerfile_template(dockerfile, arch, version, meta_type):
"""Prepare a Hass.IO dockerfile."""
buff = []
resin_image = RESIN_BASE_IMAGE[arch]
# read docker
with dockerfile.open('r') as dock_input:
for line in dock_input:
line = TMPL_IMAGE.sub(resin_image, line)
buff.append(line)
# add metadata
buff.append(create_metadata(version, arch, meta_type))
# write docker
with dockerfile.open('w') as dock_output:
dock_output.writelines(buff)
def create_metadata(version, arch, meta_type):
"""Generate docker label layer for hassio."""
return ('LABEL io.hass.version="{}" '
'io.hass.arch="{}" '
'io.hass.type="{}"').format(version, arch, meta_type)
|
"""HassIO docker utilitys."""
import re
from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64
RESIN_BASE_IMAGE = {
ARCH_ARMHF: "homeassistant/armhf-base:latest",
ARCH_AARCH64: "homeassistant/aarch64-base:latest",
ARCH_I386: "homeassistant/i386-base:latest",
ARCH_AMD64: "homeassistant/amd64-base:latest",
}
TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%")
def dockerfile_template(dockerfile, arch, version, meta_type):
"""Prepare a Hass.IO dockerfile."""
buff = []
resin_image = RESIN_BASE_IMAGE[arch]
# read docker
with dockerfile.open('r') as dock_input:
for line in dock_input:
line = TMPL_IMAGE.sub(resin_image, line)
buff.append(line)
# add metadata
buff.append(create_metadata(version, arch, meta_type))
# write docker
with dockerfile.open('w') as dock_output:
dock_output.writelines(buff)
def create_metadata(version, arch, meta_type):
"""Generate docker label layer for hassio."""
return ('LABEL io.hass.version="{}" '
'io.hass.arch="{}" '
'io.hass.type="{}"').format(version, arch, meta_type)
|
Use our new base image
|
Use our new base image
|
Python
|
bsd-3-clause
|
pvizeli/hassio,pvizeli/hassio
|
"""HassIO docker utilitys."""
import re
from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64
RESIN_BASE_IMAGE = {
- ARCH_ARMHF: "resin/armhf-alpine:3.5",
- ARCH_AARCH64: "resin/aarch64-alpine:3.5",
- ARCH_I386: "resin/i386-alpine:3.5",
- ARCH_AMD64: "resin/amd64-alpine:3.5",
+ ARCH_ARMHF: "homeassistant/armhf-base:latest",
+ ARCH_AARCH64: "homeassistant/aarch64-base:latest",
+ ARCH_I386: "homeassistant/i386-base:latest",
+ ARCH_AMD64: "homeassistant/amd64-base:latest",
}
TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%")
def dockerfile_template(dockerfile, arch, version, meta_type):
"""Prepare a Hass.IO dockerfile."""
buff = []
resin_image = RESIN_BASE_IMAGE[arch]
# read docker
with dockerfile.open('r') as dock_input:
for line in dock_input:
line = TMPL_IMAGE.sub(resin_image, line)
buff.append(line)
# add metadata
buff.append(create_metadata(version, arch, meta_type))
# write docker
with dockerfile.open('w') as dock_output:
dock_output.writelines(buff)
def create_metadata(version, arch, meta_type):
"""Generate docker label layer for hassio."""
return ('LABEL io.hass.version="{}" '
'io.hass.arch="{}" '
'io.hass.type="{}"').format(version, arch, meta_type)
|
Use our new base image
|
## Code Before:
"""HassIO docker utilitys."""
import re
from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64
RESIN_BASE_IMAGE = {
ARCH_ARMHF: "resin/armhf-alpine:3.5",
ARCH_AARCH64: "resin/aarch64-alpine:3.5",
ARCH_I386: "resin/i386-alpine:3.5",
ARCH_AMD64: "resin/amd64-alpine:3.5",
}
TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%")
def dockerfile_template(dockerfile, arch, version, meta_type):
"""Prepare a Hass.IO dockerfile."""
buff = []
resin_image = RESIN_BASE_IMAGE[arch]
# read docker
with dockerfile.open('r') as dock_input:
for line in dock_input:
line = TMPL_IMAGE.sub(resin_image, line)
buff.append(line)
# add metadata
buff.append(create_metadata(version, arch, meta_type))
# write docker
with dockerfile.open('w') as dock_output:
dock_output.writelines(buff)
def create_metadata(version, arch, meta_type):
"""Generate docker label layer for hassio."""
return ('LABEL io.hass.version="{}" '
'io.hass.arch="{}" '
'io.hass.type="{}"').format(version, arch, meta_type)
## Instruction:
Use our new base image
## Code After:
"""HassIO docker utilitys."""
import re
from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64
RESIN_BASE_IMAGE = {
ARCH_ARMHF: "homeassistant/armhf-base:latest",
ARCH_AARCH64: "homeassistant/aarch64-base:latest",
ARCH_I386: "homeassistant/i386-base:latest",
ARCH_AMD64: "homeassistant/amd64-base:latest",
}
TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%")
def dockerfile_template(dockerfile, arch, version, meta_type):
"""Prepare a Hass.IO dockerfile."""
buff = []
resin_image = RESIN_BASE_IMAGE[arch]
# read docker
with dockerfile.open('r') as dock_input:
for line in dock_input:
line = TMPL_IMAGE.sub(resin_image, line)
buff.append(line)
# add metadata
buff.append(create_metadata(version, arch, meta_type))
# write docker
with dockerfile.open('w') as dock_output:
dock_output.writelines(buff)
def create_metadata(version, arch, meta_type):
"""Generate docker label layer for hassio."""
return ('LABEL io.hass.version="{}" '
'io.hass.arch="{}" '
'io.hass.type="{}"').format(version, arch, meta_type)
|
"""HassIO docker utilitys."""
import re
from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64
RESIN_BASE_IMAGE = {
- ARCH_ARMHF: "resin/armhf-alpine:3.5",
- ARCH_AARCH64: "resin/aarch64-alpine:3.5",
- ARCH_I386: "resin/i386-alpine:3.5",
- ARCH_AMD64: "resin/amd64-alpine:3.5",
+ ARCH_ARMHF: "homeassistant/armhf-base:latest",
+ ARCH_AARCH64: "homeassistant/aarch64-base:latest",
+ ARCH_I386: "homeassistant/i386-base:latest",
+ ARCH_AMD64: "homeassistant/amd64-base:latest",
}
TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%")
def dockerfile_template(dockerfile, arch, version, meta_type):
"""Prepare a Hass.IO dockerfile."""
buff = []
resin_image = RESIN_BASE_IMAGE[arch]
# read docker
with dockerfile.open('r') as dock_input:
for line in dock_input:
line = TMPL_IMAGE.sub(resin_image, line)
buff.append(line)
# add metadata
buff.append(create_metadata(version, arch, meta_type))
# write docker
with dockerfile.open('w') as dock_output:
dock_output.writelines(buff)
def create_metadata(version, arch, meta_type):
"""Generate docker label layer for hassio."""
return ('LABEL io.hass.version="{}" '
'io.hass.arch="{}" '
'io.hass.type="{}"').format(version, arch, meta_type)
|
15c51102f8e9f37bb08f9f6c04c7da2d75250cd2
|
cabot/cabot_config.py
|
cabot/cabot_config.py
|
import os
GRAPHITE_API = os.environ.get('GRAPHITE_API')
GRAPHITE_USER = os.environ.get('GRAPHITE_USER')
GRAPHITE_PASS = os.environ.get('GRAPHITE_PASS')
GRAPHITE_FROM = os.getenv('GRAPHITE_FROM', '-10minute')
JENKINS_API = os.environ.get('JENKINS_API')
JENKINS_USER = os.environ.get('JENKINS_USER')
JENKINS_PASS = os.environ.get('JENKINS_PASS')
CALENDAR_ICAL_URL = os.environ.get('CALENDAR_ICAL_URL')
WWW_HTTP_HOST = os.environ.get('WWW_HTTP_HOST')
WWW_SCHEME = os.environ.get('WWW_SCHEME', "https")
ALERT_INTERVAL = os.environ.get('ALERT_INTERVAL', 10)
NOTIFICATION_INTERVAL = os.environ.get('NOTIFICATION_INTERVAL', 120)
# Default plugins are used if the user has not specified.
CABOT_PLUGINS_ENABLED = os.environ.get('CABOT_PLUGINS_ENABLED', 'cabot_alert_hipchat,cabot_alert_twilio,cabot_alert_email')
|
import os
GRAPHITE_API = os.environ.get('GRAPHITE_API')
GRAPHITE_USER = os.environ.get('GRAPHITE_USER')
GRAPHITE_PASS = os.environ.get('GRAPHITE_PASS')
GRAPHITE_FROM = os.getenv('GRAPHITE_FROM', '-10minute')
JENKINS_API = os.environ.get('JENKINS_API')
JENKINS_USER = os.environ.get('JENKINS_USER')
JENKINS_PASS = os.environ.get('JENKINS_PASS')
CALENDAR_ICAL_URL = os.environ.get('CALENDAR_ICAL_URL')
WWW_HTTP_HOST = os.environ.get('WWW_HTTP_HOST')
WWW_SCHEME = os.environ.get('WWW_SCHEME', "https")
ALERT_INTERVAL = int(os.environ.get('ALERT_INTERVAL', 10))
NOTIFICATION_INTERVAL = int(os.environ.get('NOTIFICATION_INTERVAL', 120))
# Default plugins are used if the user has not specified.
CABOT_PLUGINS_ENABLED = os.environ.get('CABOT_PLUGINS_ENABLED', 'cabot_alert_hipchat,cabot_alert_twilio,cabot_alert_email')
|
Convert *_INTERVAL variables to int
|
Convert *_INTERVAL variables to int
ALERT_INTERVAL and NOTIFICATION_INTERVAL are now converted to
numbers. This allows user-defined ALERT_INTERVAL and
NOTIFICATION_INTERVAL env variables to work without throwing
TypeErrors:
return self.run(*args, **kwargs)
File "/cabot/cabot/cabotapp/tasks.py", line 68, in update_service
service.update_status()
File "/cabot/cabot/cabotapp/models.py", line 243, in update_status
self.alert()
File "/cabot/cabot/cabotapp/models.py", line 174, in alert
if self.last_alert_sent and (timezone.now() - timedelta(minutes=settings.ALERT_INTERVAL)) < self.last_alert_sent:
TypeError: unsupported type for timedelta minutes component: str
|
Python
|
mit
|
arachnys/cabot,reddit/cabot,cmclaughlin/cabot,cmclaughlin/cabot,bonniejools/cabot,dever860/cabot,lghamie/cabot,jdycar/cabot,jdycar/cabot,movermeyer/cabot,xinity/cabot,lghamie/cabot,dever860/cabot,lghamie/cabot,dever860/cabot,cmclaughlin/cabot,arachnys/cabot,maks-us/cabot,mcansky/cabotapp,xinity/cabot,cmclaughlin/cabot,bonniejools/cabot,xinity/cabot,mcansky/cabotapp,maks-us/cabot,robocopio/cabot,movermeyer/cabot,dever860/cabot,robocopio/cabot,mcansky/cabotapp,robocopio/cabot,robocopio/cabot,arachnys/cabot,maks-us/cabot,reddit/cabot,lghamie/cabot,reddit/cabot,bonniejools/cabot,xinity/cabot,bonniejools/cabot,arachnys/cabot,movermeyer/cabot,movermeyer/cabot,maks-us/cabot,reddit/cabot,jdycar/cabot,mcansky/cabotapp,jdycar/cabot
|
import os
GRAPHITE_API = os.environ.get('GRAPHITE_API')
GRAPHITE_USER = os.environ.get('GRAPHITE_USER')
GRAPHITE_PASS = os.environ.get('GRAPHITE_PASS')
GRAPHITE_FROM = os.getenv('GRAPHITE_FROM', '-10minute')
JENKINS_API = os.environ.get('JENKINS_API')
JENKINS_USER = os.environ.get('JENKINS_USER')
JENKINS_PASS = os.environ.get('JENKINS_PASS')
CALENDAR_ICAL_URL = os.environ.get('CALENDAR_ICAL_URL')
WWW_HTTP_HOST = os.environ.get('WWW_HTTP_HOST')
WWW_SCHEME = os.environ.get('WWW_SCHEME', "https")
- ALERT_INTERVAL = os.environ.get('ALERT_INTERVAL', 10)
+ ALERT_INTERVAL = int(os.environ.get('ALERT_INTERVAL', 10))
- NOTIFICATION_INTERVAL = os.environ.get('NOTIFICATION_INTERVAL', 120)
+ NOTIFICATION_INTERVAL = int(os.environ.get('NOTIFICATION_INTERVAL', 120))
# Default plugins are used if the user has not specified.
CABOT_PLUGINS_ENABLED = os.environ.get('CABOT_PLUGINS_ENABLED', 'cabot_alert_hipchat,cabot_alert_twilio,cabot_alert_email')
|
Convert *_INTERVAL variables to int
|
## Code Before:
import os
GRAPHITE_API = os.environ.get('GRAPHITE_API')
GRAPHITE_USER = os.environ.get('GRAPHITE_USER')
GRAPHITE_PASS = os.environ.get('GRAPHITE_PASS')
GRAPHITE_FROM = os.getenv('GRAPHITE_FROM', '-10minute')
JENKINS_API = os.environ.get('JENKINS_API')
JENKINS_USER = os.environ.get('JENKINS_USER')
JENKINS_PASS = os.environ.get('JENKINS_PASS')
CALENDAR_ICAL_URL = os.environ.get('CALENDAR_ICAL_URL')
WWW_HTTP_HOST = os.environ.get('WWW_HTTP_HOST')
WWW_SCHEME = os.environ.get('WWW_SCHEME', "https")
ALERT_INTERVAL = os.environ.get('ALERT_INTERVAL', 10)
NOTIFICATION_INTERVAL = os.environ.get('NOTIFICATION_INTERVAL', 120)
# Default plugins are used if the user has not specified.
CABOT_PLUGINS_ENABLED = os.environ.get('CABOT_PLUGINS_ENABLED', 'cabot_alert_hipchat,cabot_alert_twilio,cabot_alert_email')
## Instruction:
Convert *_INTERVAL variables to int
## Code After:
import os
GRAPHITE_API = os.environ.get('GRAPHITE_API')
GRAPHITE_USER = os.environ.get('GRAPHITE_USER')
GRAPHITE_PASS = os.environ.get('GRAPHITE_PASS')
GRAPHITE_FROM = os.getenv('GRAPHITE_FROM', '-10minute')
JENKINS_API = os.environ.get('JENKINS_API')
JENKINS_USER = os.environ.get('JENKINS_USER')
JENKINS_PASS = os.environ.get('JENKINS_PASS')
CALENDAR_ICAL_URL = os.environ.get('CALENDAR_ICAL_URL')
WWW_HTTP_HOST = os.environ.get('WWW_HTTP_HOST')
WWW_SCHEME = os.environ.get('WWW_SCHEME', "https")
ALERT_INTERVAL = int(os.environ.get('ALERT_INTERVAL', 10))
NOTIFICATION_INTERVAL = int(os.environ.get('NOTIFICATION_INTERVAL', 120))
# Default plugins are used if the user has not specified.
CABOT_PLUGINS_ENABLED = os.environ.get('CABOT_PLUGINS_ENABLED', 'cabot_alert_hipchat,cabot_alert_twilio,cabot_alert_email')
|
import os
GRAPHITE_API = os.environ.get('GRAPHITE_API')
GRAPHITE_USER = os.environ.get('GRAPHITE_USER')
GRAPHITE_PASS = os.environ.get('GRAPHITE_PASS')
GRAPHITE_FROM = os.getenv('GRAPHITE_FROM', '-10minute')
JENKINS_API = os.environ.get('JENKINS_API')
JENKINS_USER = os.environ.get('JENKINS_USER')
JENKINS_PASS = os.environ.get('JENKINS_PASS')
CALENDAR_ICAL_URL = os.environ.get('CALENDAR_ICAL_URL')
WWW_HTTP_HOST = os.environ.get('WWW_HTTP_HOST')
WWW_SCHEME = os.environ.get('WWW_SCHEME', "https")
- ALERT_INTERVAL = os.environ.get('ALERT_INTERVAL', 10)
+ ALERT_INTERVAL = int(os.environ.get('ALERT_INTERVAL', 10))
? ++++ +
- NOTIFICATION_INTERVAL = os.environ.get('NOTIFICATION_INTERVAL', 120)
+ NOTIFICATION_INTERVAL = int(os.environ.get('NOTIFICATION_INTERVAL', 120))
? ++++ +
# Default plugins are used if the user has not specified.
CABOT_PLUGINS_ENABLED = os.environ.get('CABOT_PLUGINS_ENABLED', 'cabot_alert_hipchat,cabot_alert_twilio,cabot_alert_email')
|
22382935be99e027da46303107926a15cd8f3017
|
tests/twisted/vcard/test-set-alias.py
|
tests/twisted/vcard/test-set-alias.py
|
from servicetest import EventPattern
from gabbletest import exec_test, acknowledge_iq
import constants as cs
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
|
from servicetest import EventPattern, assertEquals
from gabbletest import exec_test, acknowledge_iq
import constants as cs
import ns
def validate_pep_update(pep_update, expected_nickname):
publish = pep_update.query.elements(uri=ns.PUBSUB, name='publish').next()
assertEquals(ns.NICK, publish['node'])
item = publish.elements(uri=ns.PUBSUB, name='item').next()
nick = item.elements(uri=ns.NICK, name='nick').next()
assertEquals(expected_nickname, nick.children[0])
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
pep_update = q.expect('stream-iq', iq_type='set', query_ns=ns.PUBSUB, query_name='pubsub')
validate_pep_update(pep_update, 'lala')
acknowledge_iq(stream, pep_update.stanza)
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
|
Test setting our own alias via PEP
|
Test setting our own alias via PEP
Astonishingly, this was untested...
|
Python
|
lgpl-2.1
|
Ziemin/telepathy-gabble,jku/telepathy-gabble,mlundblad/telepathy-gabble,Ziemin/telepathy-gabble,mlundblad/telepathy-gabble,jku/telepathy-gabble,Ziemin/telepathy-gabble,mlundblad/telepathy-gabble,jku/telepathy-gabble,Ziemin/telepathy-gabble
|
- from servicetest import EventPattern
+ from servicetest import EventPattern, assertEquals
from gabbletest import exec_test, acknowledge_iq
import constants as cs
+ import ns
+
+ def validate_pep_update(pep_update, expected_nickname):
+ publish = pep_update.query.elements(uri=ns.PUBSUB, name='publish').next()
+ assertEquals(ns.NICK, publish['node'])
+ item = publish.elements(uri=ns.PUBSUB, name='item').next()
+ nick = item.elements(uri=ns.NICK, name='nick').next()
+ assertEquals(expected_nickname, nick.children[0])
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
+
+ pep_update = q.expect('stream-iq', iq_type='set', query_ns=ns.PUBSUB, query_name='pubsub')
+ validate_pep_update(pep_update, 'lala')
+ acknowledge_iq(stream, pep_update.stanza)
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
|
Test setting our own alias via PEP
|
## Code Before:
from servicetest import EventPattern
from gabbletest import exec_test, acknowledge_iq
import constants as cs
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
## Instruction:
Test setting our own alias via PEP
## Code After:
from servicetest import EventPattern, assertEquals
from gabbletest import exec_test, acknowledge_iq
import constants as cs
import ns
def validate_pep_update(pep_update, expected_nickname):
publish = pep_update.query.elements(uri=ns.PUBSUB, name='publish').next()
assertEquals(ns.NICK, publish['node'])
item = publish.elements(uri=ns.PUBSUB, name='item').next()
nick = item.elements(uri=ns.NICK, name='nick').next()
assertEquals(expected_nickname, nick.children[0])
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
pep_update = q.expect('stream-iq', iq_type='set', query_ns=ns.PUBSUB, query_name='pubsub')
validate_pep_update(pep_update, 'lala')
acknowledge_iq(stream, pep_update.stanza)
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
|
- from servicetest import EventPattern
+ from servicetest import EventPattern, assertEquals
? ++++++++++++++
from gabbletest import exec_test, acknowledge_iq
import constants as cs
+ import ns
+
+ def validate_pep_update(pep_update, expected_nickname):
+ publish = pep_update.query.elements(uri=ns.PUBSUB, name='publish').next()
+ assertEquals(ns.NICK, publish['node'])
+ item = publish.elements(uri=ns.PUBSUB, name='item').next()
+ nick = item.elements(uri=ns.NICK, name='nick').next()
+ assertEquals(expected_nickname, nick.children[0])
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
+
+ pep_update = q.expect('stream-iq', iq_type='set', query_ns=ns.PUBSUB, query_name='pubsub')
+ validate_pep_update(pep_update, 'lala')
+ acknowledge_iq(stream, pep_update.stanza)
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
|
d9d2c7d341894e28a5ad73469ec0d9d23d78429e
|
vispy/visuals/graphs/layouts/__init__.py
|
vispy/visuals/graphs/layouts/__init__.py
|
from .random import random # noqa
from .circular import circular # noqa
|
import inspect
from .random import random
from .circular import circular
from .force_directed import fruchterman_reingold
_layout_map = {
'random': random,
'circular': circular,
'force_directed': fruchterman_reingold
}
def get(name, *args, **kwargs):
if name not in _layout_map:
raise KeyError("Graph layout '{}' not found.".format(name))
layout = _layout_map[name]
if inspect.isclass(layout):
layout = layout(*args, **kwargs)
return layout
|
Add new way of retreiving graph layouts
|
Add new way of retreiving graph layouts
|
Python
|
bsd-3-clause
|
ghisvail/vispy,michaelaye/vispy,Eric89GXL/vispy,michaelaye/vispy,Eric89GXL/vispy,Eric89GXL/vispy,drufat/vispy,drufat/vispy,ghisvail/vispy,michaelaye/vispy,drufat/vispy,ghisvail/vispy
|
+ import inspect
- from .random import random # noqa
- from .circular import circular # noqa
+ from .random import random
+ from .circular import circular
+ from .force_directed import fruchterman_reingold
+
+
+ _layout_map = {
+ 'random': random,
+ 'circular': circular,
+ 'force_directed': fruchterman_reingold
+ }
+
+
+ def get(name, *args, **kwargs):
+ if name not in _layout_map:
+ raise KeyError("Graph layout '{}' not found.".format(name))
+
+ layout = _layout_map[name]
+
+ if inspect.isclass(layout):
+ layout = layout(*args, **kwargs)
+
+ return layout
+
|
Add new way of retreiving graph layouts
|
## Code Before:
from .random import random # noqa
from .circular import circular # noqa
## Instruction:
Add new way of retreiving graph layouts
## Code After:
import inspect
from .random import random
from .circular import circular
from .force_directed import fruchterman_reingold
_layout_map = {
'random': random,
'circular': circular,
'force_directed': fruchterman_reingold
}
def get(name, *args, **kwargs):
if name not in _layout_map:
raise KeyError("Graph layout '{}' not found.".format(name))
layout = _layout_map[name]
if inspect.isclass(layout):
layout = layout(*args, **kwargs)
return layout
|
+ import inspect
+
- from .random import random # noqa
? --------
+ from .random import random
- from .circular import circular # noqa
? --------
+ from .circular import circular
+ from .force_directed import fruchterman_reingold
+
+
+ _layout_map = {
+ 'random': random,
+ 'circular': circular,
+ 'force_directed': fruchterman_reingold
+ }
+
+
+ def get(name, *args, **kwargs):
+ if name not in _layout_map:
+ raise KeyError("Graph layout '{}' not found.".format(name))
+
+ layout = _layout_map[name]
+
+ if inspect.isclass(layout):
+ layout = layout(*args, **kwargs)
+
+ return layout
|
2e8d7952f4508e1cbf8d5d9b321a15bcd3bcf2ed
|
pylearn2/packaged_dependencies/theano_linear/util.py
|
pylearn2/packaged_dependencies/theano_linear/util.py
|
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
# XXX: copy-paste out of pylearn
try:
from pylearn.io.image_tiling import tile_slices_to_image
except ImportError:
def tile_slices_to_image(*args, **kwargs):
raise NotImplementedError()
|
from imaging import tile_slices_to_image
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
|
Remove pylearn1 dependency from packaged_dependencies/theano_linear
|
Remove pylearn1 dependency from packaged_dependencies/theano_linear
|
Python
|
bsd-3-clause
|
caidongyun/pylearn2,hantek/pylearn2,msingh172/pylearn2,junbochen/pylearn2,sandeepkbhat/pylearn2,pkainz/pylearn2,lunyang/pylearn2,skearnes/pylearn2,cosmoharrigan/pylearn2,lunyang/pylearn2,aalmah/pylearn2,goodfeli/pylearn2,woozzu/pylearn2,mkraemer67/pylearn2,matrogers/pylearn2,goodfeli/pylearn2,skearnes/pylearn2,theoryno3/pylearn2,mclaughlin6464/pylearn2,aalmah/pylearn2,shiquanwang/pylearn2,hyqneuron/pylearn2-maxsom,fulmicoton/pylearn2,matrogers/pylearn2,pombredanne/pylearn2,hyqneuron/pylearn2-maxsom,kastnerkyle/pylearn2,msingh172/pylearn2,alexjc/pylearn2,fulmicoton/pylearn2,woozzu/pylearn2,jeremyfix/pylearn2,bartvm/pylearn2,hantek/pylearn2,lisa-lab/pylearn2,daemonmaker/pylearn2,cosmoharrigan/pylearn2,woozzu/pylearn2,kose-y/pylearn2,JesseLivezey/pylearn2,hyqneuron/pylearn2-maxsom,matrogers/pylearn2,fyffyt/pylearn2,mclaughlin6464/pylearn2,sandeepkbhat/pylearn2,CIFASIS/pylearn2,fulmicoton/pylearn2,KennethPierce/pylearnk,JesseLivezey/plankton,shiquanwang/pylearn2,lunyang/pylearn2,w1kke/pylearn2,shiquanwang/pylearn2,fishcorn/pylearn2,pkainz/pylearn2,jamessergeant/pylearn2,Refefer/pylearn2,daemonmaker/pylearn2,msingh172/pylearn2,hantek/pylearn2,junbochen/pylearn2,abergeron/pylearn2,lancezlin/pylearn2,jeremyfix/pylearn2,pombredanne/pylearn2,bartvm/pylearn2,sandeepkbhat/pylearn2,TNick/pylearn2,KennethPierce/pylearnk,TNick/pylearn2,Refefer/pylearn2,se4u/pylearn2,aalmah/pylearn2,JesseLivezey/plankton,bartvm/pylearn2,alexjc/pylearn2,JesseLivezey/pylearn2,w1kke/pylearn2,junbochen/pylearn2,fyffyt/pylearn2,woozzu/pylearn2,kastnerkyle/pylearn2,kastnerkyle/pylearn2,msingh172/pylearn2,goodfeli/pylearn2,theoryno3/pylearn2,se4u/pylearn2,chrish42/pylearn,ashhher3/pylearn2,pombredanne/pylearn2,abergeron/pylearn2,mclaughlin6464/pylearn2,Refefer/pylearn2,alexjc/pylearn2,KennethPierce/pylearnk,mkraemer67/pylearn2,fulmicoton/pylearn2,pkainz/pylearn2,lunyang/pylearn2,mkraemer67/pylearn2,nouiz/pylearn2,chrish42/pylearn,alexjc/pylearn2,kose-y/pylearn2,ddboline/pylearn2,lisa-lab/pylearn2,CIFASIS
/pylearn2,mclaughlin6464/pylearn2,chrish42/pylearn,fishcorn/pylearn2,lamblin/pylearn2,junbochen/pylearn2,lamblin/pylearn2,shiquanwang/pylearn2,lancezlin/pylearn2,KennethPierce/pylearnk,se4u/pylearn2,ddboline/pylearn2,se4u/pylearn2,fyffyt/pylearn2,theoryno3/pylearn2,daemonmaker/pylearn2,chrish42/pylearn,jamessergeant/pylearn2,ddboline/pylearn2,caidongyun/pylearn2,nouiz/pylearn2,pombredanne/pylearn2,skearnes/pylearn2,JesseLivezey/plankton,lancezlin/pylearn2,goodfeli/pylearn2,ashhher3/pylearn2,fyffyt/pylearn2,CIFASIS/pylearn2,theoryno3/pylearn2,bartvm/pylearn2,CIFASIS/pylearn2,caidongyun/pylearn2,ashhher3/pylearn2,kose-y/pylearn2,ddboline/pylearn2,ashhher3/pylearn2,TNick/pylearn2,matrogers/pylearn2,pkainz/pylearn2,abergeron/pylearn2,lisa-lab/pylearn2,fishcorn/pylearn2,lisa-lab/pylearn2,abergeron/pylearn2,jamessergeant/pylearn2,jeremyfix/pylearn2,JesseLivezey/pylearn2,cosmoharrigan/pylearn2,fishcorn/pylearn2,TNick/pylearn2,w1kke/pylearn2,aalmah/pylearn2,mkraemer67/pylearn2,hantek/pylearn2,jeremyfix/pylearn2,JesseLivezey/pylearn2,hyqneuron/pylearn2-maxsom,kose-y/pylearn2,lamblin/pylearn2,kastnerkyle/pylearn2,lancezlin/pylearn2,nouiz/pylearn2,w1kke/pylearn2,JesseLivezey/plankton,sandeepkbhat/pylearn2,caidongyun/pylearn2,Refefer/pylearn2,skearnes/pylearn2,daemonmaker/pylearn2,jamessergeant/pylearn2,nouiz/pylearn2,lamblin/pylearn2,cosmoharrigan/pylearn2
|
+
+ from imaging import tile_slices_to_image
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
- # XXX: copy-paste out of pylearn
- try:
- from pylearn.io.image_tiling import tile_slices_to_image
- except ImportError:
- def tile_slices_to_image(*args, **kwargs):
- raise NotImplementedError()
-
-
-
|
Remove pylearn1 dependency from packaged_dependencies/theano_linear
|
## Code Before:
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
# XXX: copy-paste out of pylearn
try:
from pylearn.io.image_tiling import tile_slices_to_image
except ImportError:
def tile_slices_to_image(*args, **kwargs):
raise NotImplementedError()
## Instruction:
Remove pylearn1 dependency from packaged_dependencies/theano_linear
## Code After:
from imaging import tile_slices_to_image
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
|
+
+ from imaging import tile_slices_to_image
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
-
- # XXX: copy-paste out of pylearn
- try:
- from pylearn.io.image_tiling import tile_slices_to_image
- except ImportError:
- def tile_slices_to_image(*args, **kwargs):
- raise NotImplementedError()
-
-
|
ddbce972db10ce92a79982161355ed978fb0c554
|
web/extras/contentment/components/event/model.py
|
web/extras/contentment/components/event/model.py
|
"""Event model."""
import mongoengine as db
from web.extras.contentment.components.page.model import Page
from widgets import fields
log = __import__('logging').getLogger(__name__)
__all__ = ['EventContact', 'Event']
class EventContact(db.EmbeddedDocument):
name = db.StringField(max_length=250)
email = db.StringField(max_length=250)
phone = db.StringField(max_length=64)
class Event(Page):
_widgets = fields
default = db.StringField(default="view:event", max_length=128)
organizer = db.StringField(max_length=250)
location = db.StringField(max_length=250)
starts = db.DateTimeField()
stops = db.DateTimeField()
allday = db.BooleanField(default=False)
contact = db.EmbeddedDocumentField(EventContact)
|
"""Event model."""
import mongoengine as db
from web.extras.contentment.components.page.model import Page
from widgets import fields
log = __import__('logging').getLogger(__name__)
__all__ = ['EventContact', 'Event']
class EventContact(db.EmbeddedDocument):
name = db.StringField(max_length=250)
email = db.StringField(max_length=250)
phone = db.StringField(max_length=64)
class Event(Page):
_widgets = fields
default = db.StringField(default="view:event", max_length=128)
organizer = db.StringField(max_length=250)
location = db.StringField(max_length=250)
starts = db.DateTimeField()
stops = db.DateTimeField()
allday = db.BooleanField(default=False)
contact = db.EmbeddedDocumentField(EventContact)
def process(self, formdata):
formdata = super(Event, self).process(formdata)
contact = EventContact()
for field in 'name', 'email', 'phone':
combined = 'contact.' + field
if combined in formdata:
setattr(contact, field, formdata[combined])
del formdata[combined]
formdata['contact'] = contact
return formdata
|
Fix for inability to define contact information.
|
Fix for inability to define contact information.
|
Python
|
mit
|
marrow/contentment,marrow/contentment
|
"""Event model."""
import mongoengine as db
from web.extras.contentment.components.page.model import Page
from widgets import fields
log = __import__('logging').getLogger(__name__)
__all__ = ['EventContact', 'Event']
class EventContact(db.EmbeddedDocument):
name = db.StringField(max_length=250)
email = db.StringField(max_length=250)
phone = db.StringField(max_length=64)
class Event(Page):
_widgets = fields
default = db.StringField(default="view:event", max_length=128)
organizer = db.StringField(max_length=250)
location = db.StringField(max_length=250)
starts = db.DateTimeField()
stops = db.DateTimeField()
allday = db.BooleanField(default=False)
contact = db.EmbeddedDocumentField(EventContact)
+
+ def process(self, formdata):
+ formdata = super(Event, self).process(formdata)
+
+ contact = EventContact()
+
+ for field in 'name', 'email', 'phone':
+ combined = 'contact.' + field
+ if combined in formdata:
+ setattr(contact, field, formdata[combined])
+ del formdata[combined]
+
+ formdata['contact'] = contact
+
+ return formdata
+
|
Fix for inability to define contact information.
|
## Code Before:
"""Event model."""
import mongoengine as db
from web.extras.contentment.components.page.model import Page
from widgets import fields
log = __import__('logging').getLogger(__name__)
__all__ = ['EventContact', 'Event']
class EventContact(db.EmbeddedDocument):
name = db.StringField(max_length=250)
email = db.StringField(max_length=250)
phone = db.StringField(max_length=64)
class Event(Page):
_widgets = fields
default = db.StringField(default="view:event", max_length=128)
organizer = db.StringField(max_length=250)
location = db.StringField(max_length=250)
starts = db.DateTimeField()
stops = db.DateTimeField()
allday = db.BooleanField(default=False)
contact = db.EmbeddedDocumentField(EventContact)
## Instruction:
Fix for inability to define contact information.
## Code After:
"""Event model."""
import mongoengine as db
from web.extras.contentment.components.page.model import Page
from widgets import fields
log = __import__('logging').getLogger(__name__)
__all__ = ['EventContact', 'Event']
class EventContact(db.EmbeddedDocument):
name = db.StringField(max_length=250)
email = db.StringField(max_length=250)
phone = db.StringField(max_length=64)
class Event(Page):
_widgets = fields
default = db.StringField(default="view:event", max_length=128)
organizer = db.StringField(max_length=250)
location = db.StringField(max_length=250)
starts = db.DateTimeField()
stops = db.DateTimeField()
allday = db.BooleanField(default=False)
contact = db.EmbeddedDocumentField(EventContact)
def process(self, formdata):
formdata = super(Event, self).process(formdata)
contact = EventContact()
for field in 'name', 'email', 'phone':
combined = 'contact.' + field
if combined in formdata:
setattr(contact, field, formdata[combined])
del formdata[combined]
formdata['contact'] = contact
return formdata
|
"""Event model."""
import mongoengine as db
from web.extras.contentment.components.page.model import Page
from widgets import fields
log = __import__('logging').getLogger(__name__)
__all__ = ['EventContact', 'Event']
class EventContact(db.EmbeddedDocument):
name = db.StringField(max_length=250)
email = db.StringField(max_length=250)
phone = db.StringField(max_length=64)
class Event(Page):
_widgets = fields
default = db.StringField(default="view:event", max_length=128)
organizer = db.StringField(max_length=250)
location = db.StringField(max_length=250)
starts = db.DateTimeField()
stops = db.DateTimeField()
allday = db.BooleanField(default=False)
contact = db.EmbeddedDocumentField(EventContact)
+
+ def process(self, formdata):
+ formdata = super(Event, self).process(formdata)
+
+ contact = EventContact()
+
+ for field in 'name', 'email', 'phone':
+ combined = 'contact.' + field
+ if combined in formdata:
+ setattr(contact, field, formdata[combined])
+ del formdata[combined]
+
+ formdata['contact'] = contact
+
+ return formdata
+
|
2261a15132a0b98821cb3e5614c044f1b41fbc73
|
smsgateway/__init__.py
|
smsgateway/__init__.py
|
__version__ = '2.0.0'
def get_account(using=None):
from django.conf import settings
accounts = settings.SMSGATEWAY_ACCOUNTS
if using is not None:
return accounts[using]
else:
return accounts[accounts['__default__']]
def send(to, msg, signature, using=None, reliable=False):
"""
Send an SMS message immediately.
* 'to' is a semicolon separated list of phone numbers with an international
prefix (+32... etc).
* 'msg' is the message itself as a unicode object (max 160 characters).
* 'signature' is where the message comes from. Depends on the backend in use.
* 'using' is an optional parameter where you can specify a specific account
to send messages from.
"""
from smsgateway.backends import get_backend
from smsgateway.sms import SMSRequest
account_dict = get_account(using)
backend = get_backend(account_dict['backend'])
sms_request = SMSRequest(to, msg, signature, reliable=reliable)
return backend.send(sms_request, account_dict)
def send_queued(to, msg, signature, using=None, reliable=False):
"""
Place SMS message in queue to be sent.
"""
from smsgateway.models import QueuedSMS
QueuedSMS.objects.create(
to=to,
content=msg,
signature=signature,
using=using if using is not None else '__none__',
reliable=reliable
)
|
__version__ = '2.0.0'
def get_account(using=None):
from django.conf import settings
accounts = settings.SMSGATEWAY_ACCOUNTS
if using is not None:
return accounts[using]
else:
return accounts[accounts['__default__']]
def send(to, msg, signature, using=None, reliable=False):
"""
Send an SMS message immediately.
* 'to' is a semicolon separated list of phone numbers with an international
prefix (+32... etc).
* 'msg' is the message itself as a unicode object (max 160 characters).
* 'signature' is where the message comes from. Depends on the backend in use.
* 'using' is an optional parameter where you can specify a specific account
to send messages from.
"""
from smsgateway.backends import get_backend
from smsgateway.sms import SMSRequest
account_dict = get_account(using)
backend = get_backend(account_dict['backend'])
sms_request = SMSRequest(to, msg, signature, reliable=reliable)
return backend.send(sms_request, account_dict)
def send_queued(to, msg, signature, using=None, reliable=False, priority=None):
"""
Place SMS message in queue to be sent.
"""
from smsgateway.models import QueuedSMS
queued_sms = QueuedSMS(
to=to,
content=msg,
signature=signature,
using=using if using is not None else '__none__',
reliable=reliable
)
if priority is not None:
queued_sms.priority = priority
queued_sms.save()
|
Add priority option to send_queued
|
Add priority option to send_queued
|
Python
|
bsd-3-clause
|
peterayeni/django-smsgateway,mvpoland/django-smsgateway,peterayeni/django-smsgateway,mvpoland/django-smsgateway,peterayeni/django-smsgateway,peterayeni/django-smsgateway,mvpoland/django-smsgateway
|
__version__ = '2.0.0'
def get_account(using=None):
from django.conf import settings
accounts = settings.SMSGATEWAY_ACCOUNTS
if using is not None:
return accounts[using]
else:
return accounts[accounts['__default__']]
def send(to, msg, signature, using=None, reliable=False):
"""
Send an SMS message immediately.
* 'to' is a semicolon separated list of phone numbers with an international
prefix (+32... etc).
* 'msg' is the message itself as a unicode object (max 160 characters).
* 'signature' is where the message comes from. Depends on the backend in use.
* 'using' is an optional parameter where you can specify a specific account
to send messages from.
"""
from smsgateway.backends import get_backend
from smsgateway.sms import SMSRequest
account_dict = get_account(using)
backend = get_backend(account_dict['backend'])
sms_request = SMSRequest(to, msg, signature, reliable=reliable)
return backend.send(sms_request, account_dict)
- def send_queued(to, msg, signature, using=None, reliable=False):
+ def send_queued(to, msg, signature, using=None, reliable=False, priority=None):
"""
Place SMS message in queue to be sent.
"""
from smsgateway.models import QueuedSMS
- QueuedSMS.objects.create(
+ queued_sms = QueuedSMS(
to=to,
content=msg,
signature=signature,
using=using if using is not None else '__none__',
reliable=reliable
)
+ if priority is not None:
+ queued_sms.priority = priority
+ queued_sms.save()
|
Add priority option to send_queued
|
## Code Before:
__version__ = '2.0.0'
def get_account(using=None):
from django.conf import settings
accounts = settings.SMSGATEWAY_ACCOUNTS
if using is not None:
return accounts[using]
else:
return accounts[accounts['__default__']]
def send(to, msg, signature, using=None, reliable=False):
"""
Send an SMS message immediately.
* 'to' is a semicolon separated list of phone numbers with an international
prefix (+32... etc).
* 'msg' is the message itself as a unicode object (max 160 characters).
* 'signature' is where the message comes from. Depends on the backend in use.
* 'using' is an optional parameter where you can specify a specific account
to send messages from.
"""
from smsgateway.backends import get_backend
from smsgateway.sms import SMSRequest
account_dict = get_account(using)
backend = get_backend(account_dict['backend'])
sms_request = SMSRequest(to, msg, signature, reliable=reliable)
return backend.send(sms_request, account_dict)
def send_queued(to, msg, signature, using=None, reliable=False):
"""
Place SMS message in queue to be sent.
"""
from smsgateway.models import QueuedSMS
QueuedSMS.objects.create(
to=to,
content=msg,
signature=signature,
using=using if using is not None else '__none__',
reliable=reliable
)
## Instruction:
Add priority option to send_queued
## Code After:
__version__ = '2.0.0'
def get_account(using=None):
from django.conf import settings
accounts = settings.SMSGATEWAY_ACCOUNTS
if using is not None:
return accounts[using]
else:
return accounts[accounts['__default__']]
def send(to, msg, signature, using=None, reliable=False):
"""
Send an SMS message immediately.
* 'to' is a semicolon separated list of phone numbers with an international
prefix (+32... etc).
* 'msg' is the message itself as a unicode object (max 160 characters).
* 'signature' is where the message comes from. Depends on the backend in use.
* 'using' is an optional parameter where you can specify a specific account
to send messages from.
"""
from smsgateway.backends import get_backend
from smsgateway.sms import SMSRequest
account_dict = get_account(using)
backend = get_backend(account_dict['backend'])
sms_request = SMSRequest(to, msg, signature, reliable=reliable)
return backend.send(sms_request, account_dict)
def send_queued(to, msg, signature, using=None, reliable=False, priority=None):
"""
Place SMS message in queue to be sent.
"""
from smsgateway.models import QueuedSMS
queued_sms = QueuedSMS(
to=to,
content=msg,
signature=signature,
using=using if using is not None else '__none__',
reliable=reliable
)
if priority is not None:
queued_sms.priority = priority
queued_sms.save()
|
__version__ = '2.0.0'
def get_account(using=None):
from django.conf import settings
accounts = settings.SMSGATEWAY_ACCOUNTS
if using is not None:
return accounts[using]
else:
return accounts[accounts['__default__']]
def send(to, msg, signature, using=None, reliable=False):
"""
Send an SMS message immediately.
* 'to' is a semicolon separated list of phone numbers with an international
prefix (+32... etc).
* 'msg' is the message itself as a unicode object (max 160 characters).
* 'signature' is where the message comes from. Depends on the backend in use.
* 'using' is an optional parameter where you can specify a specific account
to send messages from.
"""
from smsgateway.backends import get_backend
from smsgateway.sms import SMSRequest
account_dict = get_account(using)
backend = get_backend(account_dict['backend'])
sms_request = SMSRequest(to, msg, signature, reliable=reliable)
return backend.send(sms_request, account_dict)
- def send_queued(to, msg, signature, using=None, reliable=False):
+ def send_queued(to, msg, signature, using=None, reliable=False, priority=None):
? +++++++++++++++
"""
Place SMS message in queue to be sent.
"""
from smsgateway.models import QueuedSMS
- QueuedSMS.objects.create(
+ queued_sms = QueuedSMS(
to=to,
content=msg,
signature=signature,
using=using if using is not None else '__none__',
reliable=reliable
)
+ if priority is not None:
+ queued_sms.priority = priority
+ queued_sms.save()
|
c4b7532987958573dafe01621cdd254db63bf8ea
|
bfg9000/builtins/hooks.py
|
bfg9000/builtins/hooks.py
|
import functools
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
# XXX: partial doesn't forward the docstring of the function.
return functools.partial(self._fn, *[kwargs[i] for i in self._args])
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
|
import functools
import inspect
import sys
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
pre_args = tuple(kwargs[i] for i in self._args)
@functools.wraps(self._fn)
def wrapped(*args, **kwargs):
return self._fn(*(pre_args + args), **kwargs)
if sys.version_info >= (3, 3):
sig = inspect.signature(wrapped)
params = list(sig.parameters.values())[len(kwargs):]
wrapped.__signature__ = inspect.Signature(params)
return wrapped
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
|
Change how the wrappers work for builtin functions so that docs get forwarded correctly
|
Change how the wrappers work for builtin functions so that docs get forwarded correctly
|
Python
|
bsd-3-clause
|
jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000
|
import functools
+ import inspect
+ import sys
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
- # XXX: partial doesn't forward the docstring of the function.
- return functools.partial(self._fn, *[kwargs[i] for i in self._args])
+ pre_args = tuple(kwargs[i] for i in self._args)
+
+ @functools.wraps(self._fn)
+ def wrapped(*args, **kwargs):
+ return self._fn(*(pre_args + args), **kwargs)
+
+ if sys.version_info >= (3, 3):
+ sig = inspect.signature(wrapped)
+ params = list(sig.parameters.values())[len(kwargs):]
+ wrapped.__signature__ = inspect.Signature(params)
+ return wrapped
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
|
Change how the wrappers work for builtin functions so that docs get forwarded correctly
|
## Code Before:
import functools
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
# XXX: partial doesn't forward the docstring of the function.
return functools.partial(self._fn, *[kwargs[i] for i in self._args])
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
## Instruction:
Change how the wrappers work for builtin functions so that docs get forwarded correctly
## Code After:
import functools
import inspect
import sys
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
pre_args = tuple(kwargs[i] for i in self._args)
@functools.wraps(self._fn)
def wrapped(*args, **kwargs):
return self._fn(*(pre_args + args), **kwargs)
if sys.version_info >= (3, 3):
sig = inspect.signature(wrapped)
params = list(sig.parameters.values())[len(kwargs):]
wrapped.__signature__ = inspect.Signature(params)
return wrapped
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
|
import functools
+ import inspect
+ import sys
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
- # XXX: partial doesn't forward the docstring of the function.
- return functools.partial(self._fn, *[kwargs[i] for i in self._args])
+ pre_args = tuple(kwargs[i] for i in self._args)
+
+ @functools.wraps(self._fn)
+ def wrapped(*args, **kwargs):
+ return self._fn(*(pre_args + args), **kwargs)
+
+ if sys.version_info >= (3, 3):
+ sig = inspect.signature(wrapped)
+ params = list(sig.parameters.values())[len(kwargs):]
+ wrapped.__signature__ = inspect.Signature(params)
+ return wrapped
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
|
ab0ba3232c1a7a4b028abe6d371b3935ea0ab110
|
eve_api/tasks.py
|
eve_api/tasks.py
|
from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
log = import_apikey.get_logger()
l.info("Importing %s/%s" % (api_userid, api_key))
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
|
from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
|
Fix error in the apikey import task
|
Fix error in the apikey import task
|
Python
|
bsd-3-clause
|
nikdoof/test-auth
|
from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
-
- log = import_apikey.get_logger()
- l.info("Importing %s/%s" % (api_userid, api_key))
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
|
Fix error in the apikey import task
|
## Code Before:
from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
log = import_apikey.get_logger()
l.info("Importing %s/%s" % (api_userid, api_key))
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
## Instruction:
Fix error in the apikey import task
## Code After:
from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
|
from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
-
- log = import_apikey.get_logger()
- l.info("Importing %s/%s" % (api_userid, api_key))
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
|
61394aec9d2193a978a0754bb43f70d1f66262d4
|
django_json_widget/widgets.py
|
django_json_widget/widgets.py
|
import json
from builtins import super
from django import forms
from django.templatetags.static import static
class JSONEditorWidget(forms.Widget):
class Media:
css = {'all': (static('dist/jsoneditor.min.css'), )}
js = (static('dist/jsoneditor.min.js'),)
template_name = 'django_json_widget.html'
def __init__(self, attrs=None, mode='code', options=None, width=None, height=None):
default_options = {
'modes': ['text', 'code', 'tree', 'form', 'view'],
'mode': mode,
'search': True,
}
if options:
default_options.update(options)
self.options = default_options
self.width = width
self.height = height
super(JSONEditorWidget, self).__init__(attrs=attrs)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context['widget']['options'] = json.dumps(self.options)
context['widget']['width'] = self.width
context['widget']['height'] = self.height
return context
|
import json
from builtins import super
from django import forms
class JSONEditorWidget(forms.Widget):
class Media:
css = {'all': ('dist/jsoneditor.min.css', )}
js = ('dist/jsoneditor.min.js',)
template_name = 'django_json_widget.html'
def __init__(self, attrs=None, mode='code', options=None, width=None, height=None):
default_options = {
'modes': ['text', 'code', 'tree', 'form', 'view'],
'mode': mode,
'search': True,
}
if options:
default_options.update(options)
self.options = default_options
self.width = width
self.height = height
super(JSONEditorWidget, self).__init__(attrs=attrs)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context['widget']['options'] = json.dumps(self.options)
context['widget']['width'] = self.width
context['widget']['height'] = self.height
return context
|
Stop resolving paths to the static files.
|
Stop resolving paths to the static files.
Fixed #33
|
Python
|
mit
|
jmrivas86/django-json-widget,jmrivas86/django-json-widget
|
import json
from builtins import super
from django import forms
- from django.templatetags.static import static
class JSONEditorWidget(forms.Widget):
class Media:
- css = {'all': (static('dist/jsoneditor.min.css'), )}
+ css = {'all': ('dist/jsoneditor.min.css', )}
- js = (static('dist/jsoneditor.min.js'),)
+ js = ('dist/jsoneditor.min.js',)
template_name = 'django_json_widget.html'
def __init__(self, attrs=None, mode='code', options=None, width=None, height=None):
default_options = {
'modes': ['text', 'code', 'tree', 'form', 'view'],
'mode': mode,
'search': True,
}
if options:
default_options.update(options)
self.options = default_options
self.width = width
self.height = height
super(JSONEditorWidget, self).__init__(attrs=attrs)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context['widget']['options'] = json.dumps(self.options)
context['widget']['width'] = self.width
context['widget']['height'] = self.height
return context
|
Stop resolving paths to the static files.
|
## Code Before:
import json
from builtins import super
from django import forms
from django.templatetags.static import static
class JSONEditorWidget(forms.Widget):
class Media:
css = {'all': (static('dist/jsoneditor.min.css'), )}
js = (static('dist/jsoneditor.min.js'),)
template_name = 'django_json_widget.html'
def __init__(self, attrs=None, mode='code', options=None, width=None, height=None):
default_options = {
'modes': ['text', 'code', 'tree', 'form', 'view'],
'mode': mode,
'search': True,
}
if options:
default_options.update(options)
self.options = default_options
self.width = width
self.height = height
super(JSONEditorWidget, self).__init__(attrs=attrs)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context['widget']['options'] = json.dumps(self.options)
context['widget']['width'] = self.width
context['widget']['height'] = self.height
return context
## Instruction:
Stop resolving paths to the static files.
## Code After:
import json
from builtins import super
from django import forms
class JSONEditorWidget(forms.Widget):
class Media:
css = {'all': ('dist/jsoneditor.min.css', )}
js = ('dist/jsoneditor.min.js',)
template_name = 'django_json_widget.html'
def __init__(self, attrs=None, mode='code', options=None, width=None, height=None):
default_options = {
'modes': ['text', 'code', 'tree', 'form', 'view'],
'mode': mode,
'search': True,
}
if options:
default_options.update(options)
self.options = default_options
self.width = width
self.height = height
super(JSONEditorWidget, self).__init__(attrs=attrs)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context['widget']['options'] = json.dumps(self.options)
context['widget']['width'] = self.width
context['widget']['height'] = self.height
return context
|
import json
from builtins import super
from django import forms
- from django.templatetags.static import static
class JSONEditorWidget(forms.Widget):
class Media:
- css = {'all': (static('dist/jsoneditor.min.css'), )}
? ------- -
+ css = {'all': ('dist/jsoneditor.min.css', )}
- js = (static('dist/jsoneditor.min.js'),)
? ------- -
+ js = ('dist/jsoneditor.min.js',)
template_name = 'django_json_widget.html'
def __init__(self, attrs=None, mode='code', options=None, width=None, height=None):
default_options = {
'modes': ['text', 'code', 'tree', 'form', 'view'],
'mode': mode,
'search': True,
}
if options:
default_options.update(options)
self.options = default_options
self.width = width
self.height = height
super(JSONEditorWidget, self).__init__(attrs=attrs)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context['widget']['options'] = json.dumps(self.options)
context['widget']['width'] = self.width
context['widget']['height'] = self.height
return context
|
3fa9a7c62aeae10a191b3782e32df107618c19b3
|
boris/reporting/forms.py
|
boris/reporting/forms.py
|
'''
Created on 3.12.2011
@author: xaralis
'''
from django import forms
from django.utils.translation import ugettext_lazy as _
from boris.utils.widgets import SelectYearWidget
class MonthlyStatsForm(forms.Form):
year = forms.IntegerField(widget=SelectYearWidget(history=10), label=_(u'Rok'))
class ServiceForm(forms.Form):
date_from = forms.DateField(label=_(u'Od'))
date_to = forms.DateField(label=_(u'Do'))
|
'''
Created on 3.12.2011
@author: xaralis
'''
from django import forms
from django.utils.translation import ugettext_lazy as _
from boris.utils.widgets import SelectYearWidget
class MonthlyStatsForm(forms.Form):
year = forms.IntegerField(widget=SelectYearWidget(history=10), label=_(u'Rok'))
class ServiceForm(forms.Form):
date_from = forms.DateField(label=_(u'Od'), required=False)
date_to = forms.DateField(label=_(u'Do'), required=False)
|
Make dates in the ServiceForm optional.
|
Make dates in the ServiceForm optional.
|
Python
|
mit
|
fragaria/BorIS,fragaria/BorIS,fragaria/BorIS
|
'''
Created on 3.12.2011
@author: xaralis
'''
from django import forms
from django.utils.translation import ugettext_lazy as _
from boris.utils.widgets import SelectYearWidget
class MonthlyStatsForm(forms.Form):
year = forms.IntegerField(widget=SelectYearWidget(history=10), label=_(u'Rok'))
class ServiceForm(forms.Form):
- date_from = forms.DateField(label=_(u'Od'))
+ date_from = forms.DateField(label=_(u'Od'), required=False)
- date_to = forms.DateField(label=_(u'Do'))
+ date_to = forms.DateField(label=_(u'Do'), required=False)
|
Make dates in the ServiceForm optional.
|
## Code Before:
'''
Created on 3.12.2011
@author: xaralis
'''
from django import forms
from django.utils.translation import ugettext_lazy as _
from boris.utils.widgets import SelectYearWidget
class MonthlyStatsForm(forms.Form):
year = forms.IntegerField(widget=SelectYearWidget(history=10), label=_(u'Rok'))
class ServiceForm(forms.Form):
date_from = forms.DateField(label=_(u'Od'))
date_to = forms.DateField(label=_(u'Do'))
## Instruction:
Make dates in the ServiceForm optional.
## Code After:
'''
Created on 3.12.2011
@author: xaralis
'''
from django import forms
from django.utils.translation import ugettext_lazy as _
from boris.utils.widgets import SelectYearWidget
class MonthlyStatsForm(forms.Form):
year = forms.IntegerField(widget=SelectYearWidget(history=10), label=_(u'Rok'))
class ServiceForm(forms.Form):
date_from = forms.DateField(label=_(u'Od'), required=False)
date_to = forms.DateField(label=_(u'Do'), required=False)
|
'''
Created on 3.12.2011
@author: xaralis
'''
from django import forms
from django.utils.translation import ugettext_lazy as _
from boris.utils.widgets import SelectYearWidget
class MonthlyStatsForm(forms.Form):
year = forms.IntegerField(widget=SelectYearWidget(history=10), label=_(u'Rok'))
class ServiceForm(forms.Form):
- date_from = forms.DateField(label=_(u'Od'))
+ date_from = forms.DateField(label=_(u'Od'), required=False)
? ++++++++++++++++
- date_to = forms.DateField(label=_(u'Do'))
+ date_to = forms.DateField(label=_(u'Do'), required=False)
? ++++++++++++++++
|
620d59b82e8a5c98fd4568217c74b485f802ac94
|
tests/integration/test_authentication.py
|
tests/integration/test_authentication.py
|
from pyutrack import Connection
from pyutrack import Credentials
from pyutrack.errors import LoginError
from tests.integration import IntegrationTest
class AuthenticationTests(IntegrationTest):
def test_successful_authentication(self):
connection = Connection(
credentials=Credentials(username='root', password='root'),
base_url='http://localhost:9876'
)
self.assertTrue(connection.login())
def test_invalid_password(self):
connection = Connection(
credentials=Credentials(username='root', password='rooted'),
base_url='http://localhost:9876'
)
self.assertRaises(connection.login, LoginError)
|
from pyutrack import Connection
from pyutrack import Credentials
from pyutrack.errors import LoginError
from tests.integration import IntegrationTest
class AuthenticationTests(IntegrationTest):
def test_successful_authentication(self):
connection = Connection(
credentials=Credentials(username='root', password='root'),
base_url='http://localhost:9876'
)
self.assertTrue(connection.login())
def test_invalid_password(self):
connection = Connection(
credentials=Credentials(username='root', password='rooted'),
base_url='http://localhost:9876'
)
self.assertRaises(LoginError, connection.login)
|
Fix order of assertion arguments
|
Fix order of assertion arguments
|
Python
|
mit
|
alisaifee/pyutrack,alisaifee/pyutrack
|
from pyutrack import Connection
from pyutrack import Credentials
from pyutrack.errors import LoginError
from tests.integration import IntegrationTest
class AuthenticationTests(IntegrationTest):
def test_successful_authentication(self):
connection = Connection(
credentials=Credentials(username='root', password='root'),
base_url='http://localhost:9876'
)
self.assertTrue(connection.login())
def test_invalid_password(self):
connection = Connection(
credentials=Credentials(username='root', password='rooted'),
base_url='http://localhost:9876'
)
- self.assertRaises(connection.login, LoginError)
+ self.assertRaises(LoginError, connection.login)
|
Fix order of assertion arguments
|
## Code Before:
from pyutrack import Connection
from pyutrack import Credentials
from pyutrack.errors import LoginError
from tests.integration import IntegrationTest
class AuthenticationTests(IntegrationTest):
def test_successful_authentication(self):
connection = Connection(
credentials=Credentials(username='root', password='root'),
base_url='http://localhost:9876'
)
self.assertTrue(connection.login())
def test_invalid_password(self):
connection = Connection(
credentials=Credentials(username='root', password='rooted'),
base_url='http://localhost:9876'
)
self.assertRaises(connection.login, LoginError)
## Instruction:
Fix order of assertion arguments
## Code After:
from pyutrack import Connection
from pyutrack import Credentials
from pyutrack.errors import LoginError
from tests.integration import IntegrationTest
class AuthenticationTests(IntegrationTest):
def test_successful_authentication(self):
connection = Connection(
credentials=Credentials(username='root', password='root'),
base_url='http://localhost:9876'
)
self.assertTrue(connection.login())
def test_invalid_password(self):
connection = Connection(
credentials=Credentials(username='root', password='rooted'),
base_url='http://localhost:9876'
)
self.assertRaises(LoginError, connection.login)
|
from pyutrack import Connection
from pyutrack import Credentials
from pyutrack.errors import LoginError
from tests.integration import IntegrationTest
class AuthenticationTests(IntegrationTest):
def test_successful_authentication(self):
connection = Connection(
credentials=Credentials(username='root', password='root'),
base_url='http://localhost:9876'
)
self.assertTrue(connection.login())
def test_invalid_password(self):
connection = Connection(
credentials=Credentials(username='root', password='rooted'),
base_url='http://localhost:9876'
)
- self.assertRaises(connection.login, LoginError)
? ------------
+ self.assertRaises(LoginError, connection.login)
? ++++++++++++
|
d8375d3e3a4a00598ac0cdc38861be9f56fb58c0
|
edison/tests/sanity_tests.py
|
edison/tests/sanity_tests.py
|
from edison.tests import unittest
class SanityTests(unittest.TestCase):
def test_psych(self):
self.assertTrue(True)
|
from edison.tests import unittest
class SanityTests(unittest.TestCase):
def test_psych(self):
self.assertTrue(True)
self.assertFalse(False)
|
Add another inane test to trigger Landscape
|
Add another inane test to trigger Landscape
|
Python
|
mit
|
briancline/edison
|
from edison.tests import unittest
class SanityTests(unittest.TestCase):
def test_psych(self):
self.assertTrue(True)
+ self.assertFalse(False)
|
Add another inane test to trigger Landscape
|
## Code Before:
from edison.tests import unittest
class SanityTests(unittest.TestCase):
def test_psych(self):
self.assertTrue(True)
## Instruction:
Add another inane test to trigger Landscape
## Code After:
from edison.tests import unittest
class SanityTests(unittest.TestCase):
def test_psych(self):
self.assertTrue(True)
self.assertFalse(False)
|
from edison.tests import unittest
class SanityTests(unittest.TestCase):
def test_psych(self):
self.assertTrue(True)
+ self.assertFalse(False)
|
882fc867ab115f2b84f2f185bcebf3eb4a1d2fc8
|
core/forms.py
|
core/forms.py
|
from django.forms import ModelForm
from django.forms.fields import CharField
from models import UserProfile
class UserProfileForm(ModelForm):
first_name = CharField(label='First name', required=False)
last_name = CharField(label='Last name', required=False)
class Meta:
model = UserProfile
# Don't allow users edit someone else's user page,
# or to whitelist themselves
exclude = ('user', 'whitelisted',)
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
if self.instance:
self.fields['first_name'].initial = self.instance.user.first_name
self.fields['last_name'].initial = self.instance.user.last_name
def save(self):
first_name = self.cleaned_data.pop('first_name', None)
last_name = self.cleaned_data.pop('last_name', None)
profile = super(UserProfileForm, self).save()
user = profile.user
user.first_name = first_name
user.last_name = last_name
user.save()
return profile
|
from django.forms import ModelForm
from django.forms.fields import CharField
from models import UserProfile
class UserProfileForm(ModelForm):
first_name = CharField(label='First name', required=False)
last_name = CharField(label='Last name', required=False)
class Meta:
model = UserProfile
# Don't allow users edit someone else's user page,
# or to whitelist themselves
exclude = ('user', 'whitelisted',)
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
if self.is_bound:
self.fields['first_name'].initial = self.instance.user.first_name
self.fields['last_name'].initial = self.instance.user.last_name
def save(self):
first_name = self.cleaned_data.pop('first_name', None)
last_name = self.cleaned_data.pop('last_name', None)
profile = super(UserProfileForm, self).save()
user = profile.user
user.first_name = first_name
user.last_name = last_name
user.save()
return profile
|
Fix profile creation. (Need tests badly).
|
Fix profile creation. (Need tests badly).
|
Python
|
mit
|
kenwang76/readthedocs.org,soulshake/readthedocs.org,nyergler/pythonslides,gjtorikian/readthedocs.org,tddv/readthedocs.org,kenshinthebattosai/readthedocs.org,ojii/readthedocs.org,LukasBoersma/readthedocs.org,mhils/readthedocs.org,sid-kap/readthedocs.org,michaelmcandrew/readthedocs.org,michaelmcandrew/readthedocs.org,ojii/readthedocs.org,royalwang/readthedocs.org,asampat3090/readthedocs.org,SteveViss/readthedocs.org,ojii/readthedocs.org,safwanrahman/readthedocs.org,kdkeyser/readthedocs.org,wanghaven/readthedocs.org,KamranMackey/readthedocs.org,johncosta/private-readthedocs.org,fujita-shintaro/readthedocs.org,Tazer/readthedocs.org,titiushko/readthedocs.org,istresearch/readthedocs.org,pombredanne/readthedocs.org,CedarLogic/readthedocs.org,wanghaven/readthedocs.org,davidfischer/readthedocs.org,wijerasa/readthedocs.org,CedarLogic/readthedocs.org,stevepiercy/readthedocs.org,gjtorikian/readthedocs.org,nyergler/pythonslides,takluyver/readthedocs.org,emawind84/readthedocs.org,sils1297/readthedocs.org,attakei/readthedocs-oauth,KamranMackey/readthedocs.org,dirn/readthedocs.org,titiushko/readthedocs.org,wijerasa/readthedocs.org,davidfischer/readthedocs.org,GovReady/readthedocs.org,VishvajitP/readthedocs.org,davidfischer/readthedocs.org,wanghaven/readthedocs.org,raven47git/readthedocs.org,clarkperkins/readthedocs.org,sid-kap/readthedocs.org,LukasBoersma/readthedocs.org,kenshinthebattosai/readthedocs.org,espdev/readthedocs.org,Tazer/readthedocs.org,SteveViss/readthedocs.org,davidfischer/readthedocs.org,mrshoki/readthedocs.org,soulshake/readthedocs.org,johncosta/private-readthedocs.org,singingwolfboy/readthedocs.org,espdev/readthedocs.org,GovReady/readthedocs.org,takluyver/readthedocs.org,d0ugal/readthedocs.org,asampat3090/readthedocs.org,royalwang/readthedocs.org,michaelmcandrew/readthedocs.org,SteveViss/readthedocs.org,royalwang/readthedocs.org,nikolas/readthedocs.org,istresearch/readthedocs.org,wijerasa/readthedocs.org,CedarLogic/readthedocs.org,techtonik/readthedocs.org,agjohns
on/readthedocs.org,istresearch/readthedocs.org,dirn/readthedocs.org,Carreau/readthedocs.org,raven47git/readthedocs.org,raven47git/readthedocs.org,alex/readthedocs.org,emawind84/readthedocs.org,sunnyzwh/readthedocs.org,cgourlay/readthedocs.org,hach-que/readthedocs.org,Tazer/readthedocs.org,kenwang76/readthedocs.org,sils1297/readthedocs.org,kdkeyser/readthedocs.org,alex/readthedocs.org,VishvajitP/readthedocs.org,attakei/readthedocs-oauth,espdev/readthedocs.org,agjohnson/readthedocs.org,soulshake/readthedocs.org,tddv/readthedocs.org,sils1297/readthedocs.org,techtonik/readthedocs.org,soulshake/readthedocs.org,sunnyzwh/readthedocs.org,Carreau/readthedocs.org,hach-que/readthedocs.org,d0ugal/readthedocs.org,attakei/readthedocs-oauth,hach-que/readthedocs.org,atsuyim/readthedocs.org,stevepiercy/readthedocs.org,ojii/readthedocs.org,singingwolfboy/readthedocs.org,tddv/readthedocs.org,rtfd/readthedocs.org,espdev/readthedocs.org,KamranMackey/readthedocs.org,nyergler/pythonslides,techtonik/readthedocs.org,mrshoki/readthedocs.org,attakei/readthedocs-oauth,mhils/readthedocs.org,atsuyim/readthedocs.org,nikolas/readthedocs.org,johncosta/private-readthedocs.org,dirn/readthedocs.org,kenshinthebattosai/readthedocs.org,pombredanne/readthedocs.org,Carreau/readthedocs.org,jerel/readthedocs.org,nikolas/readthedocs.org,takluyver/readthedocs.org,titiushko/readthedocs.org,takluyver/readthedocs.org,laplaceliu/readthedocs.org,fujita-shintaro/readthedocs.org,cgourlay/readthedocs.org,rtfd/readthedocs.org,hach-que/readthedocs.org,espdev/readthedocs.org,laplaceliu/readthedocs.org,rtfd/readthedocs.org,gjtorikian/readthedocs.org,LukasBoersma/readthedocs.org,Carreau/readthedocs.org,sid-kap/readthedocs.org,jerel/readthedocs.org,sils1297/readthedocs.org,safwanrahman/readthedocs.org,clarkperkins/readthedocs.org,gjtorikian/readthedocs.org,kenshinthebattosai/readthedocs.org,alex/readthedocs.org,jerel/readthedocs.org,Tazer/readthedocs.org,atsuyim/readthedocs.org,VishvajitP/readthedocs.org,d0ugal/readthedocs.
org,sunnyzwh/readthedocs.org,clarkperkins/readthedocs.org,stevepiercy/readthedocs.org,istresearch/readthedocs.org,GovReady/readthedocs.org,SteveViss/readthedocs.org,titiushko/readthedocs.org,agjohnson/readthedocs.org,fujita-shintaro/readthedocs.org,mrshoki/readthedocs.org,KamranMackey/readthedocs.org,stevepiercy/readthedocs.org,mhils/readthedocs.org,mhils/readthedocs.org,singingwolfboy/readthedocs.org,royalwang/readthedocs.org,safwanrahman/readthedocs.org,sid-kap/readthedocs.org,LukasBoersma/readthedocs.org,dirn/readthedocs.org,kenwang76/readthedocs.org,sunnyzwh/readthedocs.org,GovReady/readthedocs.org,emawind84/readthedocs.org,asampat3090/readthedocs.org,jerel/readthedocs.org,cgourlay/readthedocs.org,asampat3090/readthedocs.org,singingwolfboy/readthedocs.org,cgourlay/readthedocs.org,CedarLogic/readthedocs.org,safwanrahman/readthedocs.org,kdkeyser/readthedocs.org,techtonik/readthedocs.org,clarkperkins/readthedocs.org,kdkeyser/readthedocs.org,rtfd/readthedocs.org,wanghaven/readthedocs.org,kenwang76/readthedocs.org,pombredanne/readthedocs.org,emawind84/readthedocs.org,fujita-shintaro/readthedocs.org,wijerasa/readthedocs.org,nikolas/readthedocs.org,michaelmcandrew/readthedocs.org,mrshoki/readthedocs.org,d0ugal/readthedocs.org,atsuyim/readthedocs.org,laplaceliu/readthedocs.org,agjohnson/readthedocs.org,raven47git/readthedocs.org,alex/readthedocs.org,laplaceliu/readthedocs.org,nyergler/pythonslides,VishvajitP/readthedocs.org
|
from django.forms import ModelForm
from django.forms.fields import CharField
from models import UserProfile
class UserProfileForm(ModelForm):
first_name = CharField(label='First name', required=False)
last_name = CharField(label='Last name', required=False)
-
+
class Meta:
model = UserProfile
# Don't allow users edit someone else's user page,
# or to whitelist themselves
exclude = ('user', 'whitelisted',)
-
+
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
- if self.instance:
+ if self.is_bound:
self.fields['first_name'].initial = self.instance.user.first_name
self.fields['last_name'].initial = self.instance.user.last_name
def save(self):
first_name = self.cleaned_data.pop('first_name', None)
last_name = self.cleaned_data.pop('last_name', None)
profile = super(UserProfileForm, self).save()
user = profile.user
user.first_name = first_name
user.last_name = last_name
user.save()
return profile
+
|
Fix profile creation. (Need tests badly).
|
## Code Before:
from django.forms import ModelForm
from django.forms.fields import CharField
from models import UserProfile
class UserProfileForm(ModelForm):
first_name = CharField(label='First name', required=False)
last_name = CharField(label='Last name', required=False)
class Meta:
model = UserProfile
# Don't allow users edit someone else's user page,
# or to whitelist themselves
exclude = ('user', 'whitelisted',)
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
if self.instance:
self.fields['first_name'].initial = self.instance.user.first_name
self.fields['last_name'].initial = self.instance.user.last_name
def save(self):
first_name = self.cleaned_data.pop('first_name', None)
last_name = self.cleaned_data.pop('last_name', None)
profile = super(UserProfileForm, self).save()
user = profile.user
user.first_name = first_name
user.last_name = last_name
user.save()
return profile
## Instruction:
Fix profile creation. (Need tests badly).
## Code After:
from django.forms import ModelForm
from django.forms.fields import CharField
from models import UserProfile
class UserProfileForm(ModelForm):
first_name = CharField(label='First name', required=False)
last_name = CharField(label='Last name', required=False)
class Meta:
model = UserProfile
# Don't allow users edit someone else's user page,
# or to whitelist themselves
exclude = ('user', 'whitelisted',)
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
if self.is_bound:
self.fields['first_name'].initial = self.instance.user.first_name
self.fields['last_name'].initial = self.instance.user.last_name
def save(self):
first_name = self.cleaned_data.pop('first_name', None)
last_name = self.cleaned_data.pop('last_name', None)
profile = super(UserProfileForm, self).save()
user = profile.user
user.first_name = first_name
user.last_name = last_name
user.save()
return profile
|
from django.forms import ModelForm
from django.forms.fields import CharField
from models import UserProfile
class UserProfileForm(ModelForm):
first_name = CharField(label='First name', required=False)
last_name = CharField(label='Last name', required=False)
-
+
class Meta:
model = UserProfile
# Don't allow users edit someone else's user page,
# or to whitelist themselves
exclude = ('user', 'whitelisted',)
-
+
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
- if self.instance:
? ^^^^^^
+ if self.is_bound:
? +++++ ^
self.fields['first_name'].initial = self.instance.user.first_name
self.fields['last_name'].initial = self.instance.user.last_name
def save(self):
first_name = self.cleaned_data.pop('first_name', None)
last_name = self.cleaned_data.pop('last_name', None)
profile = super(UserProfileForm, self).save()
user = profile.user
user.first_name = first_name
user.last_name = last_name
user.save()
return profile
|
5ffdfd7eb103d6974c3fb782eecaf457f53c972f
|
setup.py
|
setup.py
|
import os
from distutils.core import setup
version = '0.9.3'
def read_file(name):
return open(os.path.join(os.path.dirname(__file__),
name)).read()
readme = read_file('README.rst')
changes = read_file('CHANGES')
setup(
name='django-maintenancemode',
version=version,
description='Django-maintenancemode allows you to temporary shutdown your site for maintenance work',
long_description='\n\n'.join([readme, changes]),
author='Remco Wendt',
author_email='[email protected]',
license="BSD",
platforms=["any"],
url='https://github.com/shanx/django-maintenancemode',
packages=[
'maintenancemode',
'maintenancemode.conf',
'maintenancemode.conf.settings',
'maintenancemode.conf.urls',
'maintenancemode.tests',
'maintenancemode.views',
],
package_data={
'maintenancemode': [
'tests/templates/503.html',
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
)
|
import os
from distutils.core import setup
version = '0.9.3'
here = os.path.abspath(os.path.dirname(__file__))
def read_file(name):
return open(os.path.join(here, name)).read()
readme = read_file('README.rst')
changes = read_file('CHANGES')
setup(
name='django-maintenancemode',
version=version,
description='Django-maintenancemode allows you to temporary shutdown your site for maintenance work',
long_description='\n\n'.join([readme, changes]),
author='Remco Wendt',
author_email='[email protected]',
license="BSD",
platforms=["any"],
url='https://github.com/shanx/django-maintenancemode',
packages=[
'maintenancemode',
'maintenancemode.conf',
'maintenancemode.conf.settings',
'maintenancemode.conf.urls',
'maintenancemode.tests',
'maintenancemode.views',
],
package_data={
'maintenancemode': [
'tests/templates/503.html',
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
)
|
Use the absolute path for the long description to work around CI issues.
|
Use the absolute path for the long description to work around CI issues.
|
Python
|
bsd-3-clause
|
aarsan/django-maintenancemode,shanx/django-maintenancemode,aarsan/django-maintenancemode,21strun/django-maintenancemode,shanx/django-maintenancemode,21strun/django-maintenancemode
|
import os
from distutils.core import setup
version = '0.9.3'
+ here = os.path.abspath(os.path.dirname(__file__))
def read_file(name):
+ return open(os.path.join(here, name)).read()
- return open(os.path.join(os.path.dirname(__file__),
- name)).read()
readme = read_file('README.rst')
changes = read_file('CHANGES')
setup(
name='django-maintenancemode',
version=version,
description='Django-maintenancemode allows you to temporary shutdown your site for maintenance work',
long_description='\n\n'.join([readme, changes]),
author='Remco Wendt',
author_email='[email protected]',
license="BSD",
platforms=["any"],
url='https://github.com/shanx/django-maintenancemode',
packages=[
'maintenancemode',
'maintenancemode.conf',
'maintenancemode.conf.settings',
'maintenancemode.conf.urls',
'maintenancemode.tests',
'maintenancemode.views',
],
package_data={
'maintenancemode': [
'tests/templates/503.html',
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
)
|
Use the absolute path for the long description to work around CI issues.
|
## Code Before:
import os
from distutils.core import setup
version = '0.9.3'
def read_file(name):
return open(os.path.join(os.path.dirname(__file__),
name)).read()
readme = read_file('README.rst')
changes = read_file('CHANGES')
setup(
name='django-maintenancemode',
version=version,
description='Django-maintenancemode allows you to temporary shutdown your site for maintenance work',
long_description='\n\n'.join([readme, changes]),
author='Remco Wendt',
author_email='[email protected]',
license="BSD",
platforms=["any"],
url='https://github.com/shanx/django-maintenancemode',
packages=[
'maintenancemode',
'maintenancemode.conf',
'maintenancemode.conf.settings',
'maintenancemode.conf.urls',
'maintenancemode.tests',
'maintenancemode.views',
],
package_data={
'maintenancemode': [
'tests/templates/503.html',
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
)
## Instruction:
Use the absolute path for the long description to work around CI issues.
## Code After:
import os
from distutils.core import setup
version = '0.9.3'
here = os.path.abspath(os.path.dirname(__file__))
def read_file(name):
return open(os.path.join(here, name)).read()
readme = read_file('README.rst')
changes = read_file('CHANGES')
setup(
name='django-maintenancemode',
version=version,
description='Django-maintenancemode allows you to temporary shutdown your site for maintenance work',
long_description='\n\n'.join([readme, changes]),
author='Remco Wendt',
author_email='[email protected]',
license="BSD",
platforms=["any"],
url='https://github.com/shanx/django-maintenancemode',
packages=[
'maintenancemode',
'maintenancemode.conf',
'maintenancemode.conf.settings',
'maintenancemode.conf.urls',
'maintenancemode.tests',
'maintenancemode.views',
],
package_data={
'maintenancemode': [
'tests/templates/503.html',
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
)
|
import os
from distutils.core import setup
version = '0.9.3'
+ here = os.path.abspath(os.path.dirname(__file__))
def read_file(name):
+ return open(os.path.join(here, name)).read()
- return open(os.path.join(os.path.dirname(__file__),
- name)).read()
readme = read_file('README.rst')
changes = read_file('CHANGES')
setup(
name='django-maintenancemode',
version=version,
description='Django-maintenancemode allows you to temporary shutdown your site for maintenance work',
long_description='\n\n'.join([readme, changes]),
author='Remco Wendt',
author_email='[email protected]',
license="BSD",
platforms=["any"],
url='https://github.com/shanx/django-maintenancemode',
packages=[
'maintenancemode',
'maintenancemode.conf',
'maintenancemode.conf.settings',
'maintenancemode.conf.urls',
'maintenancemode.tests',
'maintenancemode.views',
],
package_data={
'maintenancemode': [
'tests/templates/503.html',
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
)
|
b0ed850da2573cd8a99fc9f628f2da8a3bc97c71
|
greenmine/base/monkey.py
|
greenmine/base/monkey.py
|
from rest_framework import views
from rest_framework import status, exceptions
from rest_framework.response import Response
def patch_api_view():
from django.views.generic import View
if hasattr(views, "_patched"):
return
views._APIView = views.APIView
views._patched = True
class APIView(views.APIView):
def handle_exception(self, exc):
if isinstance(exc, exceptions.NotAuthenticated):
return Response({'detail': 'Not authenticated'},
status=status.HTTP_401_UNAUTHORIZED,
exception=True)
return super(APIView, self).handle_exception(exc)
@classmethod
def as_view(cls, **initkwargs):
view = super(views._APIView, cls).as_view(**initkwargs)
view.cls_instance = cls(**initkwargs)
return view
print "Patching APIView"
views.APIView = APIView
|
from __future__ import print_function
import sys
from rest_framework import views
from rest_framework import status, exceptions
from rest_framework.response import Response
def patch_api_view():
from django.views.generic import View
if hasattr(views, "_patched"):
return
views._APIView = views.APIView
views._patched = True
class APIView(views.APIView):
def handle_exception(self, exc):
if isinstance(exc, exceptions.NotAuthenticated):
return Response({'detail': 'Not authenticated'},
status=status.HTTP_401_UNAUTHORIZED,
exception=True)
return super(APIView, self).handle_exception(exc)
@classmethod
def as_view(cls, **initkwargs):
view = super(views._APIView, cls).as_view(**initkwargs)
view.cls_instance = cls(**initkwargs)
return view
print("Patching APIView", file=sys.stderr)
views.APIView = APIView
|
Send print message to sys.stderr
|
Smallfix: Send print message to sys.stderr
|
Python
|
agpl-3.0
|
EvgeneOskin/taiga-back,taigaio/taiga-back,rajiteh/taiga-back,Zaneh-/bearded-tribble-back,gauravjns/taiga-back,obimod/taiga-back,dycodedev/taiga-back,WALR/taiga-back,joshisa/taiga-back,bdang2012/taiga-back-casting,Rademade/taiga-back,CMLL/taiga-back,crr0004/taiga-back,taigaio/taiga-back,obimod/taiga-back,dayatz/taiga-back,Zaneh-/bearded-tribble-back,seanchen/taiga-back,WALR/taiga-back,forging2012/taiga-back,CMLL/taiga-back,coopsource/taiga-back,EvgeneOskin/taiga-back,gauravjns/taiga-back,19kestier/taiga-back,xdevelsistemas/taiga-back-community,gauravjns/taiga-back,Rademade/taiga-back,CoolCloud/taiga-back,xdevelsistemas/taiga-back-community,CMLL/taiga-back,jeffdwyatt/taiga-back,Rademade/taiga-back,dycodedev/taiga-back,dayatz/taiga-back,EvgeneOskin/taiga-back,gam-phon/taiga-back,frt-arch/taiga-back,forging2012/taiga-back,19kestier/taiga-back,CoolCloud/taiga-back,gam-phon/taiga-back,Rademade/taiga-back,astagi/taiga-back,Tigerwhit4/taiga-back,dycodedev/taiga-back,astronaut1712/taiga-back,seanchen/taiga-back,Tigerwhit4/taiga-back,astagi/taiga-back,19kestier/taiga-back,bdang2012/taiga-back-casting,crr0004/taiga-back,gauravjns/taiga-back,crr0004/taiga-back,gam-phon/taiga-back,CoolCloud/taiga-back,jeffdwyatt/taiga-back,Tigerwhit4/taiga-back,astagi/taiga-back,obimod/taiga-back,forging2012/taiga-back,EvgeneOskin/taiga-back,rajiteh/taiga-back,crr0004/taiga-back,seanchen/taiga-back,CMLL/taiga-back,rajiteh/taiga-back,joshisa/taiga-back,taigaio/taiga-back,forging2012/taiga-back,coopsource/taiga-back,astronaut1712/taiga-back,Zaneh-/bearded-tribble-back,dayatz/taiga-back,joshisa/taiga-back,gam-phon/taiga-back,jeffdwyatt/taiga-back,bdang2012/taiga-back-casting,coopsource/taiga-back,obimod/taiga-back,WALR/taiga-back,coopsource/taiga-back,bdang2012/taiga-back-casting,joshisa/taiga-back,astronaut1712/taiga-back,CoolCloud/taiga-back,astagi/taiga-back,dycodedev/taiga-back,seanchen/taiga-back,Rademade/taiga-back,xdevelsistemas/taiga-back-community,WALR/taiga-back,rajiteh/taiga-back,Tigerwh
it4/taiga-back,frt-arch/taiga-back,astronaut1712/taiga-back,jeffdwyatt/taiga-back,frt-arch/taiga-back
|
+ from __future__ import print_function
+ import sys
from rest_framework import views
from rest_framework import status, exceptions
from rest_framework.response import Response
def patch_api_view():
from django.views.generic import View
if hasattr(views, "_patched"):
return
views._APIView = views.APIView
views._patched = True
class APIView(views.APIView):
def handle_exception(self, exc):
if isinstance(exc, exceptions.NotAuthenticated):
return Response({'detail': 'Not authenticated'},
status=status.HTTP_401_UNAUTHORIZED,
exception=True)
return super(APIView, self).handle_exception(exc)
@classmethod
def as_view(cls, **initkwargs):
view = super(views._APIView, cls).as_view(**initkwargs)
view.cls_instance = cls(**initkwargs)
return view
- print "Patching APIView"
+ print("Patching APIView", file=sys.stderr)
views.APIView = APIView
|
Send print message to sys.stderr
|
## Code Before:
from rest_framework import views
from rest_framework import status, exceptions
from rest_framework.response import Response
def patch_api_view():
from django.views.generic import View
if hasattr(views, "_patched"):
return
views._APIView = views.APIView
views._patched = True
class APIView(views.APIView):
def handle_exception(self, exc):
if isinstance(exc, exceptions.NotAuthenticated):
return Response({'detail': 'Not authenticated'},
status=status.HTTP_401_UNAUTHORIZED,
exception=True)
return super(APIView, self).handle_exception(exc)
@classmethod
def as_view(cls, **initkwargs):
view = super(views._APIView, cls).as_view(**initkwargs)
view.cls_instance = cls(**initkwargs)
return view
print "Patching APIView"
views.APIView = APIView
## Instruction:
Send print message to sys.stderr
## Code After:
from __future__ import print_function
import sys
from rest_framework import views
from rest_framework import status, exceptions
from rest_framework.response import Response
def patch_api_view():
from django.views.generic import View
if hasattr(views, "_patched"):
return
views._APIView = views.APIView
views._patched = True
class APIView(views.APIView):
def handle_exception(self, exc):
if isinstance(exc, exceptions.NotAuthenticated):
return Response({'detail': 'Not authenticated'},
status=status.HTTP_401_UNAUTHORIZED,
exception=True)
return super(APIView, self).handle_exception(exc)
@classmethod
def as_view(cls, **initkwargs):
view = super(views._APIView, cls).as_view(**initkwargs)
view.cls_instance = cls(**initkwargs)
return view
print("Patching APIView", file=sys.stderr)
views.APIView = APIView
|
+ from __future__ import print_function
+ import sys
from rest_framework import views
from rest_framework import status, exceptions
from rest_framework.response import Response
def patch_api_view():
from django.views.generic import View
if hasattr(views, "_patched"):
return
views._APIView = views.APIView
views._patched = True
class APIView(views.APIView):
def handle_exception(self, exc):
if isinstance(exc, exceptions.NotAuthenticated):
return Response({'detail': 'Not authenticated'},
status=status.HTTP_401_UNAUTHORIZED,
exception=True)
return super(APIView, self).handle_exception(exc)
@classmethod
def as_view(cls, **initkwargs):
view = super(views._APIView, cls).as_view(**initkwargs)
view.cls_instance = cls(**initkwargs)
return view
- print "Patching APIView"
+ print("Patching APIView", file=sys.stderr)
views.APIView = APIView
|
03bca9051114a936b584632a72242ca023cbde3e
|
openslides/utils/csv_ext.py
|
openslides/utils/csv_ext.py
|
from csv import Dialect, excel, register_dialect
class excel_semikolon(Dialect):
delimiter = ';'
doublequote = True
lineterminator = '\r\n'
quotechar = '"'
quoting = 0
skipinitialspace = False
def patchup(dialect):
if dialect:
if dialect.delimiter == excel_semikolon.delimiter and \
dialect.quotechar == excel_semikolon.quotechar:
# walks like a duck and talks like a duck.. must be one
dialect.doublequote = True
return dialect
register_dialect("excel_semikolon", excel_semikolon)
|
from csv import Dialect, excel, register_dialect
class excel_semikolon(Dialect):
delimiter = ';'
doublequote = True
lineterminator = '\r\n'
quotechar = '"'
quoting = 0
skipinitialspace = False
def patchup(dialect):
if dialect:
if dialect.delimiter in [excel_semikolon.delimiter, excel.delimiter] and \
dialect.quotechar == excel_semikolon.quotechar:
# walks like a duck and talks like a duck.. must be one
dialect.doublequote = True
return dialect
register_dialect("excel_semikolon", excel_semikolon)
|
Extend patchup for builtin excel dialect
|
Extend patchup for builtin excel dialect
|
Python
|
mit
|
ostcar/OpenSlides,normanjaeckel/OpenSlides,tsiegleauq/OpenSlides,jwinzer/OpenSlides,normanjaeckel/OpenSlides,OpenSlides/OpenSlides,emanuelschuetze/OpenSlides,CatoTH/OpenSlides,jwinzer/OpenSlides,rolandgeider/OpenSlides,emanuelschuetze/OpenSlides,OpenSlides/OpenSlides,jwinzer/OpenSlides,ostcar/OpenSlides,FinnStutzenstein/OpenSlides,boehlke/OpenSlides,boehlke/OpenSlides,tsiegleauq/OpenSlides,FinnStutzenstein/OpenSlides,rolandgeider/OpenSlides,rolandgeider/OpenSlides,emanuelschuetze/OpenSlides,boehlke/OpenSlides,normanjaeckel/OpenSlides,jwinzer/OpenSlides,normanjaeckel/OpenSlides,CatoTH/OpenSlides,FinnStutzenstein/OpenSlides,jwinzer/OpenSlides,ostcar/OpenSlides,CatoTH/OpenSlides,tsiegleauq/OpenSlides,FinnStutzenstein/OpenSlides,CatoTH/OpenSlides,boehlke/OpenSlides,emanuelschuetze/OpenSlides
|
from csv import Dialect, excel, register_dialect
class excel_semikolon(Dialect):
delimiter = ';'
doublequote = True
lineterminator = '\r\n'
quotechar = '"'
quoting = 0
skipinitialspace = False
def patchup(dialect):
if dialect:
- if dialect.delimiter == excel_semikolon.delimiter and \
+ if dialect.delimiter in [excel_semikolon.delimiter, excel.delimiter] and \
dialect.quotechar == excel_semikolon.quotechar:
# walks like a duck and talks like a duck.. must be one
dialect.doublequote = True
return dialect
register_dialect("excel_semikolon", excel_semikolon)
|
Extend patchup for builtin excel dialect
|
## Code Before:
from csv import Dialect, excel, register_dialect
class excel_semikolon(Dialect):
delimiter = ';'
doublequote = True
lineterminator = '\r\n'
quotechar = '"'
quoting = 0
skipinitialspace = False
def patchup(dialect):
if dialect:
if dialect.delimiter == excel_semikolon.delimiter and \
dialect.quotechar == excel_semikolon.quotechar:
# walks like a duck and talks like a duck.. must be one
dialect.doublequote = True
return dialect
register_dialect("excel_semikolon", excel_semikolon)
## Instruction:
Extend patchup for builtin excel dialect
## Code After:
from csv import Dialect, excel, register_dialect
class excel_semikolon(Dialect):
delimiter = ';'
doublequote = True
lineterminator = '\r\n'
quotechar = '"'
quoting = 0
skipinitialspace = False
def patchup(dialect):
if dialect:
if dialect.delimiter in [excel_semikolon.delimiter, excel.delimiter] and \
dialect.quotechar == excel_semikolon.quotechar:
# walks like a duck and talks like a duck.. must be one
dialect.doublequote = True
return dialect
register_dialect("excel_semikolon", excel_semikolon)
|
from csv import Dialect, excel, register_dialect
class excel_semikolon(Dialect):
delimiter = ';'
doublequote = True
lineterminator = '\r\n'
quotechar = '"'
quoting = 0
skipinitialspace = False
def patchup(dialect):
if dialect:
- if dialect.delimiter == excel_semikolon.delimiter and \
? ^^
+ if dialect.delimiter in [excel_semikolon.delimiter, excel.delimiter] and \
? ^^ + ++++++++++++++++++
dialect.quotechar == excel_semikolon.quotechar:
# walks like a duck and talks like a duck.. must be one
dialect.doublequote = True
return dialect
register_dialect("excel_semikolon", excel_semikolon)
|
c313d6fb6803edabb956e1e90f040f8518c334bf
|
app/main/errors.py
|
app/main/errors.py
|
from flask import render_template
from . import main
@main.app_errorhandler(404)
def page_not_found(e):
return render_template("404.html"), 404
|
from flask import render_template
from . import main
@main.app_errorhandler(404)
def page_not_found(e):
return render_template("404.html",
**main.config['BASE_TEMPLATE_DATA']), 404
|
Fix 404 page template static resources
|
Fix 404 page template static resources
|
Python
|
mit
|
mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend
|
from flask import render_template
from . import main
@main.app_errorhandler(404)
def page_not_found(e):
- return render_template("404.html"), 404
+ return render_template("404.html",
+ **main.config['BASE_TEMPLATE_DATA']), 404
|
Fix 404 page template static resources
|
## Code Before:
from flask import render_template
from . import main
@main.app_errorhandler(404)
def page_not_found(e):
return render_template("404.html"), 404
## Instruction:
Fix 404 page template static resources
## Code After:
from flask import render_template
from . import main
@main.app_errorhandler(404)
def page_not_found(e):
return render_template("404.html",
**main.config['BASE_TEMPLATE_DATA']), 404
|
from flask import render_template
from . import main
@main.app_errorhandler(404)
def page_not_found(e):
- return render_template("404.html"), 404
? - ----
+ return render_template("404.html",
+ **main.config['BASE_TEMPLATE_DATA']), 404
|
50130fa011104806cc66331fe5a6ebc3f98c9d5c
|
vistrails/packages/tej/widgets.py
|
vistrails/packages/tej/widgets.py
|
from __future__ import division
from PyQt4 import QtGui
from vistrails.gui.modules.source_configure import SourceConfigurationWidget
class ShellSourceConfigurationWidget(SourceConfigurationWidget):
"""Configuration widget for SubmitShellJob.
Allows the user to edit a shell script that will be run on the server.
"""
def __init__(self, module, controller, parent=None):
SourceConfigurationWidget.__init__(self, module, controller,
QtGui.QTextEdit,
has_inputs=False, has_outputs=False,
parent=parent)
|
from __future__ import division
from vistrails.gui.modules.source_configure import SourceConfigurationWidget
from vistrails.gui.modules.string_configure import TextEditor
class ShellSourceConfigurationWidget(SourceConfigurationWidget):
"""Configuration widget for SubmitShellJob.
Allows the user to edit a shell script that will be run on the server.
"""
def __init__(self, module, controller, parent=None):
SourceConfigurationWidget.__init__(self, module, controller,
TextEditor,
has_inputs=False, has_outputs=False,
parent=parent)
|
Use smart text editor in tej.SubmitShellJob
|
Use smart text editor in tej.SubmitShellJob
|
Python
|
bsd-3-clause
|
minesense/VisTrails,VisTrails/VisTrails,hjanime/VisTrails,hjanime/VisTrails,hjanime/VisTrails,minesense/VisTrails,VisTrails/VisTrails,hjanime/VisTrails,minesense/VisTrails,VisTrails/VisTrails,VisTrails/VisTrails,minesense/VisTrails,minesense/VisTrails,hjanime/VisTrails,VisTrails/VisTrails
|
from __future__ import division
- from PyQt4 import QtGui
-
from vistrails.gui.modules.source_configure import SourceConfigurationWidget
+ from vistrails.gui.modules.string_configure import TextEditor
class ShellSourceConfigurationWidget(SourceConfigurationWidget):
"""Configuration widget for SubmitShellJob.
Allows the user to edit a shell script that will be run on the server.
"""
def __init__(self, module, controller, parent=None):
SourceConfigurationWidget.__init__(self, module, controller,
- QtGui.QTextEdit,
+ TextEditor,
has_inputs=False, has_outputs=False,
parent=parent)
|
Use smart text editor in tej.SubmitShellJob
|
## Code Before:
from __future__ import division
from PyQt4 import QtGui
from vistrails.gui.modules.source_configure import SourceConfigurationWidget
class ShellSourceConfigurationWidget(SourceConfigurationWidget):
"""Configuration widget for SubmitShellJob.
Allows the user to edit a shell script that will be run on the server.
"""
def __init__(self, module, controller, parent=None):
SourceConfigurationWidget.__init__(self, module, controller,
QtGui.QTextEdit,
has_inputs=False, has_outputs=False,
parent=parent)
## Instruction:
Use smart text editor in tej.SubmitShellJob
## Code After:
from __future__ import division
from vistrails.gui.modules.source_configure import SourceConfigurationWidget
from vistrails.gui.modules.string_configure import TextEditor
class ShellSourceConfigurationWidget(SourceConfigurationWidget):
"""Configuration widget for SubmitShellJob.
Allows the user to edit a shell script that will be run on the server.
"""
def __init__(self, module, controller, parent=None):
SourceConfigurationWidget.__init__(self, module, controller,
TextEditor,
has_inputs=False, has_outputs=False,
parent=parent)
|
from __future__ import division
- from PyQt4 import QtGui
-
from vistrails.gui.modules.source_configure import SourceConfigurationWidget
+ from vistrails.gui.modules.string_configure import TextEditor
class ShellSourceConfigurationWidget(SourceConfigurationWidget):
"""Configuration widget for SubmitShellJob.
Allows the user to edit a shell script that will be run on the server.
"""
def __init__(self, module, controller, parent=None):
SourceConfigurationWidget.__init__(self, module, controller,
- QtGui.QTextEdit,
? -------
+ TextEditor,
? ++
has_inputs=False, has_outputs=False,
parent=parent)
|
e87e136dd590134b7be6f5d04aebeed719880c9e
|
paasta_tools/paasta_native_serviceinit.py
|
paasta_tools/paasta_native_serviceinit.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
from paasta_tools import native_mesos_scheduler
from paasta_tools.mesos_tools import status_mesos_tasks_verbose
from paasta_tools.utils import calculate_tail_lines
from paasta_tools.utils import compose_job_id
from paasta_tools.utils import paasta_print
def perform_command(command, service, instance, cluster, verbose, soa_dir):
if verbose > 0:
tail_lines = calculate_tail_lines(verbose_level=verbose)
else:
tail_lines = 0
# We have to add a spacer at the end to make sure we only return
# things for service.main and not service.main_foo
task_id_prefix = "%s%s" % (compose_job_id(service, instance), native_mesos_scheduler.MESOS_TASK_SPACER)
if command == 'status':
paasta_print(status_mesos_tasks_verbose(
job_id=task_id_prefix,
get_short_task_id=lambda x: x,
tail_lines=tail_lines,
))
|
from __future__ import absolute_import
from __future__ import unicode_literals
from paasta_tools.frameworks.native_scheduler import MESOS_TASK_SPACER
from paasta_tools.mesos_tools import status_mesos_tasks_verbose
from paasta_tools.utils import calculate_tail_lines
from paasta_tools.utils import compose_job_id
from paasta_tools.utils import paasta_print
def perform_command(command, service, instance, cluster, verbose, soa_dir):
if verbose > 0:
tail_lines = calculate_tail_lines(verbose_level=verbose)
else:
tail_lines = 0
# We have to add a spacer at the end to make sure we only return
# things for service.main and not service.main_foo
task_id_prefix = "%s%s" % (compose_job_id(service, instance), MESOS_TASK_SPACER)
if command == 'status':
paasta_print(status_mesos_tasks_verbose(
job_id=task_id_prefix,
get_short_task_id=lambda x: x,
tail_lines=tail_lines,
))
|
Fix broken import in native scheduler
|
Fix broken import in native scheduler
|
Python
|
apache-2.0
|
Yelp/paasta,somic/paasta,Yelp/paasta,somic/paasta
|
from __future__ import absolute_import
from __future__ import unicode_literals
- from paasta_tools import native_mesos_scheduler
+ from paasta_tools.frameworks.native_scheduler import MESOS_TASK_SPACER
from paasta_tools.mesos_tools import status_mesos_tasks_verbose
from paasta_tools.utils import calculate_tail_lines
from paasta_tools.utils import compose_job_id
from paasta_tools.utils import paasta_print
def perform_command(command, service, instance, cluster, verbose, soa_dir):
if verbose > 0:
tail_lines = calculate_tail_lines(verbose_level=verbose)
else:
tail_lines = 0
# We have to add a spacer at the end to make sure we only return
# things for service.main and not service.main_foo
- task_id_prefix = "%s%s" % (compose_job_id(service, instance), native_mesos_scheduler.MESOS_TASK_SPACER)
+ task_id_prefix = "%s%s" % (compose_job_id(service, instance), MESOS_TASK_SPACER)
if command == 'status':
paasta_print(status_mesos_tasks_verbose(
job_id=task_id_prefix,
get_short_task_id=lambda x: x,
tail_lines=tail_lines,
))
|
Fix broken import in native scheduler
|
## Code Before:
from __future__ import absolute_import
from __future__ import unicode_literals
from paasta_tools import native_mesos_scheduler
from paasta_tools.mesos_tools import status_mesos_tasks_verbose
from paasta_tools.utils import calculate_tail_lines
from paasta_tools.utils import compose_job_id
from paasta_tools.utils import paasta_print
def perform_command(command, service, instance, cluster, verbose, soa_dir):
if verbose > 0:
tail_lines = calculate_tail_lines(verbose_level=verbose)
else:
tail_lines = 0
# We have to add a spacer at the end to make sure we only return
# things for service.main and not service.main_foo
task_id_prefix = "%s%s" % (compose_job_id(service, instance), native_mesos_scheduler.MESOS_TASK_SPACER)
if command == 'status':
paasta_print(status_mesos_tasks_verbose(
job_id=task_id_prefix,
get_short_task_id=lambda x: x,
tail_lines=tail_lines,
))
## Instruction:
Fix broken import in native scheduler
## Code After:
from __future__ import absolute_import
from __future__ import unicode_literals
from paasta_tools.frameworks.native_scheduler import MESOS_TASK_SPACER
from paasta_tools.mesos_tools import status_mesos_tasks_verbose
from paasta_tools.utils import calculate_tail_lines
from paasta_tools.utils import compose_job_id
from paasta_tools.utils import paasta_print
def perform_command(command, service, instance, cluster, verbose, soa_dir):
if verbose > 0:
tail_lines = calculate_tail_lines(verbose_level=verbose)
else:
tail_lines = 0
# We have to add a spacer at the end to make sure we only return
# things for service.main and not service.main_foo
task_id_prefix = "%s%s" % (compose_job_id(service, instance), MESOS_TASK_SPACER)
if command == 'status':
paasta_print(status_mesos_tasks_verbose(
job_id=task_id_prefix,
get_short_task_id=lambda x: x,
tail_lines=tail_lines,
))
|
from __future__ import absolute_import
from __future__ import unicode_literals
- from paasta_tools import native_mesos_scheduler
+ from paasta_tools.frameworks.native_scheduler import MESOS_TASK_SPACER
from paasta_tools.mesos_tools import status_mesos_tasks_verbose
from paasta_tools.utils import calculate_tail_lines
from paasta_tools.utils import compose_job_id
from paasta_tools.utils import paasta_print
def perform_command(command, service, instance, cluster, verbose, soa_dir):
if verbose > 0:
tail_lines = calculate_tail_lines(verbose_level=verbose)
else:
tail_lines = 0
# We have to add a spacer at the end to make sure we only return
# things for service.main and not service.main_foo
- task_id_prefix = "%s%s" % (compose_job_id(service, instance), native_mesos_scheduler.MESOS_TASK_SPACER)
? -----------------------
+ task_id_prefix = "%s%s" % (compose_job_id(service, instance), MESOS_TASK_SPACER)
if command == 'status':
paasta_print(status_mesos_tasks_verbose(
job_id=task_id_prefix,
get_short_task_id=lambda x: x,
tail_lines=tail_lines,
))
|
26e2feb6f2dfe74ade46cf871167101599c0acba
|
app/timetables/models.py
|
app/timetables/models.py
|
from __future__ import unicode_literals
from django.db import models
from common.mixins import ForceCapitalizeMixin
class Weekday(ForceCapitalizeMixin, models.Model):
"""Model representing the day of the week."""
name = models.CharField(max_length=60, unique=True)
capitalized_field_names = ('name',)
def __str__(self):
return self.name
class Meal(ForceCapitalizeMixin, models.Model):
"""
Model representing food occasions.
This represents an occasion during the day that food
is scheduled to be served. E.g breakfast, lunch, etc.
"""
name = models.CharField(max_length=60, unique=True)
start_time = models.TimeField()
end_time = models.TimeField()
capitalized_field_names = ('name',)
def __str__(self):
return self.name
|
from __future__ import unicode_literals
from django.db import models
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from common.mixins import ForceCapitalizeMixin
class Weekday(ForceCapitalizeMixin, models.Model):
"""Model representing the day of the week."""
name = models.CharField(max_length=60, unique=True)
capitalized_field_names = ('name',)
def __str__(self):
return self.name
class Meal(ForceCapitalizeMixin, models.Model):
"""
Model representing food occasions.
This represents an occasion during the day that food
is scheduled to be served. E.g breakfast, lunch, etc.
"""
name = models.CharField(max_length=60, unique=True)
start_time = models.TimeField()
end_time = models.TimeField()
capitalized_field_names = ('name',)
def clean(self):
if self.start_time >= self.end_time:
raise ValidationError(_('start_time must be less than end_time.'))
super().clean()
def __str__(self):
return self.name
|
Update clean method to ensure that meal end_time is not same as or less than meal start_time
|
Update clean method to ensure that meal end_time is not same as or less than meal start_time
|
Python
|
mit
|
teamtaverna/core
|
from __future__ import unicode_literals
from django.db import models
+ from django.core.exceptions import ValidationError
+ from django.utils.translation import ugettext_lazy as _
from common.mixins import ForceCapitalizeMixin
class Weekday(ForceCapitalizeMixin, models.Model):
"""Model representing the day of the week."""
name = models.CharField(max_length=60, unique=True)
capitalized_field_names = ('name',)
def __str__(self):
return self.name
class Meal(ForceCapitalizeMixin, models.Model):
"""
Model representing food occasions.
This represents an occasion during the day that food
is scheduled to be served. E.g breakfast, lunch, etc.
"""
name = models.CharField(max_length=60, unique=True)
start_time = models.TimeField()
end_time = models.TimeField()
capitalized_field_names = ('name',)
+ def clean(self):
+ if self.start_time >= self.end_time:
+ raise ValidationError(_('start_time must be less than end_time.'))
+ super().clean()
+
def __str__(self):
return self.name
|
Update clean method to ensure that meal end_time is not same as or less than meal start_time
|
## Code Before:
from __future__ import unicode_literals
from django.db import models
from common.mixins import ForceCapitalizeMixin
class Weekday(ForceCapitalizeMixin, models.Model):
"""Model representing the day of the week."""
name = models.CharField(max_length=60, unique=True)
capitalized_field_names = ('name',)
def __str__(self):
return self.name
class Meal(ForceCapitalizeMixin, models.Model):
"""
Model representing food occasions.
This represents an occasion during the day that food
is scheduled to be served. E.g breakfast, lunch, etc.
"""
name = models.CharField(max_length=60, unique=True)
start_time = models.TimeField()
end_time = models.TimeField()
capitalized_field_names = ('name',)
def __str__(self):
return self.name
## Instruction:
Update clean method to ensure that meal end_time is not same as or less than meal start_time
## Code After:
from __future__ import unicode_literals
from django.db import models
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from common.mixins import ForceCapitalizeMixin
class Weekday(ForceCapitalizeMixin, models.Model):
"""Model representing the day of the week."""
name = models.CharField(max_length=60, unique=True)
capitalized_field_names = ('name',)
def __str__(self):
return self.name
class Meal(ForceCapitalizeMixin, models.Model):
"""
Model representing food occasions.
This represents an occasion during the day that food
is scheduled to be served. E.g breakfast, lunch, etc.
"""
name = models.CharField(max_length=60, unique=True)
start_time = models.TimeField()
end_time = models.TimeField()
capitalized_field_names = ('name',)
def clean(self):
if self.start_time >= self.end_time:
raise ValidationError(_('start_time must be less than end_time.'))
super().clean()
def __str__(self):
return self.name
|
from __future__ import unicode_literals
from django.db import models
+ from django.core.exceptions import ValidationError
+ from django.utils.translation import ugettext_lazy as _
from common.mixins import ForceCapitalizeMixin
class Weekday(ForceCapitalizeMixin, models.Model):
"""Model representing the day of the week."""
name = models.CharField(max_length=60, unique=True)
capitalized_field_names = ('name',)
def __str__(self):
return self.name
class Meal(ForceCapitalizeMixin, models.Model):
"""
Model representing food occasions.
This represents an occasion during the day that food
is scheduled to be served. E.g breakfast, lunch, etc.
"""
name = models.CharField(max_length=60, unique=True)
start_time = models.TimeField()
end_time = models.TimeField()
capitalized_field_names = ('name',)
+ def clean(self):
+ if self.start_time >= self.end_time:
+ raise ValidationError(_('start_time must be less than end_time.'))
+ super().clean()
+
def __str__(self):
return self.name
|
09be419960d208967771d93025c4f86b80ebe4e9
|
python/qibuild/__init__.py
|
python/qibuild/__init__.py
|
""" This module contains a few functions for running CMake and building projects. """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import os
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
QIBUILD_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
def stringify_env(env):
""" convert each key value pairs to strings in env list"""
return dict(((str(key), str(val)) for key, val in env.items()))
|
""" This module contains a few functions for running CMake and building projects. """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import os
QIBUILD_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
def stringify_env(env):
""" convert each key value pairs to strings in env list"""
return dict(((str(key), str(val)) for key, val in env.items()))
|
Revert "use utf-8 by default"
|
Revert "use utf-8 by default"
This reverts commit a986aac5e3b4f065d6c2ab70129bde105651d2ca.
|
Python
|
bsd-3-clause
|
aldebaran/qibuild,aldebaran/qibuild,aldebaran/qibuild,aldebaran/qibuild
|
""" This module contains a few functions for running CMake and building projects. """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import os
- import sys
- reload(sys)
- sys.setdefaultencoding('utf-8')
QIBUILD_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
def stringify_env(env):
""" convert each key value pairs to strings in env list"""
return dict(((str(key), str(val)) for key, val in env.items()))
|
Revert "use utf-8 by default"
|
## Code Before:
""" This module contains a few functions for running CMake and building projects. """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import os
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
QIBUILD_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
def stringify_env(env):
""" convert each key value pairs to strings in env list"""
return dict(((str(key), str(val)) for key, val in env.items()))
## Instruction:
Revert "use utf-8 by default"
## Code After:
""" This module contains a few functions for running CMake and building projects. """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import os
QIBUILD_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
def stringify_env(env):
""" convert each key value pairs to strings in env list"""
return dict(((str(key), str(val)) for key, val in env.items()))
|
""" This module contains a few functions for running CMake and building projects. """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import os
- import sys
- reload(sys)
- sys.setdefaultencoding('utf-8')
QIBUILD_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
def stringify_env(env):
""" convert each key value pairs to strings in env list"""
return dict(((str(key), str(val)) for key, val in env.items()))
|
624ce97b011100cc1aac9446c7f1c8a97eae5f34
|
workshops/migrations/0040_add_country_to_online_events.py
|
workshops/migrations/0040_add_country_to_online_events.py
|
from __future__ import unicode_literals
from django.db import models, migrations
def add_country_to_online_events(apps, schema_editor):
"""Add an 'Online' country to all events tagged with 'online' tag."""
Event = apps.get_model('workshops', 'Event')
Tag = apps.get_model('workshops', 'Tag')
online, _ = Tag.objects.get_or_create(
name='online',
defaults={'details': 'Events taking place entirely online'},
)
Event.objects.filter(country__isnull=True, tags__in=[online]) \
.update(country='W3')
class Migration(migrations.Migration):
dependencies = [
('workshops', '0039_add_permission_groups'),
]
operations = [
migrations.RunPython(add_country_to_online_events),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
def add_country_to_online_events(apps, schema_editor):
"""Add an 'Online' country to all events tagged with 'online' tag."""
Event = apps.get_model('workshops', 'Event')
Tag = apps.get_model('workshops', 'Tag')
online, _ = Tag.objects.get_or_create(
name='online',
defaults={'details': 'Events taking place entirely online'},
)
# Oceanic Pole of Inaccessibility coordinates:
# https://en.wikipedia.org/wiki/Pole_of_inaccessibility#Oceanic_pole_of_inaccessibility
latitude = -48.876667
longitude = -123.393333
Event.objects.filter(country__isnull=True, tags__in=[online]) \
.update(country='W3', latitude=latitude, longitude=longitude,
venue='Internet')
class Migration(migrations.Migration):
dependencies = [
('workshops', '0039_add_permission_groups'),
]
operations = [
migrations.RunPython(add_country_to_online_events),
]
|
Migrate online events to the Pole of Inaccessibility lat/long
|
Migrate online events to the Pole of Inaccessibility lat/long
...and 'internet' as a venue.
|
Python
|
mit
|
pbanaszkiewicz/amy,vahtras/amy,swcarpentry/amy,swcarpentry/amy,wking/swc-amy,vahtras/amy,wking/swc-amy,wking/swc-amy,vahtras/amy,pbanaszkiewicz/amy,wking/swc-amy,pbanaszkiewicz/amy,swcarpentry/amy
|
from __future__ import unicode_literals
from django.db import models, migrations
def add_country_to_online_events(apps, schema_editor):
"""Add an 'Online' country to all events tagged with 'online' tag."""
Event = apps.get_model('workshops', 'Event')
Tag = apps.get_model('workshops', 'Tag')
online, _ = Tag.objects.get_or_create(
name='online',
defaults={'details': 'Events taking place entirely online'},
)
+ # Oceanic Pole of Inaccessibility coordinates:
+ # https://en.wikipedia.org/wiki/Pole_of_inaccessibility#Oceanic_pole_of_inaccessibility
+ latitude = -48.876667
+ longitude = -123.393333
+
Event.objects.filter(country__isnull=True, tags__in=[online]) \
- .update(country='W3')
+ .update(country='W3', latitude=latitude, longitude=longitude,
+ venue='Internet')
class Migration(migrations.Migration):
dependencies = [
('workshops', '0039_add_permission_groups'),
]
operations = [
migrations.RunPython(add_country_to_online_events),
]
|
Migrate online events to the Pole of Inaccessibility lat/long
|
## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
def add_country_to_online_events(apps, schema_editor):
"""Add an 'Online' country to all events tagged with 'online' tag."""
Event = apps.get_model('workshops', 'Event')
Tag = apps.get_model('workshops', 'Tag')
online, _ = Tag.objects.get_or_create(
name='online',
defaults={'details': 'Events taking place entirely online'},
)
Event.objects.filter(country__isnull=True, tags__in=[online]) \
.update(country='W3')
class Migration(migrations.Migration):
dependencies = [
('workshops', '0039_add_permission_groups'),
]
operations = [
migrations.RunPython(add_country_to_online_events),
]
## Instruction:
Migrate online events to the Pole of Inaccessibility lat/long
## Code After:
from __future__ import unicode_literals
from django.db import models, migrations
def add_country_to_online_events(apps, schema_editor):
"""Add an 'Online' country to all events tagged with 'online' tag."""
Event = apps.get_model('workshops', 'Event')
Tag = apps.get_model('workshops', 'Tag')
online, _ = Tag.objects.get_or_create(
name='online',
defaults={'details': 'Events taking place entirely online'},
)
# Oceanic Pole of Inaccessibility coordinates:
# https://en.wikipedia.org/wiki/Pole_of_inaccessibility#Oceanic_pole_of_inaccessibility
latitude = -48.876667
longitude = -123.393333
Event.objects.filter(country__isnull=True, tags__in=[online]) \
.update(country='W3', latitude=latitude, longitude=longitude,
venue='Internet')
class Migration(migrations.Migration):
dependencies = [
('workshops', '0039_add_permission_groups'),
]
operations = [
migrations.RunPython(add_country_to_online_events),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
def add_country_to_online_events(apps, schema_editor):
"""Add an 'Online' country to all events tagged with 'online' tag."""
Event = apps.get_model('workshops', 'Event')
Tag = apps.get_model('workshops', 'Tag')
online, _ = Tag.objects.get_or_create(
name='online',
defaults={'details': 'Events taking place entirely online'},
)
+ # Oceanic Pole of Inaccessibility coordinates:
+ # https://en.wikipedia.org/wiki/Pole_of_inaccessibility#Oceanic_pole_of_inaccessibility
+ latitude = -48.876667
+ longitude = -123.393333
+
Event.objects.filter(country__isnull=True, tags__in=[online]) \
- .update(country='W3')
+ .update(country='W3', latitude=latitude, longitude=longitude,
+ venue='Internet')
class Migration(migrations.Migration):
dependencies = [
('workshops', '0039_add_permission_groups'),
]
operations = [
migrations.RunPython(add_country_to_online_events),
]
|
1f8b54d22cee5653254514bf07c1b4cb1eb147cb
|
_grabconfig.py
|
_grabconfig.py
|
import os, shutil
files = ["/etc/crontab",
"/usr/local/bin/ssu", "/usr/local/bin/xyzzy",
"/home/dagon/.bashrc","/home/dagon/.i3status.conf", "/home/dagon/.profile", "/home/dagon/.vimrc", "/home/dagon/.i3/config", "/home/dagon/.vim",
"/home/dagon/.config/bless", "/home/dagon/.config/terminator", "/bla.txt"]
for item in files:
dest = os.getcwd() + item
if os.path.isdir(item):
try:
shutil.rmtree(dest)
except: pass
shutil.copytree(item, dest)
else:
try:
os.remove(dest)
except: pass
shutil.copyfile(item, dest)
|
import os, shutil
files = ["/etc/crontab",
"/usr/local/bin/ssu", "/usr/local/bin/xyzzy",
"/home/dagon/.bashrc","/home/dagon/.i3status.conf", "/home/dagon/.profile", "/home/dagon/.vimrc", "/home/dagon/.i3/config", "/home/dagon/.vim",
"/home/dagon/.config/bless", "/home/dagon/.config/terminator"]
for item in files:
dest = os.getcwd() + item
if os.path.isdir(item):
try:
shutil.rmtree(dest)
except: pass
shutil.copytree(item, dest)
else:
try:
os.remove(dest)
except: pass
shutil.copyfile(item, dest)
|
Remove test item from config grabbing script
|
Remove test item from config grabbing script
|
Python
|
unlicense
|
weloxux/dotfiles,weloxux/dotfiles
|
import os, shutil
files = ["/etc/crontab",
"/usr/local/bin/ssu", "/usr/local/bin/xyzzy",
"/home/dagon/.bashrc","/home/dagon/.i3status.conf", "/home/dagon/.profile", "/home/dagon/.vimrc", "/home/dagon/.i3/config", "/home/dagon/.vim",
- "/home/dagon/.config/bless", "/home/dagon/.config/terminator", "/bla.txt"]
+ "/home/dagon/.config/bless", "/home/dagon/.config/terminator"]
for item in files:
dest = os.getcwd() + item
if os.path.isdir(item):
try:
shutil.rmtree(dest)
except: pass
shutil.copytree(item, dest)
else:
try:
os.remove(dest)
except: pass
shutil.copyfile(item, dest)
|
Remove test item from config grabbing script
|
## Code Before:
import os, shutil
files = ["/etc/crontab",
"/usr/local/bin/ssu", "/usr/local/bin/xyzzy",
"/home/dagon/.bashrc","/home/dagon/.i3status.conf", "/home/dagon/.profile", "/home/dagon/.vimrc", "/home/dagon/.i3/config", "/home/dagon/.vim",
"/home/dagon/.config/bless", "/home/dagon/.config/terminator", "/bla.txt"]
for item in files:
dest = os.getcwd() + item
if os.path.isdir(item):
try:
shutil.rmtree(dest)
except: pass
shutil.copytree(item, dest)
else:
try:
os.remove(dest)
except: pass
shutil.copyfile(item, dest)
## Instruction:
Remove test item from config grabbing script
## Code After:
import os, shutil
files = ["/etc/crontab",
"/usr/local/bin/ssu", "/usr/local/bin/xyzzy",
"/home/dagon/.bashrc","/home/dagon/.i3status.conf", "/home/dagon/.profile", "/home/dagon/.vimrc", "/home/dagon/.i3/config", "/home/dagon/.vim",
"/home/dagon/.config/bless", "/home/dagon/.config/terminator"]
for item in files:
dest = os.getcwd() + item
if os.path.isdir(item):
try:
shutil.rmtree(dest)
except: pass
shutil.copytree(item, dest)
else:
try:
os.remove(dest)
except: pass
shutil.copyfile(item, dest)
|
import os, shutil
files = ["/etc/crontab",
"/usr/local/bin/ssu", "/usr/local/bin/xyzzy",
"/home/dagon/.bashrc","/home/dagon/.i3status.conf", "/home/dagon/.profile", "/home/dagon/.vimrc", "/home/dagon/.i3/config", "/home/dagon/.vim",
- "/home/dagon/.config/bless", "/home/dagon/.config/terminator", "/bla.txt"]
? ------------
+ "/home/dagon/.config/bless", "/home/dagon/.config/terminator"]
for item in files:
dest = os.getcwd() + item
if os.path.isdir(item):
try:
shutil.rmtree(dest)
except: pass
shutil.copytree(item, dest)
else:
try:
os.remove(dest)
except: pass
shutil.copyfile(item, dest)
|
baf09f8b308626abb81431ddca4498409fc9d5ce
|
campaigns/tests/test_views.py
|
campaigns/tests/test_views.py
|
from django.test import TestCase
from django.http import HttpRequest
from campaigns.views import create_campaign
from campaigns.models import Campaign
from campaigns.forms import CampaignForm
class HomePageTest(TestCase):
def test_does_root_url_resolves_the_home_page(self):
called = self.client.get('/')
self.assertTemplateUsed(called, 'home.html')
class CampaignsViewsTest(TestCase):
def test_does_create_campaign_resolves_the_right_url(self):
called = self.client.get('/campaigns/new')
self.assertTemplateUsed(called, 'new_campaign.html')
# Trying to do self.client.post was using GET request for some
# reason so i made it that ugly
def test_does_create_camapign_saves_objects_with_POST_requests(self):
self.assertEqual(Campaign.objects.count(), 0)
request = HttpRequest()
request.method = 'POST'
request.POST['title'] = 'C1'
request.POST['description'] = 'C1Descr'
create_campaign(request)
campaign = Campaign.objects.first()
self.assertEqual(Campaign.objects.count(), 1)
self.assertEqual(campaign.title, 'C1')
self.assertEqual(campaign.description, 'C1Descr')
|
from django.test import TestCase
from django.http import HttpRequest
from campaigns.views import create_campaign
from campaigns.models import Campaign
from campaigns.forms import CampaignForm
def make_POST_request(titleValue, descriptionValue):
request = HttpRequest()
request.method = 'POST'
request.POST['title'] = titleValue
request.POST['description'] = descriptionValue
return request
class HomePageTest(TestCase):
def test_does_root_url_resolves_the_home_page(self):
called = self.client.get('/')
self.assertTemplateUsed(called, 'home.html')
class CampaignsViewsTest(TestCase):
def test_does_create_campaign_resolves_the_right_url(self):
called = self.client.get('/campaigns/new')
self.assertTemplateUsed(called, 'new_campaign.html')
# Trying to do self.client.post was using GET request for some
# reason so i made it that ugly
def test_does_create_campaign_saves_objects_with_POST_requests(self):
self.assertEqual(Campaign.objects.count(), 0)
create_campaign(make_POST_request('C1', 'C1Descr'))
campaign = Campaign.objects.first()
self.assertEqual(Campaign.objects.count(), 1)
self.assertEqual(campaign.title, 'C1')
self.assertEqual(campaign.description, 'C1Descr')
def test_create_campaign_dont_saves_empty_objects(self):
self.assertEqual(Campaign.objects.count(), 0)
create_campaign(make_POST_request('', ''))
self.assertEqual(Campaign.objects.count(), 0)
|
Refactor some redundancy in the views tests
|
Refactor some redundancy in the views tests
|
Python
|
apache-2.0
|
Springsteen/tues_admission,Springsteen/tues_admission,Springsteen/tues_admission,Springsteen/tues_admission
|
from django.test import TestCase
from django.http import HttpRequest
from campaigns.views import create_campaign
from campaigns.models import Campaign
from campaigns.forms import CampaignForm
+ def make_POST_request(titleValue, descriptionValue):
+ request = HttpRequest()
+ request.method = 'POST'
+ request.POST['title'] = titleValue
+ request.POST['description'] = descriptionValue
+ return request
+
class HomePageTest(TestCase):
def test_does_root_url_resolves_the_home_page(self):
called = self.client.get('/')
self.assertTemplateUsed(called, 'home.html')
+
class CampaignsViewsTest(TestCase):
def test_does_create_campaign_resolves_the_right_url(self):
called = self.client.get('/campaigns/new')
self.assertTemplateUsed(called, 'new_campaign.html')
# Trying to do self.client.post was using GET request for some
# reason so i made it that ugly
- def test_does_create_camapign_saves_objects_with_POST_requests(self):
+ def test_does_create_campaign_saves_objects_with_POST_requests(self):
self.assertEqual(Campaign.objects.count(), 0)
+ create_campaign(make_POST_request('C1', 'C1Descr'))
- request = HttpRequest()
- request.method = 'POST'
- request.POST['title'] = 'C1'
- request.POST['description'] = 'C1Descr'
- create_campaign(request)
campaign = Campaign.objects.first()
self.assertEqual(Campaign.objects.count(), 1)
self.assertEqual(campaign.title, 'C1')
self.assertEqual(campaign.description, 'C1Descr')
+ def test_create_campaign_dont_saves_empty_objects(self):
+ self.assertEqual(Campaign.objects.count(), 0)
+ create_campaign(make_POST_request('', ''))
+ self.assertEqual(Campaign.objects.count(), 0)
+
|
Refactor some redundancy in the views tests
|
## Code Before:
from django.test import TestCase
from django.http import HttpRequest
from campaigns.views import create_campaign
from campaigns.models import Campaign
from campaigns.forms import CampaignForm
class HomePageTest(TestCase):
def test_does_root_url_resolves_the_home_page(self):
called = self.client.get('/')
self.assertTemplateUsed(called, 'home.html')
class CampaignsViewsTest(TestCase):
def test_does_create_campaign_resolves_the_right_url(self):
called = self.client.get('/campaigns/new')
self.assertTemplateUsed(called, 'new_campaign.html')
# Trying to do self.client.post was using GET request for some
# reason so i made it that ugly
def test_does_create_camapign_saves_objects_with_POST_requests(self):
self.assertEqual(Campaign.objects.count(), 0)
request = HttpRequest()
request.method = 'POST'
request.POST['title'] = 'C1'
request.POST['description'] = 'C1Descr'
create_campaign(request)
campaign = Campaign.objects.first()
self.assertEqual(Campaign.objects.count(), 1)
self.assertEqual(campaign.title, 'C1')
self.assertEqual(campaign.description, 'C1Descr')
## Instruction:
Refactor some redundancy in the views tests
## Code After:
from django.test import TestCase
from django.http import HttpRequest
from campaigns.views import create_campaign
from campaigns.models import Campaign
from campaigns.forms import CampaignForm
def make_POST_request(titleValue, descriptionValue):
request = HttpRequest()
request.method = 'POST'
request.POST['title'] = titleValue
request.POST['description'] = descriptionValue
return request
class HomePageTest(TestCase):
def test_does_root_url_resolves_the_home_page(self):
called = self.client.get('/')
self.assertTemplateUsed(called, 'home.html')
class CampaignsViewsTest(TestCase):
def test_does_create_campaign_resolves_the_right_url(self):
called = self.client.get('/campaigns/new')
self.assertTemplateUsed(called, 'new_campaign.html')
# Trying to do self.client.post was using GET request for some
# reason so i made it that ugly
def test_does_create_campaign_saves_objects_with_POST_requests(self):
self.assertEqual(Campaign.objects.count(), 0)
create_campaign(make_POST_request('C1', 'C1Descr'))
campaign = Campaign.objects.first()
self.assertEqual(Campaign.objects.count(), 1)
self.assertEqual(campaign.title, 'C1')
self.assertEqual(campaign.description, 'C1Descr')
def test_create_campaign_dont_saves_empty_objects(self):
self.assertEqual(Campaign.objects.count(), 0)
create_campaign(make_POST_request('', ''))
self.assertEqual(Campaign.objects.count(), 0)
|
from django.test import TestCase
from django.http import HttpRequest
from campaigns.views import create_campaign
from campaigns.models import Campaign
from campaigns.forms import CampaignForm
+ def make_POST_request(titleValue, descriptionValue):
+ request = HttpRequest()
+ request.method = 'POST'
+ request.POST['title'] = titleValue
+ request.POST['description'] = descriptionValue
+ return request
+
class HomePageTest(TestCase):
def test_does_root_url_resolves_the_home_page(self):
called = self.client.get('/')
self.assertTemplateUsed(called, 'home.html')
+
class CampaignsViewsTest(TestCase):
def test_does_create_campaign_resolves_the_right_url(self):
called = self.client.get('/campaigns/new')
self.assertTemplateUsed(called, 'new_campaign.html')
# Trying to do self.client.post was using GET request for some
# reason so i made it that ugly
- def test_does_create_camapign_saves_objects_with_POST_requests(self):
? -
+ def test_does_create_campaign_saves_objects_with_POST_requests(self):
? +
self.assertEqual(Campaign.objects.count(), 0)
+ create_campaign(make_POST_request('C1', 'C1Descr'))
- request = HttpRequest()
- request.method = 'POST'
- request.POST['title'] = 'C1'
- request.POST['description'] = 'C1Descr'
- create_campaign(request)
campaign = Campaign.objects.first()
self.assertEqual(Campaign.objects.count(), 1)
self.assertEqual(campaign.title, 'C1')
self.assertEqual(campaign.description, 'C1Descr')
+ def test_create_campaign_dont_saves_empty_objects(self):
+ self.assertEqual(Campaign.objects.count(), 0)
+ create_campaign(make_POST_request('', ''))
+ self.assertEqual(Campaign.objects.count(), 0)
+
|
0612ea6aea5a10e5639a710500c321e3c9e02495
|
interfaces/python/setup.py
|
interfaces/python/setup.py
|
from distutils.core import setup, Extension
import fnmatch
import os
import re
cppSources = []
for root, dirnames, filenames in os.walk('.'):
if root == 'src': cppSources.append(os.path.join(root, 'Infomap.cpp'))
else:
for filename in fnmatch.filter(filenames, '*.cpp'):
cppSources.append(os.path.join(root, filename))
# Extract Infomap version
infomapVersion = ''
with open(os.path.join('src', 'io', 'version.cpp')) as f:
for line in f:
m = re.match( r'.+INFOMAP_VERSION = \"(.+)\"', line)
if m: infomapVersion = m.groups()[0]
infomap_module = Extension('_infomap',
sources=cppSources,
extra_compile_args=['-DAS_LIB']
)
setup (name = 'infomap',
version = infomapVersion,
author = "Team at mapequation.org",
description = """Infomap clustering algorithm""",
url = "www.mapequation.org",
ext_modules = [infomap_module],
py_modules = ["infomap"],
)
|
from distutils.core import setup, Extension
from distutils.file_util import copy_file
import sysconfig
import fnmatch
import os
import re
cppSources = []
for root, dirnames, filenames in os.walk('.'):
if root == 'src': cppSources.append(os.path.join(root, 'Infomap.cpp'))
else:
for filename in fnmatch.filter(filenames, '*.cpp'):
cppSources.append(os.path.join(root, filename))
# Extract Infomap version
infomapVersion = ''
with open(os.path.join('src', 'io', 'version.cpp')) as f:
for line in f:
m = re.match( r'.+INFOMAP_VERSION = \"(.+)\"', line)
if m: infomapVersion = m.groups()[0]
infomap_module = Extension('_infomap',
sources=cppSources,
extra_compile_args=['-DAS_LIB']
)
setup (name = 'infomap',
version = infomapVersion,
author = "Team at mapequation.org",
description = """Infomap clustering algorithm""",
url = "www.mapequation.org",
ext_modules = [infomap_module],
py_modules = ["infomap"],
)
# Clean ABI Version Tagged .so Files
libFilename = '_infomap{}'.format(sysconfig.get_config_var('EXT_SUFFIX'))
copy_file(libFilename, '_infomap.so')
|
Fix python library problem due to ABI tagged .so files
|
Fix python library problem due to ABI tagged .so files
|
Python
|
agpl-3.0
|
mapequation/infomap,mapequation/infomap,mapequation/infomap,mapequation/infomap
|
from distutils.core import setup, Extension
+ from distutils.file_util import copy_file
+ import sysconfig
import fnmatch
import os
import re
cppSources = []
for root, dirnames, filenames in os.walk('.'):
if root == 'src': cppSources.append(os.path.join(root, 'Infomap.cpp'))
else:
for filename in fnmatch.filter(filenames, '*.cpp'):
cppSources.append(os.path.join(root, filename))
# Extract Infomap version
infomapVersion = ''
with open(os.path.join('src', 'io', 'version.cpp')) as f:
for line in f:
m = re.match( r'.+INFOMAP_VERSION = \"(.+)\"', line)
if m: infomapVersion = m.groups()[0]
infomap_module = Extension('_infomap',
sources=cppSources,
extra_compile_args=['-DAS_LIB']
)
setup (name = 'infomap',
version = infomapVersion,
author = "Team at mapequation.org",
description = """Infomap clustering algorithm""",
url = "www.mapequation.org",
ext_modules = [infomap_module],
py_modules = ["infomap"],
)
+
+ # Clean ABI Version Tagged .so Files
+ libFilename = '_infomap{}'.format(sysconfig.get_config_var('EXT_SUFFIX'))
+ copy_file(libFilename, '_infomap.so')
|
Fix python library problem due to ABI tagged .so files
|
## Code Before:
from distutils.core import setup, Extension
import fnmatch
import os
import re
cppSources = []
for root, dirnames, filenames in os.walk('.'):
if root == 'src': cppSources.append(os.path.join(root, 'Infomap.cpp'))
else:
for filename in fnmatch.filter(filenames, '*.cpp'):
cppSources.append(os.path.join(root, filename))
# Extract Infomap version
infomapVersion = ''
with open(os.path.join('src', 'io', 'version.cpp')) as f:
for line in f:
m = re.match( r'.+INFOMAP_VERSION = \"(.+)\"', line)
if m: infomapVersion = m.groups()[0]
infomap_module = Extension('_infomap',
sources=cppSources,
extra_compile_args=['-DAS_LIB']
)
setup (name = 'infomap',
version = infomapVersion,
author = "Team at mapequation.org",
description = """Infomap clustering algorithm""",
url = "www.mapequation.org",
ext_modules = [infomap_module],
py_modules = ["infomap"],
)
## Instruction:
Fix python library problem due to ABI tagged .so files
## Code After:
from distutils.core import setup, Extension
from distutils.file_util import copy_file
import sysconfig
import fnmatch
import os
import re
cppSources = []
for root, dirnames, filenames in os.walk('.'):
if root == 'src': cppSources.append(os.path.join(root, 'Infomap.cpp'))
else:
for filename in fnmatch.filter(filenames, '*.cpp'):
cppSources.append(os.path.join(root, filename))
# Extract Infomap version
infomapVersion = ''
with open(os.path.join('src', 'io', 'version.cpp')) as f:
for line in f:
m = re.match( r'.+INFOMAP_VERSION = \"(.+)\"', line)
if m: infomapVersion = m.groups()[0]
infomap_module = Extension('_infomap',
sources=cppSources,
extra_compile_args=['-DAS_LIB']
)
setup (name = 'infomap',
version = infomapVersion,
author = "Team at mapequation.org",
description = """Infomap clustering algorithm""",
url = "www.mapequation.org",
ext_modules = [infomap_module],
py_modules = ["infomap"],
)
# Clean ABI Version Tagged .so Files
libFilename = '_infomap{}'.format(sysconfig.get_config_var('EXT_SUFFIX'))
copy_file(libFilename, '_infomap.so')
|
from distutils.core import setup, Extension
+ from distutils.file_util import copy_file
+ import sysconfig
import fnmatch
import os
import re
cppSources = []
for root, dirnames, filenames in os.walk('.'):
if root == 'src': cppSources.append(os.path.join(root, 'Infomap.cpp'))
else:
for filename in fnmatch.filter(filenames, '*.cpp'):
cppSources.append(os.path.join(root, filename))
# Extract Infomap version
infomapVersion = ''
with open(os.path.join('src', 'io', 'version.cpp')) as f:
for line in f:
m = re.match( r'.+INFOMAP_VERSION = \"(.+)\"', line)
if m: infomapVersion = m.groups()[0]
infomap_module = Extension('_infomap',
sources=cppSources,
extra_compile_args=['-DAS_LIB']
)
setup (name = 'infomap',
version = infomapVersion,
author = "Team at mapequation.org",
description = """Infomap clustering algorithm""",
url = "www.mapequation.org",
ext_modules = [infomap_module],
py_modules = ["infomap"],
)
+
+ # Clean ABI Version Tagged .so Files
+ libFilename = '_infomap{}'.format(sysconfig.get_config_var('EXT_SUFFIX'))
+ copy_file(libFilename, '_infomap.so')
|
2cbbcb6c900869d37f9a11ae56ea38f548233274
|
dask/compatibility.py
|
dask/compatibility.py
|
from __future__ import absolute_import, division, print_function
import sys
PY3 = sys.version_info[0] == 3
PY2 = sys.version_info[0] == 2
if PY3:
import builtins
from queue import Queue, Empty
from itertools import zip_longest
from io import StringIO, BytesIO
from urllib.request import urlopen
from urllib.parse import urlparse
from urllib.parse import quote, unquote
unicode = str
long = int
def apply(func, args, kwargs=None):
if not isinstance(args, list) and kwargs is None:
return func(args)
elif not isinstance(args, list):
return func(args, **kwargs)
elif kwargs:
return func(*args, **kwargs)
else:
return func(*args)
range = range
else:
import __builtin__ as builtins
from Queue import Queue, Empty
import operator
from itertools import izip_longest as zip_longest
from StringIO import StringIO
from io import BytesIO
from urllib2 import urlopen
from urlparse import urlparse
from urllib import quote, unquote
unicode = unicode
long = long
apply = apply
range = xrange
def skip(func):
return
|
from __future__ import absolute_import, division, print_function
import sys
PY3 = sys.version_info[0] == 3
PY2 = sys.version_info[0] == 2
if PY3:
import builtins
from queue import Queue, Empty
from itertools import zip_longest
from io import StringIO, BytesIO
from urllib.request import urlopen
from urllib.parse import urlparse
from urllib.parse import quote, unquote
unicode = str
long = int
def apply(func, args, kwargs=None):
if not isinstance(args, list) and not isinstance(args, tuple) and kwargs is None:
return func(args)
elif not isinstance(args, list) and not isinstance(args, tuple):
return func(args, **kwargs)
elif kwargs:
return func(*args, **kwargs)
else:
return func(*args)
range = range
else:
import __builtin__ as builtins
from Queue import Queue, Empty
import operator
from itertools import izip_longest as zip_longest
from StringIO import StringIO
from io import BytesIO
from urllib2 import urlopen
from urlparse import urlparse
from urllib import quote, unquote
unicode = unicode
long = long
apply = apply
range = xrange
def skip(func):
return
|
Allow for tuple-based args in map also
|
Allow for tuple-based args in map also
|
Python
|
bsd-3-clause
|
vikhyat/dask,blaze/dask,mrocklin/dask,wiso/dask,mikegraham/dask,pombredanne/dask,pombredanne/dask,clarkfitzg/dask,jayhetee/dask,cowlicks/dask,blaze/dask,jayhetee/dask,cpcloud/dask,jcrist/dask,mraspaud/dask,jakirkham/dask,jakirkham/dask,ContinuumIO/dask,mrocklin/dask,chrisbarber/dask,dask/dask,ssanderson/dask,PhE/dask,dask/dask,PhE/dask,clarkfitzg/dask,gameduell/dask,ContinuumIO/dask,vikhyat/dask,wiso/dask,ssanderson/dask,mraspaud/dask,jcrist/dask
|
from __future__ import absolute_import, division, print_function
import sys
PY3 = sys.version_info[0] == 3
PY2 = sys.version_info[0] == 2
if PY3:
import builtins
from queue import Queue, Empty
from itertools import zip_longest
from io import StringIO, BytesIO
from urllib.request import urlopen
from urllib.parse import urlparse
from urllib.parse import quote, unquote
unicode = str
long = int
def apply(func, args, kwargs=None):
- if not isinstance(args, list) and kwargs is None:
+ if not isinstance(args, list) and not isinstance(args, tuple) and kwargs is None:
return func(args)
- elif not isinstance(args, list):
+ elif not isinstance(args, list) and not isinstance(args, tuple):
return func(args, **kwargs)
elif kwargs:
return func(*args, **kwargs)
else:
return func(*args)
range = range
else:
import __builtin__ as builtins
from Queue import Queue, Empty
import operator
from itertools import izip_longest as zip_longest
from StringIO import StringIO
from io import BytesIO
from urllib2 import urlopen
from urlparse import urlparse
from urllib import quote, unquote
unicode = unicode
long = long
apply = apply
range = xrange
def skip(func):
return
|
Allow for tuple-based args in map also
|
## Code Before:
from __future__ import absolute_import, division, print_function
import sys
PY3 = sys.version_info[0] == 3
PY2 = sys.version_info[0] == 2
if PY3:
import builtins
from queue import Queue, Empty
from itertools import zip_longest
from io import StringIO, BytesIO
from urllib.request import urlopen
from urllib.parse import urlparse
from urllib.parse import quote, unquote
unicode = str
long = int
def apply(func, args, kwargs=None):
if not isinstance(args, list) and kwargs is None:
return func(args)
elif not isinstance(args, list):
return func(args, **kwargs)
elif kwargs:
return func(*args, **kwargs)
else:
return func(*args)
range = range
else:
import __builtin__ as builtins
from Queue import Queue, Empty
import operator
from itertools import izip_longest as zip_longest
from StringIO import StringIO
from io import BytesIO
from urllib2 import urlopen
from urlparse import urlparse
from urllib import quote, unquote
unicode = unicode
long = long
apply = apply
range = xrange
def skip(func):
return
## Instruction:
Allow for tuple-based args in map also
## Code After:
from __future__ import absolute_import, division, print_function
import sys
PY3 = sys.version_info[0] == 3
PY2 = sys.version_info[0] == 2
if PY3:
import builtins
from queue import Queue, Empty
from itertools import zip_longest
from io import StringIO, BytesIO
from urllib.request import urlopen
from urllib.parse import urlparse
from urllib.parse import quote, unquote
unicode = str
long = int
def apply(func, args, kwargs=None):
if not isinstance(args, list) and not isinstance(args, tuple) and kwargs is None:
return func(args)
elif not isinstance(args, list) and not isinstance(args, tuple):
return func(args, **kwargs)
elif kwargs:
return func(*args, **kwargs)
else:
return func(*args)
range = range
else:
import __builtin__ as builtins
from Queue import Queue, Empty
import operator
from itertools import izip_longest as zip_longest
from StringIO import StringIO
from io import BytesIO
from urllib2 import urlopen
from urlparse import urlparse
from urllib import quote, unquote
unicode = unicode
long = long
apply = apply
range = xrange
def skip(func):
return
|
from __future__ import absolute_import, division, print_function
import sys
PY3 = sys.version_info[0] == 3
PY2 = sys.version_info[0] == 2
if PY3:
import builtins
from queue import Queue, Empty
from itertools import zip_longest
from io import StringIO, BytesIO
from urllib.request import urlopen
from urllib.parse import urlparse
from urllib.parse import quote, unquote
unicode = str
long = int
def apply(func, args, kwargs=None):
- if not isinstance(args, list) and kwargs is None:
+ if not isinstance(args, list) and not isinstance(args, tuple) and kwargs is None:
? ++++++++++++++++++++++++++++++++
return func(args)
- elif not isinstance(args, list):
+ elif not isinstance(args, list) and not isinstance(args, tuple):
return func(args, **kwargs)
elif kwargs:
return func(*args, **kwargs)
else:
return func(*args)
range = range
else:
import __builtin__ as builtins
from Queue import Queue, Empty
import operator
from itertools import izip_longest as zip_longest
from StringIO import StringIO
from io import BytesIO
from urllib2 import urlopen
from urlparse import urlparse
from urllib import quote, unquote
unicode = unicode
long = long
apply = apply
range = xrange
def skip(func):
return
|
8571f61a20f9ef536040c3101e24c48640a72f6a
|
iss/admin.py
|
iss/admin.py
|
from django.contrib import admin
from .models import Organization
class OrganizationAdmin(admin.ModelAdmin):
list_display = ('account_num', 'org_name', 'city', 'state', 'country_iso')
search_fields = ('org_name', 'account_num')
admin.site.register(Organization, OrganizationAdmin)
|
from django.contrib import admin
from .models import Organization
class OrganizationAdmin(admin.ModelAdmin):
list_display = ('membersuite_id', 'account_num', 'org_name', 'city',
'state', 'country_iso')
search_fields = ('org_name', 'membersuite_id', 'account_num')
admin.site.register(Organization, OrganizationAdmin)
|
Add membersuite ID to display and search
|
Add membersuite ID to display and search
|
Python
|
mit
|
AASHE/iss
|
from django.contrib import admin
from .models import Organization
class OrganizationAdmin(admin.ModelAdmin):
- list_display = ('account_num', 'org_name', 'city', 'state', 'country_iso')
+ list_display = ('membersuite_id', 'account_num', 'org_name', 'city',
+ 'state', 'country_iso')
- search_fields = ('org_name', 'account_num')
+ search_fields = ('org_name', 'membersuite_id', 'account_num')
admin.site.register(Organization, OrganizationAdmin)
|
Add membersuite ID to display and search
|
## Code Before:
from django.contrib import admin
from .models import Organization
class OrganizationAdmin(admin.ModelAdmin):
list_display = ('account_num', 'org_name', 'city', 'state', 'country_iso')
search_fields = ('org_name', 'account_num')
admin.site.register(Organization, OrganizationAdmin)
## Instruction:
Add membersuite ID to display and search
## Code After:
from django.contrib import admin
from .models import Organization
class OrganizationAdmin(admin.ModelAdmin):
list_display = ('membersuite_id', 'account_num', 'org_name', 'city',
'state', 'country_iso')
search_fields = ('org_name', 'membersuite_id', 'account_num')
admin.site.register(Organization, OrganizationAdmin)
|
from django.contrib import admin
from .models import Organization
class OrganizationAdmin(admin.ModelAdmin):
- list_display = ('account_num', 'org_name', 'city', 'state', 'country_iso')
+ list_display = ('membersuite_id', 'account_num', 'org_name', 'city',
+ 'state', 'country_iso')
- search_fields = ('org_name', 'account_num')
+ search_fields = ('org_name', 'membersuite_id', 'account_num')
? ++++++++++++++++++
admin.site.register(Organization, OrganizationAdmin)
|
11d19d1756f6227db894aabcf4bd02e327e292c7
|
tests/test_basic.py
|
tests/test_basic.py
|
from hello_world import hello_world
from unittest import TestCase
class BasicTest(TestCase):
def test_basic_hello_world(self):
"""
Test basic hello world messaging
"""
False
|
from hello_world import hello_world
from unittest import TestCase
class BasicTest(TestCase):
def test_basic_hello_world(self):
"""
Test basic hello world messaging
"""
self.assertTrue(callable(hello_world))
|
Make things a little better
|
Make things a little better
|
Python
|
mit
|
jeansaad/hello_world
|
from hello_world import hello_world
from unittest import TestCase
class BasicTest(TestCase):
def test_basic_hello_world(self):
"""
Test basic hello world messaging
"""
- False
+ self.assertTrue(callable(hello_world))
|
Make things a little better
|
## Code Before:
from hello_world import hello_world
from unittest import TestCase
class BasicTest(TestCase):
def test_basic_hello_world(self):
"""
Test basic hello world messaging
"""
False
## Instruction:
Make things a little better
## Code After:
from hello_world import hello_world
from unittest import TestCase
class BasicTest(TestCase):
def test_basic_hello_world(self):
"""
Test basic hello world messaging
"""
self.assertTrue(callable(hello_world))
|
from hello_world import hello_world
from unittest import TestCase
class BasicTest(TestCase):
def test_basic_hello_world(self):
"""
Test basic hello world messaging
"""
- False
+ self.assertTrue(callable(hello_world))
|
b60cfdb2b338a4f87b4ac6ba7dd03c9c1d751b37
|
scrapi/processing/base.py
|
scrapi/processing/base.py
|
class BaseProcessor(object):
NAME = None
def process_raw(self, raw_doc, **kwargs):
pass # pragma: no cover
def process_normalized(self, raw_doc, normalized, **kwargs):
pass # pragma: no cover
|
import six
import json
from abc import abstractproperty, abstractmethod
from requests.structures import CaseInsensitiveDict
class BaseProcessor(object):
NAME = None
def process_raw(self, raw_doc, **kwargs):
pass # pragma: no cover
def process_normalized(self, raw_doc, normalized, **kwargs):
pass # pragma: no cover
class BaseHarvesterResponse(object):
"""A parody of requests.response but stored in a database for caching
Should reflect all methods of a response object
Contains an additional field time_made, self-explanatory
"""
class DoesNotExist(Exception):
pass
@abstractproperty
def method(self):
raise NotImplementedError
@abstractproperty
def url(self):
raise NotImplementedError
@abstractproperty
def ok(self):
raise NotImplementedError
@abstractproperty
def content(self):
raise NotImplementedError
@abstractproperty
def encoding(self):
raise NotImplementedError
@abstractproperty
def headers_str(self):
raise NotImplementedError
@abstractproperty
def status_code(self):
raise NotImplementedError
@abstractproperty
def time_made(self):
raise NotImplementedError
@classmethod
@abstractmethod
def get(self, url=None, method=None):
raise NotImplementedError
@abstractmethod
def save(self):
raise NotImplementedError
@abstractmethod
def update(self, **kwargs):
raise NotImplementedError
def json(self):
try:
content = self.content.decode('utf-8')
except AttributeError: # python 3eeeee!
content = self.content
return json.loads(content)
@property
def headers(self):
return CaseInsensitiveDict(json.loads(self.headers_str))
@property
def text(self):
return six.u(self.content)
|
Add definition of abstract harvester model
|
Add definition of abstract harvester model
|
Python
|
apache-2.0
|
erinspace/scrapi,felliott/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,erinspace/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,mehanig/scrapi
|
+ import six
+ import json
+ from abc import abstractproperty, abstractmethod
+
+ from requests.structures import CaseInsensitiveDict
+
+
class BaseProcessor(object):
NAME = None
def process_raw(self, raw_doc, **kwargs):
pass # pragma: no cover
def process_normalized(self, raw_doc, normalized, **kwargs):
pass # pragma: no cover
+
+ class BaseHarvesterResponse(object):
+ """A parody of requests.response but stored in a database for caching
+ Should reflect all methods of a response object
+ Contains an additional field time_made, self-explanatory
+ """
+
+ class DoesNotExist(Exception):
+ pass
+
+ @abstractproperty
+ def method(self):
+ raise NotImplementedError
+
+ @abstractproperty
+ def url(self):
+ raise NotImplementedError
+
+ @abstractproperty
+ def ok(self):
+ raise NotImplementedError
+
+ @abstractproperty
+ def content(self):
+ raise NotImplementedError
+
+ @abstractproperty
+ def encoding(self):
+ raise NotImplementedError
+
+ @abstractproperty
+ def headers_str(self):
+ raise NotImplementedError
+
+ @abstractproperty
+ def status_code(self):
+ raise NotImplementedError
+
+ @abstractproperty
+ def time_made(self):
+ raise NotImplementedError
+
+ @classmethod
+ @abstractmethod
+ def get(self, url=None, method=None):
+ raise NotImplementedError
+
+ @abstractmethod
+ def save(self):
+ raise NotImplementedError
+
+ @abstractmethod
+ def update(self, **kwargs):
+ raise NotImplementedError
+
+ def json(self):
+ try:
+ content = self.content.decode('utf-8')
+ except AttributeError: # python 3eeeee!
+ content = self.content
+ return json.loads(content)
+
+ @property
+ def headers(self):
+ return CaseInsensitiveDict(json.loads(self.headers_str))
+
+ @property
+ def text(self):
+ return six.u(self.content)
+
|
Add definition of abstract harvester model
|
## Code Before:
class BaseProcessor(object):
NAME = None
def process_raw(self, raw_doc, **kwargs):
pass # pragma: no cover
def process_normalized(self, raw_doc, normalized, **kwargs):
pass # pragma: no cover
## Instruction:
Add definition of abstract harvester model
## Code After:
import six
import json
from abc import abstractproperty, abstractmethod
from requests.structures import CaseInsensitiveDict
class BaseProcessor(object):
NAME = None
def process_raw(self, raw_doc, **kwargs):
pass # pragma: no cover
def process_normalized(self, raw_doc, normalized, **kwargs):
pass # pragma: no cover
class BaseHarvesterResponse(object):
"""A parody of requests.response but stored in a database for caching
Should reflect all methods of a response object
Contains an additional field time_made, self-explanatory
"""
class DoesNotExist(Exception):
pass
@abstractproperty
def method(self):
raise NotImplementedError
@abstractproperty
def url(self):
raise NotImplementedError
@abstractproperty
def ok(self):
raise NotImplementedError
@abstractproperty
def content(self):
raise NotImplementedError
@abstractproperty
def encoding(self):
raise NotImplementedError
@abstractproperty
def headers_str(self):
raise NotImplementedError
@abstractproperty
def status_code(self):
raise NotImplementedError
@abstractproperty
def time_made(self):
raise NotImplementedError
@classmethod
@abstractmethod
def get(self, url=None, method=None):
raise NotImplementedError
@abstractmethod
def save(self):
raise NotImplementedError
@abstractmethod
def update(self, **kwargs):
raise NotImplementedError
def json(self):
try:
content = self.content.decode('utf-8')
except AttributeError: # python 3eeeee!
content = self.content
return json.loads(content)
@property
def headers(self):
return CaseInsensitiveDict(json.loads(self.headers_str))
@property
def text(self):
return six.u(self.content)
|
+ import six
+ import json
+ from abc import abstractproperty, abstractmethod
+
+ from requests.structures import CaseInsensitiveDict
+
+
class BaseProcessor(object):
NAME = None
def process_raw(self, raw_doc, **kwargs):
pass # pragma: no cover
def process_normalized(self, raw_doc, normalized, **kwargs):
pass # pragma: no cover
+
+
+ class BaseHarvesterResponse(object):
+ """A parody of requests.response but stored in a database for caching
+ Should reflect all methods of a response object
+ Contains an additional field time_made, self-explanatory
+ """
+
+ class DoesNotExist(Exception):
+ pass
+
+ @abstractproperty
+ def method(self):
+ raise NotImplementedError
+
+ @abstractproperty
+ def url(self):
+ raise NotImplementedError
+
+ @abstractproperty
+ def ok(self):
+ raise NotImplementedError
+
+ @abstractproperty
+ def content(self):
+ raise NotImplementedError
+
+ @abstractproperty
+ def encoding(self):
+ raise NotImplementedError
+
+ @abstractproperty
+ def headers_str(self):
+ raise NotImplementedError
+
+ @abstractproperty
+ def status_code(self):
+ raise NotImplementedError
+
+ @abstractproperty
+ def time_made(self):
+ raise NotImplementedError
+
+ @classmethod
+ @abstractmethod
+ def get(self, url=None, method=None):
+ raise NotImplementedError
+
+ @abstractmethod
+ def save(self):
+ raise NotImplementedError
+
+ @abstractmethod
+ def update(self, **kwargs):
+ raise NotImplementedError
+
+ def json(self):
+ try:
+ content = self.content.decode('utf-8')
+ except AttributeError: # python 3eeeee!
+ content = self.content
+ return json.loads(content)
+
+ @property
+ def headers(self):
+ return CaseInsensitiveDict(json.loads(self.headers_str))
+
+ @property
+ def text(self):
+ return six.u(self.content)
|
7cde5e713ace2b0a1d9cdef01ac912f3a53814cd
|
run_scripts/build_phylogenies.py
|
run_scripts/build_phylogenies.py
|
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import multiprocessing
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
processes = [multiprocessing.Process(target=build_phylogeny, args=(seq, settings)) for seq in seqs_needing_run]
for p in processes:
p.start()
for p in processes:
p.join()
def build_phylogeny(seq, settings):
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
|
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import joblib
import pickle
#multiprocessing
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
r = joblib.Parallel(n_jobs=24, verbose=5)(joblib.delayed(pool_process)\
(seq, settings_file) for seq in seqs_needing_run)
def pool_process(seq, settings_file):
"""
A hacky and unecessary way to provide a pickle serealisable
object for multiprocessing to pass off to workers
- inefficiency in reinstantiating a settings class every time
"""
settings = dg.settings.Settings(settings_file)
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
|
Change run script to use worker pool
|
Change run script to use worker pool
|
Python
|
bsd-3-clause
|
fmaguire/dendrogenous
|
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
+ import joblib
+ import pickle
- import multiprocessing
+ #multiprocessing
def main(settings_file):
- settings = dg.settings.Settings(settings_file)
+ settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
- processes = [multiprocessing.Process(target=build_phylogeny, args=(seq, settings)) for seq in seqs_needing_run]
+ r = joblib.Parallel(n_jobs=24, verbose=5)(joblib.delayed(pool_process)\
+ (seq, settings_file) for seq in seqs_needing_run)
- for p in processes:
- p.start()
- for p in processes:
- p.join()
-
- def build_phylogeny(seq, settings):
+ def pool_process(seq, settings_file):
+ """
+ A hacky and unecessary way to provide a pickle serealisable
+ object for multiprocessing to pass off to workers
+ - inefficiency in reinstantiating a settings class every time
+ """
+ settings = dg.settings.Settings(settings_file)
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
+
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
|
Change run script to use worker pool
|
## Code Before:
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import multiprocessing
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
processes = [multiprocessing.Process(target=build_phylogeny, args=(seq, settings)) for seq in seqs_needing_run]
for p in processes:
p.start()
for p in processes:
p.join()
def build_phylogeny(seq, settings):
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
## Instruction:
Change run script to use worker pool
## Code After:
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import joblib
import pickle
#multiprocessing
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
r = joblib.Parallel(n_jobs=24, verbose=5)(joblib.delayed(pool_process)\
(seq, settings_file) for seq in seqs_needing_run)
def pool_process(seq, settings_file):
"""
A hacky and unecessary way to provide a pickle serealisable
object for multiprocessing to pass off to workers
- inefficiency in reinstantiating a settings class every time
"""
settings = dg.settings.Settings(settings_file)
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
|
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
+ import joblib
+ import pickle
- import multiprocessing
? ^^^^^^^
+ #multiprocessing
? ^
def main(settings_file):
- settings = dg.settings.Settings(settings_file)
? --------
+ settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
- processes = [multiprocessing.Process(target=build_phylogeny, args=(seq, settings)) for seq in seqs_needing_run]
+ r = joblib.Parallel(n_jobs=24, verbose=5)(joblib.delayed(pool_process)\
+ (seq, settings_file) for seq in seqs_needing_run)
- for p in processes:
- p.start()
- for p in processes:
- p.join()
-
- def build_phylogeny(seq, settings):
+ def pool_process(seq, settings_file):
+ """
+ A hacky and unecessary way to provide a pickle serealisable
+ object for multiprocessing to pass off to workers
+ - inefficiency in reinstantiating a settings class every time
+ """
+ settings = dg.settings.Settings(settings_file)
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
+
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
|
b24094f979b90f087698d9696d661df7db857376
|
moonlighty.py
|
moonlighty.py
|
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', 'xbox.conf', '-1080', '-30fps']
#cmd = ["ls", "-l"]
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
output, err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
return output
if __name__ == '__main__':
manager.run()
|
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', '/home/pi/xbox.conf', '-1080', '-30fps']
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
return None
return 'Steam started'
if __name__ == '__main__':
manager.run()
|
Add return value stating Steam was started
|
Add return value stating Steam was started
|
Python
|
artistic-2.0
|
VladimirDaniyan/moonlighty,VladimirDaniyan/moonlighty
|
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
- cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', 'xbox.conf', '-1080', '-30fps']
+ cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', '/home/pi/xbox.conf', '-1080', '-30fps']
- #cmd = ["ls", "-l"]
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
- output, err = p.communicate()
+ err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
- return output
+ return None
+ return 'Steam started'
if __name__ == '__main__':
manager.run()
|
Add return value stating Steam was started
|
## Code Before:
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', 'xbox.conf', '-1080', '-30fps']
#cmd = ["ls", "-l"]
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
output, err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
return output
if __name__ == '__main__':
manager.run()
## Instruction:
Add return value stating Steam was started
## Code After:
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', '/home/pi/xbox.conf', '-1080', '-30fps']
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
return None
return 'Steam started'
if __name__ == '__main__':
manager.run()
|
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
- cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', 'xbox.conf', '-1080', '-30fps']
+ cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', '/home/pi/xbox.conf', '-1080', '-30fps']
? +++++++++
- #cmd = ["ls", "-l"]
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
- output, err = p.communicate()
? --------
+ err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
- return output
? ^^^^^
+ return None
? + ^^
+ return 'Steam started'
if __name__ == '__main__':
manager.run()
|
de7219dd9d40f316dc0dd6f6c2cad68e66898762
|
tests/test_live.py
|
tests/test_live.py
|
import os
import pytest
from aiosmtplib import SMTP, SMTPAuthenticationError, SMTPStatus
pytestmark = [
pytest.mark.skipif(
os.environ.get("CI") == "true",
reason="No tests against real servers on CI servers",
),
pytest.mark.asyncio(),
]
async def test_starttls_gmail():
client = SMTP(hostname="smtp.gmail.com", port=587, use_tls=False)
await client.connect(timeout=1.0)
await client.ehlo()
await client.starttls(validate_certs=False)
response = await client.ehlo()
assert response.code == SMTPStatus.completed
assert "smtp.gmail.com at your service" in response.message
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
@pytest.mark.asyncio()
async def test_qq_login():
client = SMTP(hostname="smtp.qq.com", port=587, use_tls=False)
await client.connect(timeout=2.0)
await client.ehlo()
await client.starttls(validate_certs=False)
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
|
import os
import pytest
from aiosmtplib import SMTP, SMTPAuthenticationError, SMTPStatus
pytestmark = [
pytest.mark.skipif(
os.environ.get("AIOSMTPLIB_LIVE_TESTS") != "true",
reason="No tests against real servers unless requested",
),
pytest.mark.asyncio(),
]
async def test_starttls_gmail():
client = SMTP(hostname="smtp.gmail.com", port=587, use_tls=False)
await client.connect(timeout=1.0)
await client.ehlo()
await client.starttls(validate_certs=False)
response = await client.ehlo()
assert response.code == SMTPStatus.completed
assert "smtp.gmail.com at your service" in response.message
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
@pytest.mark.asyncio()
async def test_qq_login():
client = SMTP(hostname="smtp.qq.com", port=587, use_tls=False)
await client.connect(timeout=2.0)
await client.ehlo()
await client.starttls(validate_certs=False)
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
|
Disable live tests by default
|
Disable live tests by default
|
Python
|
mit
|
cole/aiosmtplib
|
import os
import pytest
from aiosmtplib import SMTP, SMTPAuthenticationError, SMTPStatus
pytestmark = [
pytest.mark.skipif(
- os.environ.get("CI") == "true",
+ os.environ.get("AIOSMTPLIB_LIVE_TESTS") != "true",
- reason="No tests against real servers on CI servers",
+ reason="No tests against real servers unless requested",
),
pytest.mark.asyncio(),
]
async def test_starttls_gmail():
client = SMTP(hostname="smtp.gmail.com", port=587, use_tls=False)
await client.connect(timeout=1.0)
await client.ehlo()
await client.starttls(validate_certs=False)
response = await client.ehlo()
assert response.code == SMTPStatus.completed
assert "smtp.gmail.com at your service" in response.message
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
@pytest.mark.asyncio()
async def test_qq_login():
client = SMTP(hostname="smtp.qq.com", port=587, use_tls=False)
await client.connect(timeout=2.0)
await client.ehlo()
await client.starttls(validate_certs=False)
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
|
Disable live tests by default
|
## Code Before:
import os
import pytest
from aiosmtplib import SMTP, SMTPAuthenticationError, SMTPStatus
pytestmark = [
pytest.mark.skipif(
os.environ.get("CI") == "true",
reason="No tests against real servers on CI servers",
),
pytest.mark.asyncio(),
]
async def test_starttls_gmail():
client = SMTP(hostname="smtp.gmail.com", port=587, use_tls=False)
await client.connect(timeout=1.0)
await client.ehlo()
await client.starttls(validate_certs=False)
response = await client.ehlo()
assert response.code == SMTPStatus.completed
assert "smtp.gmail.com at your service" in response.message
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
@pytest.mark.asyncio()
async def test_qq_login():
client = SMTP(hostname="smtp.qq.com", port=587, use_tls=False)
await client.connect(timeout=2.0)
await client.ehlo()
await client.starttls(validate_certs=False)
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
## Instruction:
Disable live tests by default
## Code After:
import os
import pytest
from aiosmtplib import SMTP, SMTPAuthenticationError, SMTPStatus
pytestmark = [
pytest.mark.skipif(
os.environ.get("AIOSMTPLIB_LIVE_TESTS") != "true",
reason="No tests against real servers unless requested",
),
pytest.mark.asyncio(),
]
async def test_starttls_gmail():
client = SMTP(hostname="smtp.gmail.com", port=587, use_tls=False)
await client.connect(timeout=1.0)
await client.ehlo()
await client.starttls(validate_certs=False)
response = await client.ehlo()
assert response.code == SMTPStatus.completed
assert "smtp.gmail.com at your service" in response.message
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
@pytest.mark.asyncio()
async def test_qq_login():
client = SMTP(hostname="smtp.qq.com", port=587, use_tls=False)
await client.connect(timeout=2.0)
await client.ehlo()
await client.starttls(validate_certs=False)
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
|
import os
import pytest
from aiosmtplib import SMTP, SMTPAuthenticationError, SMTPStatus
pytestmark = [
pytest.mark.skipif(
- os.environ.get("CI") == "true",
? ^ ^
+ os.environ.get("AIOSMTPLIB_LIVE_TESTS") != "true",
? ^ +++++++++++++++++++ ^
- reason="No tests against real servers on CI servers",
? ^ ^^^ ^^^^^
+ reason="No tests against real servers unless requested",
? ^ ++++ ^^^^^ + ^
),
pytest.mark.asyncio(),
]
async def test_starttls_gmail():
client = SMTP(hostname="smtp.gmail.com", port=587, use_tls=False)
await client.connect(timeout=1.0)
await client.ehlo()
await client.starttls(validate_certs=False)
response = await client.ehlo()
assert response.code == SMTPStatus.completed
assert "smtp.gmail.com at your service" in response.message
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
@pytest.mark.asyncio()
async def test_qq_login():
client = SMTP(hostname="smtp.qq.com", port=587, use_tls=False)
await client.connect(timeout=2.0)
await client.ehlo()
await client.starttls(validate_certs=False)
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
|
1bba76808aa5c598f1558cd127d8ed4a006692e1
|
tests/conftest.py
|
tests/conftest.py
|
import os
def pytest_configure(config):
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
|
import os
def pytest_configure(config):
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
# We need to import qtpy here to make sure that the API versions get set
# straight away.
import qtpy
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
|
Make sure we import qtpy before importing any Qt wrappers directly
|
Make sure we import qtpy before importing any Qt wrappers directly
|
Python
|
mit
|
spyder-ide/qtpy,goanpeca/qtpy,davvid/qtpy,davvid/qtpy,goanpeca/qtpy
|
import os
def pytest_configure(config):
+
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
+
+ # We need to import qtpy here to make sure that the API versions get set
+ # straight away.
+ import qtpy
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
|
Make sure we import qtpy before importing any Qt wrappers directly
|
## Code Before:
import os
def pytest_configure(config):
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
## Instruction:
Make sure we import qtpy before importing any Qt wrappers directly
## Code After:
import os
def pytest_configure(config):
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
# We need to import qtpy here to make sure that the API versions get set
# straight away.
import qtpy
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
|
import os
def pytest_configure(config):
+
if 'USE_QT_API' in os.environ:
os.environ['QT_API'] = os.environ['USE_QT_API'].lower()
+
+ # We need to import qtpy here to make sure that the API versions get set
+ # straight away.
+ import qtpy
def pytest_report_header(config):
versions = os.linesep
versions += 'PyQt4: '
try:
from PyQt4 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PyQt5: '
try:
from PyQt5 import Qt
versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
versions += 'PySide: '
try:
import PySide
from PySide import QtCore
versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__)
except ImportError:
versions += 'not installed'
except AttributeError:
versions += 'unknown version'
versions += os.linesep
return versions
|
1ce0d9898fc31f08bbf5765b3a687eaa8067a465
|
flaskext/flask_scss.py
|
flaskext/flask_scss.py
|
from .scss import Scss
|
from .scss import Scss
from warnings import warn
warn(DeprecationWarning('Deprecated import method. '
'Please use:\n '
'from flask.ext.scss import Scss'), stacklevel=2)
|
Raise a DeprecationWarning when using pre-Flask-0.8 import scheme
|
Raise a DeprecationWarning when using pre-Flask-0.8 import scheme
|
Python
|
mit
|
bcarlin/flask-scss
|
from .scss import Scss
+ from warnings import warn
+ warn(DeprecationWarning('Deprecated import method. '
+ 'Please use:\n '
+ 'from flask.ext.scss import Scss'), stacklevel=2)
+
|
Raise a DeprecationWarning when using pre-Flask-0.8 import scheme
|
## Code Before:
from .scss import Scss
## Instruction:
Raise a DeprecationWarning when using pre-Flask-0.8 import scheme
## Code After:
from .scss import Scss
from warnings import warn
warn(DeprecationWarning('Deprecated import method. '
'Please use:\n '
'from flask.ext.scss import Scss'), stacklevel=2)
|
from .scss import Scss
+
+ from warnings import warn
+ warn(DeprecationWarning('Deprecated import method. '
+ 'Please use:\n '
+ 'from flask.ext.scss import Scss'), stacklevel=2)
|
6cd9c7285d462311580754229d0b85af844dd387
|
test/integration/test_cli.py
|
test/integration/test_cli.py
|
import unittest
class TestCLI(unittest.TestCase):
def test_kubos_installed(self):
self.assertEqual('foo'.upper(), 'FOO')
self.assertTrue('FOO'.isupper())
self.assertFalse('Foo'.isupper())
s = 'hello world'
self.assertEqual(s.split(), ['hello', 'world'])
# check that s.split fails when the separator is not a string
with self.assertRaises(TypeError):
s.split(2)
if __name__ == '__main__':
unittest.main()
|
import unittest
import re
import subprocess
class TestCLI(unittest.TestCase):
def test_latest_kubos_installed(self):
bashCommand = "vagrant ssh -c 'kubos update'"
process = subprocess.Popen(bashCommand.split())
output, error = process.communicate()
regex = re.compile(r"All up to date!")
self.assertTrue(regex.search( output ))
if __name__ == '__main__':
unittest.main()
|
Update integration test with actual...integration test
|
Update integration test with actual...integration test
|
Python
|
apache-2.0
|
Psykar/kubos,kubostech/KubOS,Psykar/kubos,Psykar/kubos,Psykar/kubos,kubostech/KubOS,Psykar/kubos,Psykar/kubos,Psykar/kubos
|
import unittest
+ import re
+ import subprocess
class TestCLI(unittest.TestCase):
-
- def test_kubos_installed(self):
+ def test_latest_kubos_installed(self):
+ bashCommand = "vagrant ssh -c 'kubos update'"
+ process = subprocess.Popen(bashCommand.split())
+ output, error = process.communicate()
+ regex = re.compile(r"All up to date!")
+ self.assertTrue(regex.search( output ))
- self.assertEqual('foo'.upper(), 'FOO')
- self.assertTrue('FOO'.isupper())
- self.assertFalse('Foo'.isupper())
- s = 'hello world'
- self.assertEqual(s.split(), ['hello', 'world'])
- # check that s.split fails when the separator is not a string
- with self.assertRaises(TypeError):
- s.split(2)
if __name__ == '__main__':
unittest.main()
|
Update integration test with actual...integration test
|
## Code Before:
import unittest
class TestCLI(unittest.TestCase):
def test_kubos_installed(self):
self.assertEqual('foo'.upper(), 'FOO')
self.assertTrue('FOO'.isupper())
self.assertFalse('Foo'.isupper())
s = 'hello world'
self.assertEqual(s.split(), ['hello', 'world'])
# check that s.split fails when the separator is not a string
with self.assertRaises(TypeError):
s.split(2)
if __name__ == '__main__':
unittest.main()
## Instruction:
Update integration test with actual...integration test
## Code After:
import unittest
import re
import subprocess
class TestCLI(unittest.TestCase):
def test_latest_kubos_installed(self):
bashCommand = "vagrant ssh -c 'kubos update'"
process = subprocess.Popen(bashCommand.split())
output, error = process.communicate()
regex = re.compile(r"All up to date!")
self.assertTrue(regex.search( output ))
if __name__ == '__main__':
unittest.main()
|
import unittest
+ import re
+ import subprocess
class TestCLI(unittest.TestCase):
-
- def test_kubos_installed(self):
+ def test_latest_kubos_installed(self):
? +++++++
+ bashCommand = "vagrant ssh -c 'kubos update'"
+ process = subprocess.Popen(bashCommand.split())
+ output, error = process.communicate()
+ regex = re.compile(r"All up to date!")
+ self.assertTrue(regex.search( output ))
- self.assertEqual('foo'.upper(), 'FOO')
- self.assertTrue('FOO'.isupper())
- self.assertFalse('Foo'.isupper())
- s = 'hello world'
- self.assertEqual(s.split(), ['hello', 'world'])
- # check that s.split fails when the separator is not a string
- with self.assertRaises(TypeError):
- s.split(2)
if __name__ == '__main__':
unittest.main()
|
5c41286666290c2a067c51b7ab9ea171e4657d69
|
fb/models.py
|
fb/models.py
|
from django.db import models
# Create your models here.
|
from django.db import models
class UserPost(models.Model):
text = models.TextField(max_length=200)
date_added = models.DateTimeField(auto_now_add=True)
author = models.CharField(default='Eau De Web', max_length=20)
def __unicode__(self):
return '{} @ {}'.format(self.author, self.date_added)
|
Write a model class for user posts.
|
Write a model class for user posts.
|
Python
|
apache-2.0
|
pure-python/brainmate
|
from django.db import models
- # Create your models here.
+ class UserPost(models.Model):
+ text = models.TextField(max_length=200)
+ date_added = models.DateTimeField(auto_now_add=True)
+ author = models.CharField(default='Eau De Web', max_length=20)
+
+ def __unicode__(self):
+ return '{} @ {}'.format(self.author, self.date_added)
+
|
Write a model class for user posts.
|
## Code Before:
from django.db import models
# Create your models here.
## Instruction:
Write a model class for user posts.
## Code After:
from django.db import models
class UserPost(models.Model):
text = models.TextField(max_length=200)
date_added = models.DateTimeField(auto_now_add=True)
author = models.CharField(default='Eau De Web', max_length=20)
def __unicode__(self):
return '{} @ {}'.format(self.author, self.date_added)
|
from django.db import models
- # Create your models here.
+
+ class UserPost(models.Model):
+ text = models.TextField(max_length=200)
+ date_added = models.DateTimeField(auto_now_add=True)
+ author = models.CharField(default='Eau De Web', max_length=20)
+
+ def __unicode__(self):
+ return '{} @ {}'.format(self.author, self.date_added)
|
392f209791eede86d65f018a9b873b33cb7ccb02
|
test/test_uniprot_retrieval_data.py
|
test/test_uniprot_retrieval_data.py
|
import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist(), df_result_truth['GOs'].tolist())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
|
import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist().sort(), df_result_truth['GOs'].tolist().sort())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
|
Fix issue with GO term (unsorted).
|
Fix issue with GO term (unsorted).
|
Python
|
agpl-3.0
|
ArnaudBelcour/Workflow_GeneList_Analysis,ArnaudBelcour/Workflow_GeneList_Analysis
|
import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
- np.testing.assert_array_equal(df_result['GOs'].tolist(), df_result_truth['GOs'].tolist())
+ np.testing.assert_array_equal(df_result['GOs'].tolist().sort(), df_result_truth['GOs'].tolist().sort())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
|
Fix issue with GO term (unsorted).
|
## Code Before:
import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist(), df_result_truth['GOs'].tolist())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
## Instruction:
Fix issue with GO term (unsorted).
## Code After:
import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist().sort(), df_result_truth['GOs'].tolist().sort())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
|
import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
- np.testing.assert_array_equal(df_result['GOs'].tolist(), df_result_truth['GOs'].tolist())
+ np.testing.assert_array_equal(df_result['GOs'].tolist().sort(), df_result_truth['GOs'].tolist().sort())
? +++++++ ++++++ +
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
|
93ba327a3198c587d791aeb1d285f6e7f339df20
|
app/grandchallenge/archives/models.py
|
app/grandchallenge/archives/models.py
|
from django.db import models
from grandchallenge.core.models import UUIDModel
from grandchallenge.cases.models import Image
from grandchallenge.patients.models import Patient
class Archive(UUIDModel):
"""
Model for archive. Contains a collection of images
"""
name = models.CharField(max_length=255, default="Unnamed Archive")
images = models.ManyToManyField(Image)
def __str__(self):
return f"<{self.__class__.__name__} {self.name}>"
def delete(self, *args, **kwargs):
# Remove all related patients and other models via cascading
Patient.objects.filter(study__image__archive__id=self.id).delete()
super().delete(*args, **kwargs)
|
from django.db import models
from grandchallenge.core.models import UUIDModel
from grandchallenge.cases.models import Image
from grandchallenge.patients.models import Patient
class Archive(UUIDModel):
"""
Model for archive. Contains a collection of images
"""
name = models.CharField(max_length=255, default="Unnamed Archive")
images = models.ManyToManyField(Image)
def __str__(self):
return f"<{self.__class__.__name__} {self.name}>"
def delete(self, *args, **kwargs):
# Remove all related patients and other models via cascading
Patient.objects.filter(study__image__archive__id=self.id).delete(
*args, **kwargs
)
super().delete(*args, **kwargs)
|
Add args and kwargs to delete method
|
Add args and kwargs to delete method
|
Python
|
apache-2.0
|
comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django
|
from django.db import models
from grandchallenge.core.models import UUIDModel
from grandchallenge.cases.models import Image
from grandchallenge.patients.models import Patient
class Archive(UUIDModel):
"""
Model for archive. Contains a collection of images
"""
name = models.CharField(max_length=255, default="Unnamed Archive")
images = models.ManyToManyField(Image)
def __str__(self):
return f"<{self.__class__.__name__} {self.name}>"
def delete(self, *args, **kwargs):
# Remove all related patients and other models via cascading
- Patient.objects.filter(study__image__archive__id=self.id).delete()
+ Patient.objects.filter(study__image__archive__id=self.id).delete(
+ *args, **kwargs
+ )
super().delete(*args, **kwargs)
|
Add args and kwargs to delete method
|
## Code Before:
from django.db import models
from grandchallenge.core.models import UUIDModel
from grandchallenge.cases.models import Image
from grandchallenge.patients.models import Patient
class Archive(UUIDModel):
"""
Model for archive. Contains a collection of images
"""
name = models.CharField(max_length=255, default="Unnamed Archive")
images = models.ManyToManyField(Image)
def __str__(self):
return f"<{self.__class__.__name__} {self.name}>"
def delete(self, *args, **kwargs):
# Remove all related patients and other models via cascading
Patient.objects.filter(study__image__archive__id=self.id).delete()
super().delete(*args, **kwargs)
## Instruction:
Add args and kwargs to delete method
## Code After:
from django.db import models
from grandchallenge.core.models import UUIDModel
from grandchallenge.cases.models import Image
from grandchallenge.patients.models import Patient
class Archive(UUIDModel):
"""
Model for archive. Contains a collection of images
"""
name = models.CharField(max_length=255, default="Unnamed Archive")
images = models.ManyToManyField(Image)
def __str__(self):
return f"<{self.__class__.__name__} {self.name}>"
def delete(self, *args, **kwargs):
# Remove all related patients and other models via cascading
Patient.objects.filter(study__image__archive__id=self.id).delete(
*args, **kwargs
)
super().delete(*args, **kwargs)
|
from django.db import models
from grandchallenge.core.models import UUIDModel
from grandchallenge.cases.models import Image
from grandchallenge.patients.models import Patient
class Archive(UUIDModel):
"""
Model for archive. Contains a collection of images
"""
name = models.CharField(max_length=255, default="Unnamed Archive")
images = models.ManyToManyField(Image)
def __str__(self):
return f"<{self.__class__.__name__} {self.name}>"
def delete(self, *args, **kwargs):
# Remove all related patients and other models via cascading
- Patient.objects.filter(study__image__archive__id=self.id).delete()
? -
+ Patient.objects.filter(study__image__archive__id=self.id).delete(
+ *args, **kwargs
+ )
super().delete(*args, **kwargs)
|
bda36d78984ee8b4701315170f004ed6955072ac
|
common/widgets.py
|
common/widgets.py
|
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
return format_html('<a{}><p{}>{}</p></a>',
flatatt({'href': value.url}),
flatatt({'class': 'form-control-static'}),
value.name)
|
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
from django.utils.translation import ugettext as _
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
outer_attrs = {}
if attrs:
outer_attrs.update(attrs)
if value:
outer_attrs['href'] = value.url
return format_html('<a{}><p{}>{}</p></a>',
flatatt(outer_attrs),
flatatt({'class': 'form-control-static'}),
value.name)
else:
outer_attrs['class'] = 'form-control-static'
return format_html('<p{}>{}</p>', flatatt(outer_attrs), _("No file uploaded"))
|
Handle "no file uploaded" situation in FileFieldLink
|
Handle "no file uploaded" situation in FileFieldLink
Fixes ValueErrors when user has no identity card uploaded
|
Python
|
agpl-3.0
|
m4tx/egielda,m4tx/egielda,m4tx/egielda
|
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
+
+ from django.utils.translation import ugettext as _
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
+ outer_attrs = {}
+ if attrs:
+ outer_attrs.update(attrs)
+ if value:
+ outer_attrs['href'] = value.url
- return format_html('<a{}><p{}>{}</p></a>',
+ return format_html('<a{}><p{}>{}</p></a>',
- flatatt({'href': value.url}),
+ flatatt(outer_attrs),
- flatatt({'class': 'form-control-static'}),
+ flatatt({'class': 'form-control-static'}),
- value.name)
+ value.name)
+ else:
+ outer_attrs['class'] = 'form-control-static'
+ return format_html('<p{}>{}</p>', flatatt(outer_attrs), _("No file uploaded"))
|
Handle "no file uploaded" situation in FileFieldLink
|
## Code Before:
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
return format_html('<a{}><p{}>{}</p></a>',
flatatt({'href': value.url}),
flatatt({'class': 'form-control-static'}),
value.name)
## Instruction:
Handle "no file uploaded" situation in FileFieldLink
## Code After:
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
from django.utils.translation import ugettext as _
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
outer_attrs = {}
if attrs:
outer_attrs.update(attrs)
if value:
outer_attrs['href'] = value.url
return format_html('<a{}><p{}>{}</p></a>',
flatatt(outer_attrs),
flatatt({'class': 'form-control-static'}),
value.name)
else:
outer_attrs['class'] = 'form-control-static'
return format_html('<p{}>{}</p>', flatatt(outer_attrs), _("No file uploaded"))
|
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
+
+ from django.utils.translation import ugettext as _
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
+ outer_attrs = {}
+ if attrs:
+ outer_attrs.update(attrs)
+ if value:
+ outer_attrs['href'] = value.url
- return format_html('<a{}><p{}>{}</p></a>',
+ return format_html('<a{}><p{}>{}</p></a>',
? ++++
- flatatt({'href': value.url}),
+ flatatt(outer_attrs),
- flatatt({'class': 'form-control-static'}),
+ flatatt({'class': 'form-control-static'}),
? ++++
- value.name)
+ value.name)
? ++++
+ else:
+ outer_attrs['class'] = 'form-control-static'
+ return format_html('<p{}>{}</p>', flatatt(outer_attrs), _("No file uploaded"))
|
b7e657134c21b62e78453b11f0745e0048e346bf
|
examples/simple_distribution.py
|
examples/simple_distribution.py
|
import sys
import time
from random import shuffle
from vania.fair_distributor import FairDistributor
def main():
# User input for the number of targets and objects.
users = ['user1', 'user2']
tasks = ['task1', 'task2']
preferences = [
[1, 2],
[2, 1],
]
# Run solver
start_time = time.time()
distributor = FairDistributor(users, tasks, preferences)
output = distributor.distribute(output='problem.lp')
elapsed_time = time.time() - start_time
# Output
print(output)
if __name__ == '__main__':
main()
|
import sys
import time
from random import shuffle
from vania.fair_distributor import FairDistributor
def main():
# User input for the number of targets and objects.
users = ['user1', 'user2']
tasks = ['task1', 'task2']
preferences = [
[1, 2],
[2, 1],
]
# Run solver
distributor = FairDistributor(users, tasks, preferences)
output = distributor.distribute(output='problem.lp')
# Output
print(output)
if __name__ == '__main__':
main()
|
Remove time metrics from the simple example
|
Remove time metrics from the simple example
|
Python
|
mit
|
Hackathonners/vania
|
import sys
import time
from random import shuffle
from vania.fair_distributor import FairDistributor
def main():
# User input for the number of targets and objects.
users = ['user1', 'user2']
tasks = ['task1', 'task2']
preferences = [
[1, 2],
[2, 1],
]
# Run solver
- start_time = time.time()
distributor = FairDistributor(users, tasks, preferences)
output = distributor.distribute(output='problem.lp')
- elapsed_time = time.time() - start_time
# Output
print(output)
if __name__ == '__main__':
main()
|
Remove time metrics from the simple example
|
## Code Before:
import sys
import time
from random import shuffle
from vania.fair_distributor import FairDistributor
def main():
# User input for the number of targets and objects.
users = ['user1', 'user2']
tasks = ['task1', 'task2']
preferences = [
[1, 2],
[2, 1],
]
# Run solver
start_time = time.time()
distributor = FairDistributor(users, tasks, preferences)
output = distributor.distribute(output='problem.lp')
elapsed_time = time.time() - start_time
# Output
print(output)
if __name__ == '__main__':
main()
## Instruction:
Remove time metrics from the simple example
## Code After:
import sys
import time
from random import shuffle
from vania.fair_distributor import FairDistributor
def main():
# User input for the number of targets and objects.
users = ['user1', 'user2']
tasks = ['task1', 'task2']
preferences = [
[1, 2],
[2, 1],
]
# Run solver
distributor = FairDistributor(users, tasks, preferences)
output = distributor.distribute(output='problem.lp')
# Output
print(output)
if __name__ == '__main__':
main()
|
import sys
import time
from random import shuffle
from vania.fair_distributor import FairDistributor
def main():
# User input for the number of targets and objects.
users = ['user1', 'user2']
tasks = ['task1', 'task2']
preferences = [
[1, 2],
[2, 1],
]
# Run solver
- start_time = time.time()
distributor = FairDistributor(users, tasks, preferences)
output = distributor.distribute(output='problem.lp')
- elapsed_time = time.time() - start_time
# Output
print(output)
if __name__ == '__main__':
main()
|
20a92ff1ffe143193d95235c7a5ea8e9edb0df64
|
yowsup/layers/protocol_acks/protocolentities/ack_outgoing.py
|
yowsup/layers/protocol_acks/protocolentities/ack_outgoing.py
|
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .ack import AckProtocolEntity
class OutgoingAckProtocolEntity(AckProtocolEntity):
'''
<ack type="{{delivery | read}}" class="{{message | receipt | ?}}" id="{{MESSAGE_ID}} to={{TO_JID}}">
</ack>
'''
def __init__(self, _id, _class, _type, _to):
super(OutgoingAckProtocolEntity, self).__init__(_id, _class)
self.setOutgoingData(_type, _to)
def setOutgoingData(self, _type, _to):
self._type = _type
self._to = _to
def toProtocolTreeNode(self):
node = super(OutgoingAckProtocolEntity, self).toProtocolTreeNode()
if self._type:
node.setAttribute("type", self._type)
node.setAttribute("to", self._to)
return node
def __str__(self):
out = super(OutgoingAckProtocolEntity, self).__str__()
out += "Type: %s\n" % self._type
out += "To: %s\n" % self._to
return out
@staticmethod
def fromProtocolTreeNode(node):
entity = AckProtocolEntity.fromProtocolTreeNode(node)
entity.__class__ = OutgoingAckProtocolEntity
entity.setOutgoingData(
node.getAttributeValue("type"),
node.getAttributeValue("to")
)
return entity
|
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .ack import AckProtocolEntity
class OutgoingAckProtocolEntity(AckProtocolEntity):
'''
<ack type="{{delivery | read}}" class="{{message | receipt | ?}}" id="{{MESSAGE_ID}} to={{TO_JID}}">
</ack>
<ack to="{{GROUP_JID}}" participant="{{JID}}" id="{{MESSAGE_ID}}" class="receipt" type="{{read | }}">
</ack>
'''
def __init__(self, _id, _class, _type, _to, _participant = None):
super(OutgoingAckProtocolEntity, self).__init__(_id, _class)
self.setOutgoingData(_type, _to, _participant)
def setOutgoingData(self, _type, _to, _participant):
self._type = _type
self._to = _to
self._participant = _participant
def toProtocolTreeNode(self):
node = super(OutgoingAckProtocolEntity, self).toProtocolTreeNode()
if self._type:
node.setAttribute("type", self._type)
node.setAttribute("to", self._to)
if self._participant:
node.setAttribute("participant", self._participant)
return node
def __str__(self):
out = super(OutgoingAckProtocolEntity, self).__str__()
out += "Type: %s\n" % self._type
out += "To: %s\n" % self._to
if self._participant:
out += "Participant: %s\n" % self._participant
return out
@staticmethod
def fromProtocolTreeNode(node):
entity = AckProtocolEntity.fromProtocolTreeNode(node)
entity.__class__ = OutgoingAckProtocolEntity
entity.setOutgoingData(
node.getAttributeValue("type"),
node.getAttributeValue("to"),
node.getAttributeValue("participant")
)
return entity
|
Include participant in outgoing ack
|
Include participant in outgoing ack
|
Python
|
mit
|
ongair/yowsup,biji/yowsup
|
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .ack import AckProtocolEntity
class OutgoingAckProtocolEntity(AckProtocolEntity):
'''
<ack type="{{delivery | read}}" class="{{message | receipt | ?}}" id="{{MESSAGE_ID}} to={{TO_JID}}">
</ack>
+
+ <ack to="{{GROUP_JID}}" participant="{{JID}}" id="{{MESSAGE_ID}}" class="receipt" type="{{read | }}">
+ </ack>
+
'''
- def __init__(self, _id, _class, _type, _to):
+ def __init__(self, _id, _class, _type, _to, _participant = None):
super(OutgoingAckProtocolEntity, self).__init__(_id, _class)
- self.setOutgoingData(_type, _to)
+ self.setOutgoingData(_type, _to, _participant)
- def setOutgoingData(self, _type, _to):
+ def setOutgoingData(self, _type, _to, _participant):
self._type = _type
self._to = _to
+ self._participant = _participant
def toProtocolTreeNode(self):
node = super(OutgoingAckProtocolEntity, self).toProtocolTreeNode()
if self._type:
node.setAttribute("type", self._type)
node.setAttribute("to", self._to)
+ if self._participant:
+ node.setAttribute("participant", self._participant)
return node
def __str__(self):
out = super(OutgoingAckProtocolEntity, self).__str__()
out += "Type: %s\n" % self._type
out += "To: %s\n" % self._to
+ if self._participant:
+ out += "Participant: %s\n" % self._participant
return out
@staticmethod
def fromProtocolTreeNode(node):
entity = AckProtocolEntity.fromProtocolTreeNode(node)
entity.__class__ = OutgoingAckProtocolEntity
entity.setOutgoingData(
node.getAttributeValue("type"),
- node.getAttributeValue("to")
+ node.getAttributeValue("to"),
+ node.getAttributeValue("participant")
)
return entity
|
Include participant in outgoing ack
|
## Code Before:
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .ack import AckProtocolEntity
class OutgoingAckProtocolEntity(AckProtocolEntity):
'''
<ack type="{{delivery | read}}" class="{{message | receipt | ?}}" id="{{MESSAGE_ID}} to={{TO_JID}}">
</ack>
'''
def __init__(self, _id, _class, _type, _to):
super(OutgoingAckProtocolEntity, self).__init__(_id, _class)
self.setOutgoingData(_type, _to)
def setOutgoingData(self, _type, _to):
self._type = _type
self._to = _to
def toProtocolTreeNode(self):
node = super(OutgoingAckProtocolEntity, self).toProtocolTreeNode()
if self._type:
node.setAttribute("type", self._type)
node.setAttribute("to", self._to)
return node
def __str__(self):
out = super(OutgoingAckProtocolEntity, self).__str__()
out += "Type: %s\n" % self._type
out += "To: %s\n" % self._to
return out
@staticmethod
def fromProtocolTreeNode(node):
entity = AckProtocolEntity.fromProtocolTreeNode(node)
entity.__class__ = OutgoingAckProtocolEntity
entity.setOutgoingData(
node.getAttributeValue("type"),
node.getAttributeValue("to")
)
return entity
## Instruction:
Include participant in outgoing ack
## Code After:
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .ack import AckProtocolEntity
class OutgoingAckProtocolEntity(AckProtocolEntity):
'''
<ack type="{{delivery | read}}" class="{{message | receipt | ?}}" id="{{MESSAGE_ID}} to={{TO_JID}}">
</ack>
<ack to="{{GROUP_JID}}" participant="{{JID}}" id="{{MESSAGE_ID}}" class="receipt" type="{{read | }}">
</ack>
'''
def __init__(self, _id, _class, _type, _to, _participant = None):
super(OutgoingAckProtocolEntity, self).__init__(_id, _class)
self.setOutgoingData(_type, _to, _participant)
def setOutgoingData(self, _type, _to, _participant):
self._type = _type
self._to = _to
self._participant = _participant
def toProtocolTreeNode(self):
node = super(OutgoingAckProtocolEntity, self).toProtocolTreeNode()
if self._type:
node.setAttribute("type", self._type)
node.setAttribute("to", self._to)
if self._participant:
node.setAttribute("participant", self._participant)
return node
def __str__(self):
out = super(OutgoingAckProtocolEntity, self).__str__()
out += "Type: %s\n" % self._type
out += "To: %s\n" % self._to
if self._participant:
out += "Participant: %s\n" % self._participant
return out
@staticmethod
def fromProtocolTreeNode(node):
entity = AckProtocolEntity.fromProtocolTreeNode(node)
entity.__class__ = OutgoingAckProtocolEntity
entity.setOutgoingData(
node.getAttributeValue("type"),
node.getAttributeValue("to"),
node.getAttributeValue("participant")
)
return entity
|
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .ack import AckProtocolEntity
class OutgoingAckProtocolEntity(AckProtocolEntity):
'''
<ack type="{{delivery | read}}" class="{{message | receipt | ?}}" id="{{MESSAGE_ID}} to={{TO_JID}}">
</ack>
+
+ <ack to="{{GROUP_JID}}" participant="{{JID}}" id="{{MESSAGE_ID}}" class="receipt" type="{{read | }}">
+ </ack>
+
'''
- def __init__(self, _id, _class, _type, _to):
+ def __init__(self, _id, _class, _type, _to, _participant = None):
? +++++++++++++++++++++
super(OutgoingAckProtocolEntity, self).__init__(_id, _class)
- self.setOutgoingData(_type, _to)
+ self.setOutgoingData(_type, _to, _participant)
? ++++++++++++++
- def setOutgoingData(self, _type, _to):
+ def setOutgoingData(self, _type, _to, _participant):
? ++++++++++++++
self._type = _type
self._to = _to
+ self._participant = _participant
def toProtocolTreeNode(self):
node = super(OutgoingAckProtocolEntity, self).toProtocolTreeNode()
if self._type:
node.setAttribute("type", self._type)
node.setAttribute("to", self._to)
+ if self._participant:
+ node.setAttribute("participant", self._participant)
return node
def __str__(self):
out = super(OutgoingAckProtocolEntity, self).__str__()
out += "Type: %s\n" % self._type
out += "To: %s\n" % self._to
+ if self._participant:
+ out += "Participant: %s\n" % self._participant
return out
@staticmethod
def fromProtocolTreeNode(node):
entity = AckProtocolEntity.fromProtocolTreeNode(node)
entity.__class__ = OutgoingAckProtocolEntity
entity.setOutgoingData(
node.getAttributeValue("type"),
- node.getAttributeValue("to")
+ node.getAttributeValue("to"),
? +
+ node.getAttributeValue("participant")
)
return entity
|
3767fc5d1bf21d4321342e7332f569a97b7396b4
|
ipython/config_helper_functions.py
|
ipython/config_helper_functions.py
|
import os
import IPython.ipapi
ip = IPython.ipapi.get()
# some config helper functions you can use
def import_mod(modules):
""" Usage: import_mod("os sys") """
for m in modules.split():
ip.ex("import %s" % m)
def import_all(modules):
""" Usage: import_all("os sys") """
for m in modules.split():
ip.ex("from %s import *" % m)
def import_some(module,things):
'''Usage: import_some('path','makepath path')'''
for thing in things.split():
ip.ex('from %s import %s' % (module,thing))
def execf(fname):
""" Execute a file in user namespace """
ip.ex('execfile("%s")' % os.path.expanduser(fname))
|
"""Some config helper functions you can use"""
import os
import IPython.ipapi
ip = IPython.ipapi.get()
def import_mod(modules):
""" Usage: import_mod("os sys") """
for m in modules.split():
ip.ex("import %s" % m)
def import_all(modules):
""" Usage: import_all("os sys") """
for m in modules.split():
ip.ex("from %s import *" % m)
def import_some(module,things):
"""Usage: import_some('path','path')"""
for thing in things.split():
ip.ex('from %s import %s' % (module,thing))
def execf(fname):
""" Execute a file in user namespace """
ip.ex('execfile("%s")' % os.path.expanduser(fname))
|
Use triple quotes for docstrings
|
Use triple quotes for docstrings
git-svn-id: 71e07130022bd36facd9e5e4cff6aac120a9d616@756 f6a8b572-8bf8-43d5-8295-329fc01c5294
|
Python
|
mit
|
jalanb/jab,jalanb/dotjab,jalanb/dotjab,jalanb/jab
|
+ """Some config helper functions you can use"""
import os
import IPython.ipapi
ip = IPython.ipapi.get()
- # some config helper functions you can use
def import_mod(modules):
""" Usage: import_mod("os sys") """
for m in modules.split():
ip.ex("import %s" % m)
def import_all(modules):
""" Usage: import_all("os sys") """
for m in modules.split():
ip.ex("from %s import *" % m)
def import_some(module,things):
- '''Usage: import_some('path','makepath path')'''
+ """Usage: import_some('path','path')"""
for thing in things.split():
ip.ex('from %s import %s' % (module,thing))
def execf(fname):
""" Execute a file in user namespace """
ip.ex('execfile("%s")' % os.path.expanduser(fname))
|
Use triple quotes for docstrings
|
## Code Before:
import os
import IPython.ipapi
ip = IPython.ipapi.get()
# some config helper functions you can use
def import_mod(modules):
""" Usage: import_mod("os sys") """
for m in modules.split():
ip.ex("import %s" % m)
def import_all(modules):
""" Usage: import_all("os sys") """
for m in modules.split():
ip.ex("from %s import *" % m)
def import_some(module,things):
'''Usage: import_some('path','makepath path')'''
for thing in things.split():
ip.ex('from %s import %s' % (module,thing))
def execf(fname):
""" Execute a file in user namespace """
ip.ex('execfile("%s")' % os.path.expanduser(fname))
## Instruction:
Use triple quotes for docstrings
## Code After:
"""Some config helper functions you can use"""
import os
import IPython.ipapi
ip = IPython.ipapi.get()
def import_mod(modules):
""" Usage: import_mod("os sys") """
for m in modules.split():
ip.ex("import %s" % m)
def import_all(modules):
""" Usage: import_all("os sys") """
for m in modules.split():
ip.ex("from %s import *" % m)
def import_some(module,things):
"""Usage: import_some('path','path')"""
for thing in things.split():
ip.ex('from %s import %s' % (module,thing))
def execf(fname):
""" Execute a file in user namespace """
ip.ex('execfile("%s")' % os.path.expanduser(fname))
|
+ """Some config helper functions you can use"""
import os
import IPython.ipapi
ip = IPython.ipapi.get()
- # some config helper functions you can use
def import_mod(modules):
""" Usage: import_mod("os sys") """
for m in modules.split():
ip.ex("import %s" % m)
def import_all(modules):
""" Usage: import_all("os sys") """
for m in modules.split():
ip.ex("from %s import *" % m)
def import_some(module,things):
- '''Usage: import_some('path','makepath path')'''
? ^^^ --------- ^^^
+ """Usage: import_some('path','path')"""
? ^^^ ^^^
for thing in things.split():
ip.ex('from %s import %s' % (module,thing))
def execf(fname):
""" Execute a file in user namespace """
ip.ex('execfile("%s")' % os.path.expanduser(fname))
|
970978b5355259fe943d5efed1b8b4ce945fdfa7
|
weather.py
|
weather.py
|
from os.path import expanduser,isfile
from sys import argv
from urllib import urlopen
location_path="~/.location"
def location_from_homedir():
if isfile(expanduser(location_path)):
with open(expanduser(location_path)) as f:
return "&".join(f.read().split("\n"))
else:
print("no location file at ", location_path)
def location_from_file(file):
try:
f = open(expanduser(file),'r')
except:
print("file ", location_file, " not found")
location_from_homedir
if len(argv) == 1:
# not given location file
data = location_from_homedir()
elif len(argv) == 2:
# given location file
data = location_from_file(argv[1])
else:
# wrong number of arguments
print("Usage: ", argv[0], " [location file]")
url="http://forecast.weather.gov/MapClick.php?"+data+"FcstType=digitalDWML"
forecast = urlopen(url).read()
|
from os.path import expanduser,isfile
import sys
from urllib import urlopen
location_path="~/.location"
def location_from_homedir():
if isfile(expanduser(location_path)):
with open(expanduser(location_path)) as f:
return "&".join(f.read().split("\n"))
else:
print("no location file at ", location_path)
sys.exit(2)
def location_from_file(location_file):
try:
f = open(expanduser(location_file),'r')
except:
print("file ", location_file, " not found\nLooking in home directory")
return location_from_homedir()
if len(sys.argv) == 1:
# not given location file
data = location_from_homedir()
elif len(sys.argv) == 2:
# given location file
data = location_from_file(sys.argv[1])
else:
# wrong number of arguments
print("Usage: ", sys.argv[0], " [location file]")
sys.exit(1)
url="http://forecast.weather.gov/MapClick.php?"+data+"FcstType=digitalDWML"
forecast = urlopen(url).read()
|
Debug control flow and exit on errors
|
Debug control flow and exit on errors
|
Python
|
mit
|
robbystk/weather
|
from os.path import expanduser,isfile
- from sys import argv
+ import sys
from urllib import urlopen
location_path="~/.location"
def location_from_homedir():
if isfile(expanduser(location_path)):
with open(expanduser(location_path)) as f:
return "&".join(f.read().split("\n"))
else:
print("no location file at ", location_path)
+ sys.exit(2)
+ def location_from_file(location_file):
+ try:
+ f = open(expanduser(location_file),'r')
+ except:
+ print("file ", location_file, " not found\nLooking in home directory")
+ return location_from_homedir()
- def location_from_file(file):
- try:
- f = open(expanduser(file),'r')
- except:
- print("file ", location_file, " not found")
- location_from_homedir
-
- if len(argv) == 1:
+ if len(sys.argv) == 1:
# not given location file
data = location_from_homedir()
- elif len(argv) == 2:
+ elif len(sys.argv) == 2:
# given location file
- data = location_from_file(argv[1])
+ data = location_from_file(sys.argv[1])
else:
# wrong number of arguments
- print("Usage: ", argv[0], " [location file]")
+ print("Usage: ", sys.argv[0], " [location file]")
+ sys.exit(1)
url="http://forecast.weather.gov/MapClick.php?"+data+"FcstType=digitalDWML"
forecast = urlopen(url).read()
|
Debug control flow and exit on errors
|
## Code Before:
from os.path import expanduser,isfile
from sys import argv
from urllib import urlopen
location_path="~/.location"
def location_from_homedir():
if isfile(expanduser(location_path)):
with open(expanduser(location_path)) as f:
return "&".join(f.read().split("\n"))
else:
print("no location file at ", location_path)
def location_from_file(file):
try:
f = open(expanduser(file),'r')
except:
print("file ", location_file, " not found")
location_from_homedir
if len(argv) == 1:
# not given location file
data = location_from_homedir()
elif len(argv) == 2:
# given location file
data = location_from_file(argv[1])
else:
# wrong number of arguments
print("Usage: ", argv[0], " [location file]")
url="http://forecast.weather.gov/MapClick.php?"+data+"FcstType=digitalDWML"
forecast = urlopen(url).read()
## Instruction:
Debug control flow and exit on errors
## Code After:
from os.path import expanduser,isfile
import sys
from urllib import urlopen
location_path="~/.location"
def location_from_homedir():
if isfile(expanduser(location_path)):
with open(expanduser(location_path)) as f:
return "&".join(f.read().split("\n"))
else:
print("no location file at ", location_path)
sys.exit(2)
def location_from_file(location_file):
try:
f = open(expanduser(location_file),'r')
except:
print("file ", location_file, " not found\nLooking in home directory")
return location_from_homedir()
if len(sys.argv) == 1:
# not given location file
data = location_from_homedir()
elif len(sys.argv) == 2:
# given location file
data = location_from_file(sys.argv[1])
else:
# wrong number of arguments
print("Usage: ", sys.argv[0], " [location file]")
sys.exit(1)
url="http://forecast.weather.gov/MapClick.php?"+data+"FcstType=digitalDWML"
forecast = urlopen(url).read()
|
from os.path import expanduser,isfile
- from sys import argv
+ import sys
from urllib import urlopen
location_path="~/.location"
def location_from_homedir():
if isfile(expanduser(location_path)):
with open(expanduser(location_path)) as f:
return "&".join(f.read().split("\n"))
else:
print("no location file at ", location_path)
+ sys.exit(2)
+ def location_from_file(location_file):
+ try:
+ f = open(expanduser(location_file),'r')
+ except:
+ print("file ", location_file, " not found\nLooking in home directory")
+ return location_from_homedir()
- def location_from_file(file):
- try:
- f = open(expanduser(file),'r')
- except:
- print("file ", location_file, " not found")
- location_from_homedir
-
- if len(argv) == 1:
+ if len(sys.argv) == 1:
? ++++
# not given location file
data = location_from_homedir()
- elif len(argv) == 2:
+ elif len(sys.argv) == 2:
? ++++
# given location file
- data = location_from_file(argv[1])
+ data = location_from_file(sys.argv[1])
? ++++
else:
# wrong number of arguments
- print("Usage: ", argv[0], " [location file]")
+ print("Usage: ", sys.argv[0], " [location file]")
? ++++
+ sys.exit(1)
url="http://forecast.weather.gov/MapClick.php?"+data+"FcstType=digitalDWML"
forecast = urlopen(url).read()
|
d60b16912cc3aa5c0a4f231b63b564683b2b8f64
|
parameters/enums.py
|
parameters/enums.py
|
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
name='DEFAULT_PROTOCOL',
default='https',
kind='str',
verbose_name=_('default protocol: https'),
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
|
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
name='DEFAULT_URL_PROTOCOL',
default='https',
kind='str',
verbose_name=_('Default url protocol'),
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
|
Rename parameter DEFAULT_PROTOCOL to DEFAULT_URL_PROTOCOL
|
Rename parameter DEFAULT_PROTOCOL to DEFAULT_URL_PROTOCOL
|
Python
|
mit
|
magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3
|
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
- name='DEFAULT_PROTOCOL',
+ name='DEFAULT_URL_PROTOCOL',
default='https',
kind='str',
- verbose_name=_('default protocol: https'),
+ verbose_name=_('Default url protocol'),
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
|
Rename parameter DEFAULT_PROTOCOL to DEFAULT_URL_PROTOCOL
|
## Code Before:
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
name='DEFAULT_PROTOCOL',
default='https',
kind='str',
verbose_name=_('default protocol: https'),
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
## Instruction:
Rename parameter DEFAULT_PROTOCOL to DEFAULT_URL_PROTOCOL
## Code After:
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
name='DEFAULT_URL_PROTOCOL',
default='https',
kind='str',
verbose_name=_('Default url protocol'),
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
|
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
- name='DEFAULT_PROTOCOL',
+ name='DEFAULT_URL_PROTOCOL',
? ++++
default='https',
kind='str',
- verbose_name=_('default protocol: https'),
? ^ -------
+ verbose_name=_('Default url protocol'),
? ^ ++++
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
|
9830a8d3cf140af5af53918db51ede4b82392dd5
|
dbcollection/datasets/mscoco/load_data_test.py
|
dbcollection/datasets/mscoco/load_data_test.py
|
import os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
data[annot['id']] = {
"width" : annot['width'],
"height" : annot['height'],
"filename" : os.path.join(image_dir, annot['file_name'])
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
"supercategory" : annot['supercategory']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
return {set_name : [data, category_list, supercategory_list]}
|
import os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
data[annot['file_name']] = {
"file_name" : os.path.join(image_dir, annot['file_name']),
"width" : annot['width'],
"height" : annot['height'],
"id" : annot['id'],
"coco_url" : annot['coco_url'],
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
"supercategory" : annot['supercategory'],
"id" : annot['id']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
return {set_name : [sorted(data), annotations, category_list, supercategory_list]}
|
Add annotations var to returned data
|
db: Add annotations var to returned data
|
Python
|
mit
|
dbcollection/dbcollection,farrajota/dbcollection
|
import os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
- data[annot['id']] = {
+ data[annot['file_name']] = {
+ "file_name" : os.path.join(image_dir, annot['file_name']),
"width" : annot['width'],
"height" : annot['height'],
- "filename" : os.path.join(image_dir, annot['file_name'])
+ "id" : annot['id'],
+ "coco_url" : annot['coco_url'],
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
- "supercategory" : annot['supercategory']
+ "supercategory" : annot['supercategory'],
+ "id" : annot['id']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
- return {set_name : [data, category_list, supercategory_list]}
+ return {set_name : [sorted(data), annotations, category_list, supercategory_list]}
+
|
Add annotations var to returned data
|
## Code Before:
import os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
data[annot['id']] = {
"width" : annot['width'],
"height" : annot['height'],
"filename" : os.path.join(image_dir, annot['file_name'])
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
"supercategory" : annot['supercategory']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
return {set_name : [data, category_list, supercategory_list]}
## Instruction:
Add annotations var to returned data
## Code After:
import os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
data[annot['file_name']] = {
"file_name" : os.path.join(image_dir, annot['file_name']),
"width" : annot['width'],
"height" : annot['height'],
"id" : annot['id'],
"coco_url" : annot['coco_url'],
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
"supercategory" : annot['supercategory'],
"id" : annot['id']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
return {set_name : [sorted(data), annotations, category_list, supercategory_list]}
|
import os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
- data[annot['id']] = {
? ^
+ data[annot['file_name']] = {
? + ^^^^^^^
+ "file_name" : os.path.join(image_dir, annot['file_name']),
"width" : annot['width'],
"height" : annot['height'],
- "filename" : os.path.join(image_dir, annot['file_name'])
+ "id" : annot['id'],
+ "coco_url" : annot['coco_url'],
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
- "supercategory" : annot['supercategory']
+ "supercategory" : annot['supercategory'],
? +
+ "id" : annot['id']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
- return {set_name : [data, category_list, supercategory_list]}
+ return {set_name : [sorted(data), annotations, category_list, supercategory_list]}
? +++++++ ++++++++++++++
|
e2fa4b150546be4b4f0ae59f18ef6ba2b6180d1a
|
accounts/serializers.py
|
accounts/serializers.py
|
"""Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
model = User
fields = (
'id',
'username',
'email',
'website',
'avatar',
'steamid',
'is_staff',
)
|
"""Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
model = User
fields = (
"id",
"username",
"email",
"website",
"avatar_url",
"steamid",
"is_staff",
)
|
Change avatar to avatar_url in the user API
|
Change avatar to avatar_url in the user API
|
Python
|
agpl-3.0
|
lutris/website,lutris/website,lutris/website,lutris/website
|
"""Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
+
model = User
fields = (
- 'id',
+ "id",
- 'username',
+ "username",
- 'email',
+ "email",
- 'website',
+ "website",
- 'avatar',
+ "avatar_url",
- 'steamid',
+ "steamid",
- 'is_staff',
+ "is_staff",
)
|
Change avatar to avatar_url in the user API
|
## Code Before:
"""Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
model = User
fields = (
'id',
'username',
'email',
'website',
'avatar',
'steamid',
'is_staff',
)
## Instruction:
Change avatar to avatar_url in the user API
## Code After:
"""Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
model = User
fields = (
"id",
"username",
"email",
"website",
"avatar_url",
"steamid",
"is_staff",
)
|
"""Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
+
model = User
fields = (
- 'id',
? ^ ^
+ "id",
? ^ ^
- 'username',
? ^ ^
+ "username",
? ^ ^
- 'email',
? ^ ^
+ "email",
? ^ ^
- 'website',
? ^ ^
+ "website",
? ^ ^
- 'avatar',
? ^ ^
+ "avatar_url",
? ^ ^^^^^
- 'steamid',
? ^ ^
+ "steamid",
? ^ ^
- 'is_staff',
? ^ ^
+ "is_staff",
? ^ ^
)
|
256bfb9e2d04fbd03ec2f4d3551e8a9d5ae11766
|
cardbox/deck_urls.py
|
cardbox/deck_urls.py
|
from django.conf.urls import patterns, include, url
import deck_views
urlpatterns = patterns('',
url(r'^$', deck_views.index, name='index'),
)
|
from django.conf.urls import patterns, include, url
import deck_views
urlpatterns = patterns('',
url(r'^$',
deck_views.DeckList.as_view(template_name="cardbox/deck/deck_list.html"),
name='deck_list'),
url(r'^new$', deck_views.DeckCreate.as_view(
template_name="cardbox/deck/deck_form.html"), name='deck_new'),
url(r'^edit/(?P<pk>\d+)$', deck_views.DeckUpdate.as_view(
template_name="cardbox/deck/deck_form.html"), name='deck_edit'),
url(r'^delete/(?P<pk>\d+)$', deck_views.DeckDelete.as_view(
template_name="cardbox/deck/deck_confirm_delete.html"),
name='deck_delete'),
url(r'^detail/(?P<pk>\d+)/$', deck_views.DeckDetailView.as_view(
template_name="cardbox/deck/deck_detail.html"), name='deck_detail')
)
|
Add URL rules for deck CRUD
|
Add URL rules for deck CRUD
|
Python
|
mit
|
DummyDivision/Tsune,DummyDivision/Tsune,DummyDivision/Tsune
|
from django.conf.urls import patterns, include, url
import deck_views
urlpatterns = patterns('',
- url(r'^$', deck_views.index, name='index'),
+ url(r'^$',
+ deck_views.DeckList.as_view(template_name="cardbox/deck/deck_list.html"),
+ name='deck_list'),
+ url(r'^new$', deck_views.DeckCreate.as_view(
+ template_name="cardbox/deck/deck_form.html"), name='deck_new'),
+ url(r'^edit/(?P<pk>\d+)$', deck_views.DeckUpdate.as_view(
+ template_name="cardbox/deck/deck_form.html"), name='deck_edit'),
+ url(r'^delete/(?P<pk>\d+)$', deck_views.DeckDelete.as_view(
+ template_name="cardbox/deck/deck_confirm_delete.html"),
+ name='deck_delete'),
+ url(r'^detail/(?P<pk>\d+)/$', deck_views.DeckDetailView.as_view(
+ template_name="cardbox/deck/deck_detail.html"), name='deck_detail')
)
|
Add URL rules for deck CRUD
|
## Code Before:
from django.conf.urls import patterns, include, url
import deck_views
urlpatterns = patterns('',
url(r'^$', deck_views.index, name='index'),
)
## Instruction:
Add URL rules for deck CRUD
## Code After:
from django.conf.urls import patterns, include, url
import deck_views
urlpatterns = patterns('',
url(r'^$',
deck_views.DeckList.as_view(template_name="cardbox/deck/deck_list.html"),
name='deck_list'),
url(r'^new$', deck_views.DeckCreate.as_view(
template_name="cardbox/deck/deck_form.html"), name='deck_new'),
url(r'^edit/(?P<pk>\d+)$', deck_views.DeckUpdate.as_view(
template_name="cardbox/deck/deck_form.html"), name='deck_edit'),
url(r'^delete/(?P<pk>\d+)$', deck_views.DeckDelete.as_view(
template_name="cardbox/deck/deck_confirm_delete.html"),
name='deck_delete'),
url(r'^detail/(?P<pk>\d+)/$', deck_views.DeckDetailView.as_view(
template_name="cardbox/deck/deck_detail.html"), name='deck_detail')
)
|
from django.conf.urls import patterns, include, url
import deck_views
urlpatterns = patterns('',
- url(r'^$', deck_views.index, name='index'),
+ url(r'^$',
+ deck_views.DeckList.as_view(template_name="cardbox/deck/deck_list.html"),
+ name='deck_list'),
+ url(r'^new$', deck_views.DeckCreate.as_view(
+ template_name="cardbox/deck/deck_form.html"), name='deck_new'),
+ url(r'^edit/(?P<pk>\d+)$', deck_views.DeckUpdate.as_view(
+ template_name="cardbox/deck/deck_form.html"), name='deck_edit'),
+ url(r'^delete/(?P<pk>\d+)$', deck_views.DeckDelete.as_view(
+ template_name="cardbox/deck/deck_confirm_delete.html"),
+ name='deck_delete'),
+ url(r'^detail/(?P<pk>\d+)/$', deck_views.DeckDetailView.as_view(
+ template_name="cardbox/deck/deck_detail.html"), name='deck_detail')
)
|
6f8a19c46a1d8b6b31039f212e733cd660551de7
|
mws/apis/__init__.py
|
mws/apis/__init__.py
|
from .feeds import Feeds
from .finances import Finances
from .inbound_shipments import InboundShipments
from .inventory import Inventory
from .merchant_fulfillment import MerchantFulfillment
from .offamazonpayments import OffAmazonPayments
from .orders import Orders
from .products import Products
from .recommendations import Recommendations
from .reports import Reports
from .sellers import Sellers
from .outbound_shipments import OutboundShipments
__all__ = [
'Feeds',
'Finances',
'InboundShipments',
'Inventory',
'MerchantFulfillment',
'OffAmazonPayments',
'Orders',
'OutboundShipments',
'Products',
'Recommendations',
'Reports',
'Sellers',
]
|
from .feeds import Feeds
from .finances import Finances
from .inbound_shipments import InboundShipments
from .inventory import Inventory
from .merchant_fulfillment import MerchantFulfillment
from .offamazonpayments import OffAmazonPayments
from .orders import Orders
from .outbound_shipments import OutboundShipments
from .products import Products
from .recommendations import Recommendations
from .reports import Reports
from .sellers import Sellers
from .subscriptions import Subscriptions
__all__ = [
'Feeds',
'Finances',
'InboundShipments',
'Inventory',
'MerchantFulfillment',
'OffAmazonPayments',
'Orders',
'OutboundShipments',
'Products',
'Recommendations',
'Reports',
'Sellers',
'Subscriptions',
]
|
Include the new Subscriptions stub
|
Include the new Subscriptions stub
|
Python
|
unlicense
|
Bobspadger/python-amazon-mws,GriceTurrble/python-amazon-mws
|
from .feeds import Feeds
from .finances import Finances
from .inbound_shipments import InboundShipments
from .inventory import Inventory
from .merchant_fulfillment import MerchantFulfillment
from .offamazonpayments import OffAmazonPayments
from .orders import Orders
+ from .outbound_shipments import OutboundShipments
from .products import Products
from .recommendations import Recommendations
from .reports import Reports
from .sellers import Sellers
- from .outbound_shipments import OutboundShipments
+ from .subscriptions import Subscriptions
__all__ = [
'Feeds',
'Finances',
'InboundShipments',
'Inventory',
'MerchantFulfillment',
'OffAmazonPayments',
'Orders',
'OutboundShipments',
'Products',
'Recommendations',
'Reports',
'Sellers',
+ 'Subscriptions',
]
|
Include the new Subscriptions stub
|
## Code Before:
from .feeds import Feeds
from .finances import Finances
from .inbound_shipments import InboundShipments
from .inventory import Inventory
from .merchant_fulfillment import MerchantFulfillment
from .offamazonpayments import OffAmazonPayments
from .orders import Orders
from .products import Products
from .recommendations import Recommendations
from .reports import Reports
from .sellers import Sellers
from .outbound_shipments import OutboundShipments
__all__ = [
'Feeds',
'Finances',
'InboundShipments',
'Inventory',
'MerchantFulfillment',
'OffAmazonPayments',
'Orders',
'OutboundShipments',
'Products',
'Recommendations',
'Reports',
'Sellers',
]
## Instruction:
Include the new Subscriptions stub
## Code After:
from .feeds import Feeds
from .finances import Finances
from .inbound_shipments import InboundShipments
from .inventory import Inventory
from .merchant_fulfillment import MerchantFulfillment
from .offamazonpayments import OffAmazonPayments
from .orders import Orders
from .outbound_shipments import OutboundShipments
from .products import Products
from .recommendations import Recommendations
from .reports import Reports
from .sellers import Sellers
from .subscriptions import Subscriptions
__all__ = [
'Feeds',
'Finances',
'InboundShipments',
'Inventory',
'MerchantFulfillment',
'OffAmazonPayments',
'Orders',
'OutboundShipments',
'Products',
'Recommendations',
'Reports',
'Sellers',
'Subscriptions',
]
|
from .feeds import Feeds
from .finances import Finances
from .inbound_shipments import InboundShipments
from .inventory import Inventory
from .merchant_fulfillment import MerchantFulfillment
from .offamazonpayments import OffAmazonPayments
from .orders import Orders
+ from .outbound_shipments import OutboundShipments
from .products import Products
from .recommendations import Recommendations
from .reports import Reports
from .sellers import Sellers
- from .outbound_shipments import OutboundShipments
+ from .subscriptions import Subscriptions
__all__ = [
'Feeds',
'Finances',
'InboundShipments',
'Inventory',
'MerchantFulfillment',
'OffAmazonPayments',
'Orders',
'OutboundShipments',
'Products',
'Recommendations',
'Reports',
'Sellers',
+ 'Subscriptions',
]
|
9a49ce93428d6e7bdfeebbed906a1868dd844169
|
anycluster/urls.py
|
anycluster/urls.py
|
from django.conf.urls import patterns, url
from anycluster import views
from django.conf import settings
urlpatterns = patterns('',
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
)
|
from django.conf.urls import url
from anycluster import views
from django.conf import settings
urlpatterns = [
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
]
|
Update url format to support Django 1.10
|
Update url format to support Django 1.10
|
Python
|
mit
|
biodiv/anycluster,biodiv/anycluster,biodiv/anycluster,biodiv/anycluster,biodiv/anycluster
|
- from django.conf.urls import patterns, url
+ from django.conf.urls import url
from anycluster import views
from django.conf import settings
- urlpatterns = patterns('',
+ urlpatterns = [
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
- )
+ ]
|
Update url format to support Django 1.10
|
## Code Before:
from django.conf.urls import patterns, url
from anycluster import views
from django.conf import settings
urlpatterns = patterns('',
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
)
## Instruction:
Update url format to support Django 1.10
## Code After:
from django.conf.urls import url
from anycluster import views
from django.conf import settings
urlpatterns = [
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
]
|
- from django.conf.urls import patterns, url
? ----------
+ from django.conf.urls import url
from anycluster import views
from django.conf import settings
- urlpatterns = patterns('',
+ urlpatterns = [
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
- )
+ ]
|
82d90487a43e309074e5572b6ac529a707345274
|
fileutils.py
|
fileutils.py
|
from io import FileIO, BufferedWriter
__author__ = 'SeomGi, Han'
__credits__ = ['SeomGi, Han']
__copyright__ = 'Copyright 2015, Python Utils Project'
__license__ = 'MIT'
__version__ = '1.0.0'
__maintainer__ = 'SeomGi, Han'
__email__ = '[email protected]'
__status__ = 'Production'
class FileUtils:
def copy_buffered_io_to_file(self, buffered_io, file_path):
with FileIO(file_path, mode='wb') as raw_output_io:
with BufferedWriter(raw_output_io) as writer:
while 1:
line = buffered_io.readline()
if not line:
break
writer.write(line)
buffered_io.close()
|
import os
from io import FileIO, BufferedReader, BufferedWriter
__author__ = 'SeomGi, Han'
__credits__ = ['SeomGi, Han']
__copyright__ = 'Copyright 2015, Python Utils Project'
__license__ = 'MIT'
__version__ = '1.0.0'
__maintainer__ = 'SeomGi, Han'
__email__ = '[email protected]'
__status__ = 'Production'
class FileUtils:
def copy_file_stream_to_file(self, file, to_path):
self.copy_buffered_io_to_file(BufferedReader(file), to_path)
def copy_buffered_io_to_file(self, buffered_io, file_path):
os.makedirs(file_path[:file_path.rfind('/') + 1], exist_ok=True)
with FileIO(file_path, mode='wb') as raw_output_io:
with BufferedWriter(raw_output_io) as writer:
while 1:
line = buffered_io.readline()
if not line:
break
writer.write(line)
buffered_io.close()
|
Add wrapped raw file copy function that use BufferedReader. And add logic to make directory if target directory isn't exist.
|
Add wrapped raw file copy function that use BufferedReader. And add logic to make directory if target directory isn't exist.
|
Python
|
mit
|
iandmyhand/python-utils
|
+ import os
- from io import FileIO, BufferedWriter
+ from io import FileIO, BufferedReader, BufferedWriter
__author__ = 'SeomGi, Han'
__credits__ = ['SeomGi, Han']
__copyright__ = 'Copyright 2015, Python Utils Project'
__license__ = 'MIT'
__version__ = '1.0.0'
__maintainer__ = 'SeomGi, Han'
__email__ = '[email protected]'
__status__ = 'Production'
class FileUtils:
+ def copy_file_stream_to_file(self, file, to_path):
+ self.copy_buffered_io_to_file(BufferedReader(file), to_path)
+
def copy_buffered_io_to_file(self, buffered_io, file_path):
+ os.makedirs(file_path[:file_path.rfind('/') + 1], exist_ok=True)
with FileIO(file_path, mode='wb') as raw_output_io:
with BufferedWriter(raw_output_io) as writer:
while 1:
line = buffered_io.readline()
if not line:
break
writer.write(line)
buffered_io.close()
|
Add wrapped raw file copy function that use BufferedReader. And add logic to make directory if target directory isn't exist.
|
## Code Before:
from io import FileIO, BufferedWriter
__author__ = 'SeomGi, Han'
__credits__ = ['SeomGi, Han']
__copyright__ = 'Copyright 2015, Python Utils Project'
__license__ = 'MIT'
__version__ = '1.0.0'
__maintainer__ = 'SeomGi, Han'
__email__ = '[email protected]'
__status__ = 'Production'
class FileUtils:
def copy_buffered_io_to_file(self, buffered_io, file_path):
with FileIO(file_path, mode='wb') as raw_output_io:
with BufferedWriter(raw_output_io) as writer:
while 1:
line = buffered_io.readline()
if not line:
break
writer.write(line)
buffered_io.close()
## Instruction:
Add wrapped raw file copy function that use BufferedReader. And add logic to make directory if target directory isn't exist.
## Code After:
import os
from io import FileIO, BufferedReader, BufferedWriter
__author__ = 'SeomGi, Han'
__credits__ = ['SeomGi, Han']
__copyright__ = 'Copyright 2015, Python Utils Project'
__license__ = 'MIT'
__version__ = '1.0.0'
__maintainer__ = 'SeomGi, Han'
__email__ = '[email protected]'
__status__ = 'Production'
class FileUtils:
def copy_file_stream_to_file(self, file, to_path):
self.copy_buffered_io_to_file(BufferedReader(file), to_path)
def copy_buffered_io_to_file(self, buffered_io, file_path):
os.makedirs(file_path[:file_path.rfind('/') + 1], exist_ok=True)
with FileIO(file_path, mode='wb') as raw_output_io:
with BufferedWriter(raw_output_io) as writer:
while 1:
line = buffered_io.readline()
if not line:
break
writer.write(line)
buffered_io.close()
|
+ import os
- from io import FileIO, BufferedWriter
+ from io import FileIO, BufferedReader, BufferedWriter
? ++++++++++++++++
__author__ = 'SeomGi, Han'
__credits__ = ['SeomGi, Han']
__copyright__ = 'Copyright 2015, Python Utils Project'
__license__ = 'MIT'
__version__ = '1.0.0'
__maintainer__ = 'SeomGi, Han'
__email__ = '[email protected]'
__status__ = 'Production'
class FileUtils:
+ def copy_file_stream_to_file(self, file, to_path):
+ self.copy_buffered_io_to_file(BufferedReader(file), to_path)
+
def copy_buffered_io_to_file(self, buffered_io, file_path):
+ os.makedirs(file_path[:file_path.rfind('/') + 1], exist_ok=True)
with FileIO(file_path, mode='wb') as raw_output_io:
with BufferedWriter(raw_output_io) as writer:
while 1:
line = buffered_io.readline()
if not line:
break
writer.write(line)
buffered_io.close()
|
c3e2c6f77dffc2ff5874c1bb495e6de119800cf4
|
rx/core/observable/merge.py
|
rx/core/observable/merge.py
|
import rx
from rx import operators as ops
from rx.core import Observable
def _merge(*args) -> Observable:
"""Merges all the observable sequences into a single observable
sequence.
1 - merged = rx.merge(xs, ys, zs)
2 - merged = rx.merge([xs, ys, zs])
Returns:
The observable sequence that merges the elements of the
observable sequences.
"""
sources = args[:]
if isinstance(sources[0], list):
sources = sources[0]
return rx.from_iterable(sources).pipe(ops.merge_all())
|
from typing import Iterable, Union
import rx
from rx import operators as ops
from rx.core import Observable
def _merge(*args: Union[Observable, Iterable[Observable]]) -> Observable:
"""Merges all the observable sequences into a single observable
sequence.
1 - merged = rx.merge(xs, ys, zs)
2 - merged = rx.merge([xs, ys, zs])
Returns:
The observable sequence that merges the elements of the
observable sequences.
"""
sources = args[:]
if isinstance(sources[0], Iterable):
sources = sources[0]
return rx.from_iterable(sources).pipe(ops.merge_all())
|
Fix typing and accept iterable instead of list
|
Fix typing and accept iterable instead of list
|
Python
|
mit
|
ReactiveX/RxPY,ReactiveX/RxPY
|
+ from typing import Iterable, Union
+
import rx
from rx import operators as ops
from rx.core import Observable
- def _merge(*args) -> Observable:
+ def _merge(*args: Union[Observable, Iterable[Observable]]) -> Observable:
"""Merges all the observable sequences into a single observable
sequence.
1 - merged = rx.merge(xs, ys, zs)
2 - merged = rx.merge([xs, ys, zs])
Returns:
The observable sequence that merges the elements of the
observable sequences.
"""
sources = args[:]
- if isinstance(sources[0], list):
+ if isinstance(sources[0], Iterable):
sources = sources[0]
return rx.from_iterable(sources).pipe(ops.merge_all())
-
-
|
Fix typing and accept iterable instead of list
|
## Code Before:
import rx
from rx import operators as ops
from rx.core import Observable
def _merge(*args) -> Observable:
"""Merges all the observable sequences into a single observable
sequence.
1 - merged = rx.merge(xs, ys, zs)
2 - merged = rx.merge([xs, ys, zs])
Returns:
The observable sequence that merges the elements of the
observable sequences.
"""
sources = args[:]
if isinstance(sources[0], list):
sources = sources[0]
return rx.from_iterable(sources).pipe(ops.merge_all())
## Instruction:
Fix typing and accept iterable instead of list
## Code After:
from typing import Iterable, Union
import rx
from rx import operators as ops
from rx.core import Observable
def _merge(*args: Union[Observable, Iterable[Observable]]) -> Observable:
"""Merges all the observable sequences into a single observable
sequence.
1 - merged = rx.merge(xs, ys, zs)
2 - merged = rx.merge([xs, ys, zs])
Returns:
The observable sequence that merges the elements of the
observable sequences.
"""
sources = args[:]
if isinstance(sources[0], Iterable):
sources = sources[0]
return rx.from_iterable(sources).pipe(ops.merge_all())
|
+ from typing import Iterable, Union
+
import rx
from rx import operators as ops
from rx.core import Observable
- def _merge(*args) -> Observable:
+ def _merge(*args: Union[Observable, Iterable[Observable]]) -> Observable:
"""Merges all the observable sequences into a single observable
sequence.
1 - merged = rx.merge(xs, ys, zs)
2 - merged = rx.merge([xs, ys, zs])
Returns:
The observable sequence that merges the elements of the
observable sequences.
"""
sources = args[:]
- if isinstance(sources[0], list):
? ^^^
+ if isinstance(sources[0], Iterable):
? ++++++ ^
sources = sources[0]
return rx.from_iterable(sources).pipe(ops.merge_all())
-
-
|
8e8986a17b7fa38417fe39ec8fbf4e1d3ee43f64
|
arduino_flasher/reset_arduino.py
|
arduino_flasher/reset_arduino.py
|
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
|
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(1)
time.sleep(0.2)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
|
Reset script first pulls pin high
|
Reset script first pulls pin high
|
Python
|
bsd-3-clause
|
Pavlos1/SensoringJMSS,Pavlos1/SensoringJMSS,Pavlos1/SensoringJMSS,Pavlos1/SensoringJMSS
|
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
+ resetPin.write(1)
+ time.sleep(0.2)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
|
Reset script first pulls pin high
|
## Code Before:
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
## Instruction:
Reset script first pulls pin high
## Code After:
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(1)
time.sleep(0.2)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
|
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
+ resetPin.write(1)
+ time.sleep(0.2)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
|
419131bba11cab27c36ef2b21199cdc3540cde16
|
byceps/services/shop/order/actions/revoke_ticket_bundles.py
|
byceps/services/shop/order/actions/revoke_ticket_bundles.py
|
from .....typing import UserID
from ..transfer.action import ActionParameters
from ..transfer.order import Order
from . import ticket
def revoke_ticket_bundles(
order: Order,
bundle_quantity: int,
initiator_id: UserID,
parameters: ActionParameters,
) -> None:
"""Revoke all ticket bundles in this order."""
ticket.revoke_ticket_bundles(order, initiator_id)
|
from .....typing import UserID
from ..transfer.action import ActionParameters
from ..transfer.order import Order
from . import ticket_bundle
def revoke_ticket_bundles(
order: Order,
bundle_quantity: int,
initiator_id: UserID,
parameters: ActionParameters,
) -> None:
"""Revoke all ticket bundles in this order."""
ticket_bundle.revoke_ticket_bundles(order, initiator_id)
|
Fix ticket bundle revocation order action
|
Fix ticket bundle revocation order action
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
from .....typing import UserID
from ..transfer.action import ActionParameters
from ..transfer.order import Order
- from . import ticket
+ from . import ticket_bundle
def revoke_ticket_bundles(
order: Order,
bundle_quantity: int,
initiator_id: UserID,
parameters: ActionParameters,
) -> None:
"""Revoke all ticket bundles in this order."""
- ticket.revoke_ticket_bundles(order, initiator_id)
+ ticket_bundle.revoke_ticket_bundles(order, initiator_id)
|
Fix ticket bundle revocation order action
|
## Code Before:
from .....typing import UserID
from ..transfer.action import ActionParameters
from ..transfer.order import Order
from . import ticket
def revoke_ticket_bundles(
order: Order,
bundle_quantity: int,
initiator_id: UserID,
parameters: ActionParameters,
) -> None:
"""Revoke all ticket bundles in this order."""
ticket.revoke_ticket_bundles(order, initiator_id)
## Instruction:
Fix ticket bundle revocation order action
## Code After:
from .....typing import UserID
from ..transfer.action import ActionParameters
from ..transfer.order import Order
from . import ticket_bundle
def revoke_ticket_bundles(
order: Order,
bundle_quantity: int,
initiator_id: UserID,
parameters: ActionParameters,
) -> None:
"""Revoke all ticket bundles in this order."""
ticket_bundle.revoke_ticket_bundles(order, initiator_id)
|
from .....typing import UserID
from ..transfer.action import ActionParameters
from ..transfer.order import Order
- from . import ticket
+ from . import ticket_bundle
? +++++++
def revoke_ticket_bundles(
order: Order,
bundle_quantity: int,
initiator_id: UserID,
parameters: ActionParameters,
) -> None:
"""Revoke all ticket bundles in this order."""
- ticket.revoke_ticket_bundles(order, initiator_id)
+ ticket_bundle.revoke_ticket_bundles(order, initiator_id)
? +++++++
|
1cf354d834fbb81260c88718c57533a546fc9dfa
|
src/robots/actions/attitudes.py
|
src/robots/actions/attitudes.py
|
import logging; logger = logging.getLogger("robot." + __name__)
from robots.exception import RobotError
from robots.actions.look_at import sweep
from robots.action import *
###############################################################################
@action
def sorry(robot, speed = 0.5):
return sweep(robot, 45, speed)
|
import logging; logger = logging.getLogger("robot." + __name__)
import random
from robots.exception import RobotError
from robots.lowlevel import *
from robots.actions.look_at import sweep
from robots.action import *
###############################################################################
@action
@workswith(ALL)
def satisfied(robot):
actions = kb_satisfied()
return actions
@action
@same_requirements_as(sweep)
def sorry(robot, speed = 0.5):
actions = kb_sorry()
actions += sweep(robot, 45, speed)
return actions
def _generate_id():
sequence = "abcdefghijklmopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
sample = random.sample(sequence, 5)
return "".join(sample)
def _send_state(state):
state_id = _generate_id()
statements = [state_id + " rdf:type " + state,
"myself experiences " + state_id]
logger.info("Setting my mood to " + state)
return add_knowledge(statements, lifespan=10)
def kb_confused():
return _send_state("ConfusedState")
def kb_satisfied():
return _send_state("SatisfiedState")
def kb_sorry():
return _send_state("SorryState")
def kb_happy():
return _send_state("HappyState")
def kb_angry():
return _send_state("AngryState")
def kb_sad():
return _send_state("SadState")
|
Update the knowledge base according to the emotion
|
[actions/attitude] Update the knowledge base according to the emotion
|
Python
|
isc
|
chili-epfl/pyrobots,chili-epfl/pyrobots-nao
|
import logging; logger = logging.getLogger("robot." + __name__)
+
+ import random
from robots.exception import RobotError
+ from robots.lowlevel import *
from robots.actions.look_at import sweep
from robots.action import *
###############################################################################
@action
+ @workswith(ALL)
+ def satisfied(robot):
+ actions = kb_satisfied()
+ return actions
+
+
+ @action
+ @same_requirements_as(sweep)
def sorry(robot, speed = 0.5):
+ actions = kb_sorry()
- return sweep(robot, 45, speed)
+ actions += sweep(robot, 45, speed)
-
+ return actions
+ def _generate_id():
+ sequence = "abcdefghijklmopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ sample = random.sample(sequence, 5)
+ return "".join(sample)
+
+
+ def _send_state(state):
+
+
+ state_id = _generate_id()
+ statements = [state_id + " rdf:type " + state,
+ "myself experiences " + state_id]
+
+ logger.info("Setting my mood to " + state)
+ return add_knowledge(statements, lifespan=10)
+
+
+ def kb_confused():
+ return _send_state("ConfusedState")
+
+ def kb_satisfied():
+ return _send_state("SatisfiedState")
+
+ def kb_sorry():
+ return _send_state("SorryState")
+
+ def kb_happy():
+ return _send_state("HappyState")
+
+ def kb_angry():
+ return _send_state("AngryState")
+
+ def kb_sad():
+ return _send_state("SadState")
+
+
+
|
Update the knowledge base according to the emotion
|
## Code Before:
import logging; logger = logging.getLogger("robot." + __name__)
from robots.exception import RobotError
from robots.actions.look_at import sweep
from robots.action import *
###############################################################################
@action
def sorry(robot, speed = 0.5):
return sweep(robot, 45, speed)
## Instruction:
Update the knowledge base according to the emotion
## Code After:
import logging; logger = logging.getLogger("robot." + __name__)
import random
from robots.exception import RobotError
from robots.lowlevel import *
from robots.actions.look_at import sweep
from robots.action import *
###############################################################################
@action
@workswith(ALL)
def satisfied(robot):
actions = kb_satisfied()
return actions
@action
@same_requirements_as(sweep)
def sorry(robot, speed = 0.5):
actions = kb_sorry()
actions += sweep(robot, 45, speed)
return actions
def _generate_id():
sequence = "abcdefghijklmopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
sample = random.sample(sequence, 5)
return "".join(sample)
def _send_state(state):
state_id = _generate_id()
statements = [state_id + " rdf:type " + state,
"myself experiences " + state_id]
logger.info("Setting my mood to " + state)
return add_knowledge(statements, lifespan=10)
def kb_confused():
return _send_state("ConfusedState")
def kb_satisfied():
return _send_state("SatisfiedState")
def kb_sorry():
return _send_state("SorryState")
def kb_happy():
return _send_state("HappyState")
def kb_angry():
return _send_state("AngryState")
def kb_sad():
return _send_state("SadState")
|
import logging; logger = logging.getLogger("robot." + __name__)
+
+ import random
from robots.exception import RobotError
+ from robots.lowlevel import *
from robots.actions.look_at import sweep
from robots.action import *
###############################################################################
@action
+ @workswith(ALL)
+ def satisfied(robot):
+ actions = kb_satisfied()
+ return actions
+
+
+ @action
+ @same_requirements_as(sweep)
def sorry(robot, speed = 0.5):
+ actions = kb_sorry()
- return sweep(robot, 45, speed)
? ^^ ^^
+ actions += sweep(robot, 45, speed)
? ^^ ^^ ++++
-
+ return actions
+
+ def _generate_id():
+ sequence = "abcdefghijklmopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ sample = random.sample(sequence, 5)
+ return "".join(sample)
+
+
+ def _send_state(state):
+
+
+ state_id = _generate_id()
+ statements = [state_id + " rdf:type " + state,
+ "myself experiences " + state_id]
+
+ logger.info("Setting my mood to " + state)
+ return add_knowledge(statements, lifespan=10)
+
+
+ def kb_confused():
+ return _send_state("ConfusedState")
+
+ def kb_satisfied():
+ return _send_state("SatisfiedState")
+
+ def kb_sorry():
+ return _send_state("SorryState")
+
+ def kb_happy():
+ return _send_state("HappyState")
+
+ def kb_angry():
+ return _send_state("AngryState")
+
+ def kb_sad():
+ return _send_state("SadState")
+
+
|
d147d8865dc4b82eaff87d0d4dd65ba7f4622a90
|
django/contrib/admin/__init__.py
|
django/contrib/admin/__init__.py
|
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
|
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
|
Revert the removal of an unused import (in [14175]) that was referenced in documentation. Thanks for noticing, clong.
|
Revert the removal of an unused import (in [14175]) that was referenced in documentation. Thanks for noticing, clong.
git-svn-id: http://code.djangoproject.com/svn/django/trunk@14359 bcc190cf-cafb-0310-a4f2-bffc1f526a37
--HG--
extra : convert_revision : e026073455a73c9fe9a9f026b76ac783b2a12d23
|
Python
|
bsd-3-clause
|
adieu/django-nonrel,heracek/django-nonrel,adieu/django-nonrel,heracek/django-nonrel,adieu/django-nonrel,heracek/django-nonrel
|
+ from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
|
Revert the removal of an unused import (in [14175]) that was referenced in documentation. Thanks for noticing, clong.
|
## Code Before:
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
## Instruction:
Revert the removal of an unused import (in [14175]) that was referenced in documentation. Thanks for noticing, clong.
## Code After:
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
|
+ from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
|
160f29d42086a10bc38d255d8e03a30b1eb01deb
|
medical_prescription_sale/__openerp__.py
|
medical_prescription_sale/__openerp__.py
|
{
'name': 'Medical Prescription Sales',
'summary': 'Create Sale Orders from Prescriptions',
'version': '9.0.0.1.0',
'author': "LasLabs, Odoo Community Association (OCA)",
'category': 'Medical',
'depends': [
'sale',
'medical_prescription',
'medical_pharmacy',
'medical_prescription_thread',
],
"website": "https://laslabs.com",
"license": "AGPL-3",
"data": [
'data/ir_sequence.xml',
'data/product_category_data.xml',
'wizards/medical_sale_wizard_view.xml',
'wizards/medical_sale_temp_view.xml',
'views/prescription_order_line_view.xml',
'views/prescription_order_view.xml',
'views/sale_order_view.xml',
'views/medical_physician_view.xml',
'views/medical_patient_view.xml',
],
'demo': [
'demo/medical_medicament_demo.xml',
'demo/medical_medication_demo.xml',
'demo/medical_prescription_order_demo.xml',
'demo/medical_prescription_order_line_demo.xml',
],
'installable': True,
'auto_install': False,
}
|
{
'name': 'Medical Prescription Sales',
'summary': 'Create Sale Orders from Prescriptions',
'version': '9.0.0.1.0',
'author': "LasLabs, Odoo Community Association (OCA)",
'category': 'Medical',
'depends': [
'sale',
'stock',
'medical_prescription',
'medical_pharmacy',
'medical_prescription_thread',
],
"website": "https://laslabs.com",
"license": "AGPL-3",
"data": [
'data/ir_sequence.xml',
'data/product_category_data.xml',
'wizards/medical_sale_wizard_view.xml',
'wizards/medical_sale_temp_view.xml',
'views/prescription_order_line_view.xml',
'views/prescription_order_view.xml',
'views/sale_order_view.xml',
'views/medical_physician_view.xml',
'views/medical_patient_view.xml',
],
'demo': [
'demo/medical_medicament_demo.xml',
'demo/medical_medication_demo.xml',
'demo/medical_prescription_order_demo.xml',
'demo/medical_prescription_order_line_demo.xml',
],
'installable': True,
'auto_install': False,
}
|
Add dependency * Add dependency on stock to manifest file. This is needed by some of the demo data in the module, which was not installing due to its absence.
|
[FIX] medical_prescription_sale: Add dependency
* Add dependency on stock to manifest file. This is needed by some of the demo
data in the module, which was not installing due to its absence.
|
Python
|
agpl-3.0
|
laslabs/vertical-medical,laslabs/vertical-medical
|
{
'name': 'Medical Prescription Sales',
'summary': 'Create Sale Orders from Prescriptions',
'version': '9.0.0.1.0',
'author': "LasLabs, Odoo Community Association (OCA)",
'category': 'Medical',
'depends': [
'sale',
+ 'stock',
'medical_prescription',
'medical_pharmacy',
'medical_prescription_thread',
],
"website": "https://laslabs.com",
"license": "AGPL-3",
"data": [
'data/ir_sequence.xml',
'data/product_category_data.xml',
'wizards/medical_sale_wizard_view.xml',
'wizards/medical_sale_temp_view.xml',
'views/prescription_order_line_view.xml',
'views/prescription_order_view.xml',
'views/sale_order_view.xml',
'views/medical_physician_view.xml',
'views/medical_patient_view.xml',
],
'demo': [
'demo/medical_medicament_demo.xml',
'demo/medical_medication_demo.xml',
'demo/medical_prescription_order_demo.xml',
'demo/medical_prescription_order_line_demo.xml',
],
'installable': True,
'auto_install': False,
}
|
Add dependency * Add dependency on stock to manifest file. This is needed by some of the demo data in the module, which was not installing due to its absence.
|
## Code Before:
{
'name': 'Medical Prescription Sales',
'summary': 'Create Sale Orders from Prescriptions',
'version': '9.0.0.1.0',
'author': "LasLabs, Odoo Community Association (OCA)",
'category': 'Medical',
'depends': [
'sale',
'medical_prescription',
'medical_pharmacy',
'medical_prescription_thread',
],
"website": "https://laslabs.com",
"license": "AGPL-3",
"data": [
'data/ir_sequence.xml',
'data/product_category_data.xml',
'wizards/medical_sale_wizard_view.xml',
'wizards/medical_sale_temp_view.xml',
'views/prescription_order_line_view.xml',
'views/prescription_order_view.xml',
'views/sale_order_view.xml',
'views/medical_physician_view.xml',
'views/medical_patient_view.xml',
],
'demo': [
'demo/medical_medicament_demo.xml',
'demo/medical_medication_demo.xml',
'demo/medical_prescription_order_demo.xml',
'demo/medical_prescription_order_line_demo.xml',
],
'installable': True,
'auto_install': False,
}
## Instruction:
Add dependency * Add dependency on stock to manifest file. This is needed by some of the demo data in the module, which was not installing due to its absence.
## Code After:
{
'name': 'Medical Prescription Sales',
'summary': 'Create Sale Orders from Prescriptions',
'version': '9.0.0.1.0',
'author': "LasLabs, Odoo Community Association (OCA)",
'category': 'Medical',
'depends': [
'sale',
'stock',
'medical_prescription',
'medical_pharmacy',
'medical_prescription_thread',
],
"website": "https://laslabs.com",
"license": "AGPL-3",
"data": [
'data/ir_sequence.xml',
'data/product_category_data.xml',
'wizards/medical_sale_wizard_view.xml',
'wizards/medical_sale_temp_view.xml',
'views/prescription_order_line_view.xml',
'views/prescription_order_view.xml',
'views/sale_order_view.xml',
'views/medical_physician_view.xml',
'views/medical_patient_view.xml',
],
'demo': [
'demo/medical_medicament_demo.xml',
'demo/medical_medication_demo.xml',
'demo/medical_prescription_order_demo.xml',
'demo/medical_prescription_order_line_demo.xml',
],
'installable': True,
'auto_install': False,
}
|
{
'name': 'Medical Prescription Sales',
'summary': 'Create Sale Orders from Prescriptions',
'version': '9.0.0.1.0',
'author': "LasLabs, Odoo Community Association (OCA)",
'category': 'Medical',
'depends': [
'sale',
+ 'stock',
'medical_prescription',
'medical_pharmacy',
'medical_prescription_thread',
],
"website": "https://laslabs.com",
"license": "AGPL-3",
"data": [
'data/ir_sequence.xml',
'data/product_category_data.xml',
'wizards/medical_sale_wizard_view.xml',
'wizards/medical_sale_temp_view.xml',
'views/prescription_order_line_view.xml',
'views/prescription_order_view.xml',
'views/sale_order_view.xml',
'views/medical_physician_view.xml',
'views/medical_patient_view.xml',
],
'demo': [
'demo/medical_medicament_demo.xml',
'demo/medical_medication_demo.xml',
'demo/medical_prescription_order_demo.xml',
'demo/medical_prescription_order_line_demo.xml',
],
'installable': True,
'auto_install': False,
}
|
ec771b7186065443e282be84fbeda5897caba913
|
buildbot_travis/steps/base.py
|
buildbot_travis/steps/base.py
|
from buildbot.process import buildstep
from buildbot.process.buildstep import SUCCESS, FAILURE, EXCEPTION
from buildbot.process.properties import Properties
from twisted.internet import defer
from ..travisyml import TravisYml
class ConfigurableStep(buildstep.LoggingBuildStep):
"""
Base class for a step which can be tuned by changing settings in .travis.yml
"""
@defer.inlineCallbacks
def getStepConfig(self):
config = TravisYml()
struct = self.build.getProperty(".travis.yml", None)
if struct:
config.parse(struct)
defer.returnValue(config)
log = self.addLog(".travis.yml")
cmd = self.cmd = buildstep.RemoteShellCommand(workdir="build", command=["cat", ".travis.yml"])
cmd.useLog(log, False, "stdio")
yield self.runCommand(cmd)
self.cmd = None
if cmd.rc != 0:
raise buildstep.BuildStepFailed()
config = TravisYml()
config.parse(log.getText())
self.build.setProperty(".travis.yml", config.config, ".VCS")
defer.returnValue(config)
|
from buildbot.process import buildstep
from buildbot.process.buildstep import SUCCESS, FAILURE, EXCEPTION
from buildbot.process.properties import Properties
from twisted.internet import defer
from ..travisyml import TravisYml
class ConfigurableStep(buildstep.LoggingBuildStep):
"""
Base class for a step which can be tuned by changing settings in .travis.yml
"""
@defer.inlineCallbacks
def getStepConfig(self):
log = self.addLog(".travis.yml")
cmd = self.cmd = buildstep.RemoteShellCommand(workdir="build", command=["cat", ".travis.yml"])
cmd.useLog(log, False, "stdio")
yield self.runCommand(cmd)
self.cmd = None
if cmd.rc != 0:
raise buildstep.BuildStepFailed()
config = TravisYml()
config.parse(log.getText())
defer.returnValue(config)
|
Revert "Save .travis.yml into build properties"
|
Revert "Save .travis.yml into build properties"
The data is > 1024 so no dice.
This reverts commit 10960fd1465afb8de92e8fd35b1affca4f950e27.
|
Python
|
unknown
|
tardyp/buildbot_travis,tardyp/buildbot_travis,isotoma/buildbot_travis,tardyp/buildbot_travis,buildbot/buildbot_travis,buildbot/buildbot_travis,buildbot/buildbot_travis,tardyp/buildbot_travis,isotoma/buildbot_travis
|
from buildbot.process import buildstep
from buildbot.process.buildstep import SUCCESS, FAILURE, EXCEPTION
from buildbot.process.properties import Properties
from twisted.internet import defer
from ..travisyml import TravisYml
class ConfigurableStep(buildstep.LoggingBuildStep):
"""
Base class for a step which can be tuned by changing settings in .travis.yml
"""
@defer.inlineCallbacks
def getStepConfig(self):
- config = TravisYml()
-
- struct = self.build.getProperty(".travis.yml", None)
- if struct:
- config.parse(struct)
- defer.returnValue(config)
-
log = self.addLog(".travis.yml")
cmd = self.cmd = buildstep.RemoteShellCommand(workdir="build", command=["cat", ".travis.yml"])
cmd.useLog(log, False, "stdio")
yield self.runCommand(cmd)
self.cmd = None
if cmd.rc != 0:
raise buildstep.BuildStepFailed()
config = TravisYml()
config.parse(log.getText())
- self.build.setProperty(".travis.yml", config.config, ".VCS")
-
defer.returnValue(config)
|
Revert "Save .travis.yml into build properties"
|
## Code Before:
from buildbot.process import buildstep
from buildbot.process.buildstep import SUCCESS, FAILURE, EXCEPTION
from buildbot.process.properties import Properties
from twisted.internet import defer
from ..travisyml import TravisYml
class ConfigurableStep(buildstep.LoggingBuildStep):
"""
Base class for a step which can be tuned by changing settings in .travis.yml
"""
@defer.inlineCallbacks
def getStepConfig(self):
config = TravisYml()
struct = self.build.getProperty(".travis.yml", None)
if struct:
config.parse(struct)
defer.returnValue(config)
log = self.addLog(".travis.yml")
cmd = self.cmd = buildstep.RemoteShellCommand(workdir="build", command=["cat", ".travis.yml"])
cmd.useLog(log, False, "stdio")
yield self.runCommand(cmd)
self.cmd = None
if cmd.rc != 0:
raise buildstep.BuildStepFailed()
config = TravisYml()
config.parse(log.getText())
self.build.setProperty(".travis.yml", config.config, ".VCS")
defer.returnValue(config)
## Instruction:
Revert "Save .travis.yml into build properties"
## Code After:
from buildbot.process import buildstep
from buildbot.process.buildstep import SUCCESS, FAILURE, EXCEPTION
from buildbot.process.properties import Properties
from twisted.internet import defer
from ..travisyml import TravisYml
class ConfigurableStep(buildstep.LoggingBuildStep):
"""
Base class for a step which can be tuned by changing settings in .travis.yml
"""
@defer.inlineCallbacks
def getStepConfig(self):
log = self.addLog(".travis.yml")
cmd = self.cmd = buildstep.RemoteShellCommand(workdir="build", command=["cat", ".travis.yml"])
cmd.useLog(log, False, "stdio")
yield self.runCommand(cmd)
self.cmd = None
if cmd.rc != 0:
raise buildstep.BuildStepFailed()
config = TravisYml()
config.parse(log.getText())
defer.returnValue(config)
|
from buildbot.process import buildstep
from buildbot.process.buildstep import SUCCESS, FAILURE, EXCEPTION
from buildbot.process.properties import Properties
from twisted.internet import defer
from ..travisyml import TravisYml
class ConfigurableStep(buildstep.LoggingBuildStep):
"""
Base class for a step which can be tuned by changing settings in .travis.yml
"""
@defer.inlineCallbacks
def getStepConfig(self):
- config = TravisYml()
-
- struct = self.build.getProperty(".travis.yml", None)
- if struct:
- config.parse(struct)
- defer.returnValue(config)
-
log = self.addLog(".travis.yml")
cmd = self.cmd = buildstep.RemoteShellCommand(workdir="build", command=["cat", ".travis.yml"])
cmd.useLog(log, False, "stdio")
yield self.runCommand(cmd)
self.cmd = None
if cmd.rc != 0:
raise buildstep.BuildStepFailed()
config = TravisYml()
config.parse(log.getText())
- self.build.setProperty(".travis.yml", config.config, ".VCS")
-
defer.returnValue(config)
|
7c4cf25868a907eb3ac718b2d4dbef2966fbe806
|
ext/dcos-installer/dcos_installer/util/__init__.py
|
ext/dcos-installer/dcos_installer/util/__init__.py
|
import logging
import os
CONFIG_PATH = '/genconf/config.yaml'
SSH_KEY_PATH = '/genconf/ssh_key'
IP_DETECT_PATH = '/genconf/ip-detect'
SERVE_DIR = '/genconf/serve'
STATE_DIR = '/genconf/state'
GENCONF_DIR = '/genconf'
log = logging.getLogger(__name__)
def write_file(data, path):
try:
with open(path, 'w') as f:
log.debug("Writing file %s", path)
f.write(data)
except:
log.error("Filed to write path %s", path)
def get_action_state(action_name):
"""
Check the action.json file and if the
success + failed + term == total then we are finished.
If not, return running.
"""
return {
"action_name": "deploy",
"action_state": "running",
"hosts_running": [],
"hosts_success": [],
"hosts_failed": [],
"hosts_terminated": [],
}
def clear_action_jsons():
"""
On startup, remove all the old action.json files (preflight,
postflight, deploy .json). This is because action state is
nullified when the installer shuts down. This way we do not
return inconsistent state in the get_action_state().
"""
pass
def create_directory(path):
if not os.path.exists(path):
os.mkdirs(path)
|
CONFIG_PATH = '/genconf/config.yaml'
SSH_KEY_PATH = '/genconf/ssh_key'
IP_DETECT_PATH = '/genconf/ip-detect'
SERVE_DIR = '/genconf/serve'
STATE_DIR = '/genconf/state'
GENCONF_DIR = '/genconf'
|
Remove dead code in dcos_installer.util
|
Remove dead code in dcos_installer.util
|
Python
|
apache-2.0
|
lingmann/dcos,xinxian0458/dcos,amitaekbote/dcos,branden/dcos,dcos/dcos,xinxian0458/dcos,surdy/dcos,lingmann/dcos,mesosphere-mergebot/dcos,jeid64/dcos,surdy/dcos,mesosphere-mergebot/mergebot-test-dcos,darkonie/dcos,mnaboka/dcos,mnaboka/dcos,dcos/dcos,jeid64/dcos,vishnu2kmohan/dcos,GoelDeepak/dcos,darkonie/dcos,asridharan/dcos,branden/dcos,dcos/dcos,BenWhitehead/dcos,jeid64/dcos,GoelDeepak/dcos,vishnu2kmohan/dcos,BenWhitehead/dcos,mesosphere-mergebot/dcos,mesosphere-mergebot/mergebot-test-dcos,dcos/dcos,jeid64/dcos,vishnu2kmohan/dcos,amitaekbote/dcos,kensipe/dcos,kensipe/dcos,asridharan/dcos,darkonie/dcos,mellenburg/dcos,GoelDeepak/dcos,dcos/dcos,GoelDeepak/dcos,mellenburg/dcos,BenWhitehead/dcos,lingmann/dcos,branden/dcos,kensipe/dcos,mesosphere-mergebot/mergebot-test-dcos,xinxian0458/dcos,mnaboka/dcos,surdy/dcos,amitaekbote/dcos,mnaboka/dcos,mellenburg/dcos,vishnu2kmohan/dcos,mellenburg/dcos,darkonie/dcos,mesosphere-mergebot/dcos,asridharan/dcos,surdy/dcos,mesosphere-mergebot/mergebot-test-dcos,asridharan/dcos,BenWhitehead/dcos,xinxian0458/dcos,darkonie/dcos,kensipe/dcos,lingmann/dcos,amitaekbote/dcos,branden/dcos,mesosphere-mergebot/dcos,mnaboka/dcos
|
- import logging
- import os
-
CONFIG_PATH = '/genconf/config.yaml'
SSH_KEY_PATH = '/genconf/ssh_key'
IP_DETECT_PATH = '/genconf/ip-detect'
SERVE_DIR = '/genconf/serve'
STATE_DIR = '/genconf/state'
GENCONF_DIR = '/genconf'
- log = logging.getLogger(__name__)
-
-
- def write_file(data, path):
- try:
- with open(path, 'w') as f:
- log.debug("Writing file %s", path)
- f.write(data)
- except:
- log.error("Filed to write path %s", path)
-
-
- def get_action_state(action_name):
- """
- Check the action.json file and if the
- success + failed + term == total then we are finished.
- If not, return running.
- """
- return {
- "action_name": "deploy",
- "action_state": "running",
- "hosts_running": [],
- "hosts_success": [],
- "hosts_failed": [],
- "hosts_terminated": [],
- }
-
-
- def clear_action_jsons():
- """
- On startup, remove all the old action.json files (preflight,
- postflight, deploy .json). This is because action state is
- nullified when the installer shuts down. This way we do not
- return inconsistent state in the get_action_state().
- """
- pass
-
-
- def create_directory(path):
- if not os.path.exists(path):
- os.mkdirs(path)
-
|
Remove dead code in dcos_installer.util
|
## Code Before:
import logging
import os
CONFIG_PATH = '/genconf/config.yaml'
SSH_KEY_PATH = '/genconf/ssh_key'
IP_DETECT_PATH = '/genconf/ip-detect'
SERVE_DIR = '/genconf/serve'
STATE_DIR = '/genconf/state'
GENCONF_DIR = '/genconf'
log = logging.getLogger(__name__)
def write_file(data, path):
try:
with open(path, 'w') as f:
log.debug("Writing file %s", path)
f.write(data)
except:
log.error("Filed to write path %s", path)
def get_action_state(action_name):
"""
Check the action.json file and if the
success + failed + term == total then we are finished.
If not, return running.
"""
return {
"action_name": "deploy",
"action_state": "running",
"hosts_running": [],
"hosts_success": [],
"hosts_failed": [],
"hosts_terminated": [],
}
def clear_action_jsons():
"""
On startup, remove all the old action.json files (preflight,
postflight, deploy .json). This is because action state is
nullified when the installer shuts down. This way we do not
return inconsistent state in the get_action_state().
"""
pass
def create_directory(path):
if not os.path.exists(path):
os.mkdirs(path)
## Instruction:
Remove dead code in dcos_installer.util
## Code After:
CONFIG_PATH = '/genconf/config.yaml'
SSH_KEY_PATH = '/genconf/ssh_key'
IP_DETECT_PATH = '/genconf/ip-detect'
SERVE_DIR = '/genconf/serve'
STATE_DIR = '/genconf/state'
GENCONF_DIR = '/genconf'
|
- import logging
- import os
-
CONFIG_PATH = '/genconf/config.yaml'
SSH_KEY_PATH = '/genconf/ssh_key'
IP_DETECT_PATH = '/genconf/ip-detect'
SERVE_DIR = '/genconf/serve'
STATE_DIR = '/genconf/state'
GENCONF_DIR = '/genconf'
-
- log = logging.getLogger(__name__)
-
-
- def write_file(data, path):
- try:
- with open(path, 'w') as f:
- log.debug("Writing file %s", path)
- f.write(data)
- except:
- log.error("Filed to write path %s", path)
-
-
- def get_action_state(action_name):
- """
- Check the action.json file and if the
- success + failed + term == total then we are finished.
- If not, return running.
- """
- return {
- "action_name": "deploy",
- "action_state": "running",
- "hosts_running": [],
- "hosts_success": [],
- "hosts_failed": [],
- "hosts_terminated": [],
- }
-
-
- def clear_action_jsons():
- """
- On startup, remove all the old action.json files (preflight,
- postflight, deploy .json). This is because action state is
- nullified when the installer shuts down. This way we do not
- return inconsistent state in the get_action_state().
- """
- pass
-
-
- def create_directory(path):
- if not os.path.exists(path):
- os.mkdirs(path)
|
51b362ab66ed8a1a793dc9995a6f06067230085b
|
geomdl/__init__.py
|
geomdl/__init__.py
|
__version__ = "3.0.0"
# Fixes "from geomdl import *" but this is not considered as a good practice
# @see: https://docs.python.org/3/tutorial/modules.html#importing-from-a-package
__all__ = ["BSpline.Curve",
"BSpline.Curve2D",
"BSpline.Surface",
"NURBS.Curve",
"NURBS.Curve2D",
"NURBS.Surface",
"CPGen.Grid",
"CPGen.GridWeighted",
"utilities"]
|
__version__ = "3.0.0"
|
Disable import * as it seems to cause some unnecessary trouble
|
Disable import * as it seems to cause some unnecessary trouble
|
Python
|
mit
|
orbingol/NURBS-Python,orbingol/NURBS-Python
|
__version__ = "3.0.0"
- # Fixes "from geomdl import *" but this is not considered as a good practice
- # @see: https://docs.python.org/3/tutorial/modules.html#importing-from-a-package
- __all__ = ["BSpline.Curve",
- "BSpline.Curve2D",
- "BSpline.Surface",
- "NURBS.Curve",
- "NURBS.Curve2D",
- "NURBS.Surface",
- "CPGen.Grid",
- "CPGen.GridWeighted",
- "utilities"]
-
|
Disable import * as it seems to cause some unnecessary trouble
|
## Code Before:
__version__ = "3.0.0"
# Fixes "from geomdl import *" but this is not considered as a good practice
# @see: https://docs.python.org/3/tutorial/modules.html#importing-from-a-package
__all__ = ["BSpline.Curve",
"BSpline.Curve2D",
"BSpline.Surface",
"NURBS.Curve",
"NURBS.Curve2D",
"NURBS.Surface",
"CPGen.Grid",
"CPGen.GridWeighted",
"utilities"]
## Instruction:
Disable import * as it seems to cause some unnecessary trouble
## Code After:
__version__ = "3.0.0"
|
__version__ = "3.0.0"
-
- # Fixes "from geomdl import *" but this is not considered as a good practice
- # @see: https://docs.python.org/3/tutorial/modules.html#importing-from-a-package
- __all__ = ["BSpline.Curve",
- "BSpline.Curve2D",
- "BSpline.Surface",
- "NURBS.Curve",
- "NURBS.Curve2D",
- "NURBS.Surface",
- "CPGen.Grid",
- "CPGen.GridWeighted",
- "utilities"]
|
172c0123d5ce59ce4f162d806fc706dc50eb4312
|
distarray/tests/test_client.py
|
distarray/tests/test_client.py
|
import unittest
import numpy as np
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
self.dap = self.dac.fromndarray(np.arange(100))
def test_getitem(self):
self.assertEqual(self.dap[55], 55)
def test_setitem(self):
self.dap[35] = 9999
print self.dap[35]
if __name__ == '__main__':
unittest.main(verbosity=2)
|
import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
def test_set_and_getitem_(self):
dap = self.dac.empty((100,))
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
Test DAP getitem and setitem together.
|
Test DAP getitem and setitem together.
|
Python
|
bsd-3-clause
|
RaoUmer/distarray,enthought/distarray,RaoUmer/distarray,enthought/distarray
|
import unittest
- import numpy as np
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
- self.dap = self.dac.fromndarray(np.arange(100))
+ def test_set_and_getitem_(self):
+ dap = self.dac.empty((100,))
- def test_getitem(self):
- self.assertEqual(self.dap[55], 55)
+ for val in xrange(100):
+ dap[val] = val
+ for val in xrange(100):
+ self.assertEqual(dap[val], val)
- def test_setitem(self):
- self.dap[35] = 9999
- print self.dap[35]
if __name__ == '__main__':
unittest.main(verbosity=2)
|
Test DAP getitem and setitem together.
|
## Code Before:
import unittest
import numpy as np
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
self.dap = self.dac.fromndarray(np.arange(100))
def test_getitem(self):
self.assertEqual(self.dap[55], 55)
def test_setitem(self):
self.dap[35] = 9999
print self.dap[35]
if __name__ == '__main__':
unittest.main(verbosity=2)
## Instruction:
Test DAP getitem and setitem together.
## Code After:
import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
def test_set_and_getitem_(self):
dap = self.dac.empty((100,))
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
import unittest
- import numpy as np
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
- self.dap = self.dac.fromndarray(np.arange(100))
+ def test_set_and_getitem_(self):
+ dap = self.dac.empty((100,))
- def test_getitem(self):
- self.assertEqual(self.dap[55], 55)
+ for val in xrange(100):
+ dap[val] = val
+ for val in xrange(100):
+ self.assertEqual(dap[val], val)
- def test_setitem(self):
- self.dap[35] = 9999
- print self.dap[35]
if __name__ == '__main__':
unittest.main(verbosity=2)
|
724338c55d0af6d38a949b58a90ae200849247f4
|
cyinterval/test/test_interval_set.py
|
cyinterval/test/test_interval_set.py
|
from cyinterval.cyinterval import Interval, IntervalSet
from nose.tools import assert_equal
def test_interval_set_construction():
interval_set = IntervalSet(Interval(0.,1.), Interval(2.,3.))
assert_equal(interval_set.intervals[0], Interval(0.,1.))
assert_equal(interval_set.intervals[1], Interval(2.,3.))
if __name__ == '__main__':
import sys
import nose
# This code will run the test in this file.'
module_name = sys.modules[__name__].__file__
result = nose.run(argv=[sys.argv[0],
module_name,
'-s', '-v'])
|
from cyinterval.cyinterval import Interval, IntervalSet, FloatIntervalSet
from nose.tools import assert_equal, assert_is
def test_float_interval_set_construction():
interval_set = IntervalSet(Interval(0.,1.), Interval(2.,3.))
assert_equal(interval_set.intervals[0], Interval(0.,1.))
assert_equal(interval_set.intervals[1], Interval(2.,3.))
assert_is(type(interval_set), FloatIntervalSet)
if __name__ == '__main__':
import sys
import nose
# This code will run the test in this file.'
module_name = sys.modules[__name__].__file__
result = nose.run(argv=[sys.argv[0],
module_name,
'-s', '-v'])
|
Test type of IntervalSet factory output
|
Test type of IntervalSet factory output
|
Python
|
mit
|
jcrudy/cyinterval
|
- from cyinterval.cyinterval import Interval, IntervalSet
+ from cyinterval.cyinterval import Interval, IntervalSet, FloatIntervalSet
- from nose.tools import assert_equal
+ from nose.tools import assert_equal, assert_is
- def test_interval_set_construction():
+ def test_float_interval_set_construction():
interval_set = IntervalSet(Interval(0.,1.), Interval(2.,3.))
assert_equal(interval_set.intervals[0], Interval(0.,1.))
assert_equal(interval_set.intervals[1], Interval(2.,3.))
+ assert_is(type(interval_set), FloatIntervalSet)
if __name__ == '__main__':
import sys
import nose
# This code will run the test in this file.'
module_name = sys.modules[__name__].__file__
result = nose.run(argv=[sys.argv[0],
module_name,
'-s', '-v'])
|
Test type of IntervalSet factory output
|
## Code Before:
from cyinterval.cyinterval import Interval, IntervalSet
from nose.tools import assert_equal
def test_interval_set_construction():
interval_set = IntervalSet(Interval(0.,1.), Interval(2.,3.))
assert_equal(interval_set.intervals[0], Interval(0.,1.))
assert_equal(interval_set.intervals[1], Interval(2.,3.))
if __name__ == '__main__':
import sys
import nose
# This code will run the test in this file.'
module_name = sys.modules[__name__].__file__
result = nose.run(argv=[sys.argv[0],
module_name,
'-s', '-v'])
## Instruction:
Test type of IntervalSet factory output
## Code After:
from cyinterval.cyinterval import Interval, IntervalSet, FloatIntervalSet
from nose.tools import assert_equal, assert_is
def test_float_interval_set_construction():
interval_set = IntervalSet(Interval(0.,1.), Interval(2.,3.))
assert_equal(interval_set.intervals[0], Interval(0.,1.))
assert_equal(interval_set.intervals[1], Interval(2.,3.))
assert_is(type(interval_set), FloatIntervalSet)
if __name__ == '__main__':
import sys
import nose
# This code will run the test in this file.'
module_name = sys.modules[__name__].__file__
result = nose.run(argv=[sys.argv[0],
module_name,
'-s', '-v'])
|
- from cyinterval.cyinterval import Interval, IntervalSet
+ from cyinterval.cyinterval import Interval, IntervalSet, FloatIntervalSet
? ++++++++++++++++++
- from nose.tools import assert_equal
+ from nose.tools import assert_equal, assert_is
? +++++++++++
- def test_interval_set_construction():
+ def test_float_interval_set_construction():
? ++++++
interval_set = IntervalSet(Interval(0.,1.), Interval(2.,3.))
assert_equal(interval_set.intervals[0], Interval(0.,1.))
assert_equal(interval_set.intervals[1], Interval(2.,3.))
+ assert_is(type(interval_set), FloatIntervalSet)
if __name__ == '__main__':
import sys
import nose
# This code will run the test in this file.'
module_name = sys.modules[__name__].__file__
result = nose.run(argv=[sys.argv[0],
module_name,
'-s', '-v'])
|
abaa882aaa1b7e251d989d60391bd2e06801c2a2
|
py/desiUtil/install/most_recent_tag.py
|
py/desiUtil/install/most_recent_tag.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
# The line above will help with 2to3 support.
def most_recent_tag(tags,username=None):
"""Scan an SVN tags directory and return the most recent tag.
Parameters
----------
tags : str
A URL pointing to an SVN tags directory.
username : str, optional
If set, pass the value to SVN's ``--username`` option.
Returns
-------
most_recent_tag : str
The most recent tag found in ``tags``.
"""
from subprocess import Popen, PIPE
command = ['svn']
if username is not None:
command += ['--username', username]
command += ['ls',tags]
proc = Popen(command,stdout=PIPE,stderr=PIPE)
out, err = proc.communicate()
try:
mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0])[-1]
except IndexError:
mrt = '0.0.0'
return mrt
|
from __future__ import absolute_import, division, print_function, unicode_literals
# The line above will help with 2to3 support.
def most_recent_tag(tags,username=None):
"""Scan an SVN tags directory and return the most recent tag.
Parameters
----------
tags : str
A URL pointing to an SVN tags directory.
username : str, optional
If set, pass the value to SVN's ``--username`` option.
Returns
-------
most_recent_tag : str
The most recent tag found in ``tags``.
"""
from distutils.version import StrictVersion as V
from subprocess import Popen, PIPE
command = ['svn']
if username is not None:
command += ['--username', username]
command += ['ls',tags]
proc = Popen(command,stdout=PIPE,stderr=PIPE)
out, err = proc.communicate()
try:
mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0],
key=lambda x: V(x))[-1]
except IndexError:
mrt = '0.0.0'
return mrt
|
Add more careful version checks
|
Add more careful version checks
|
Python
|
bsd-3-clause
|
desihub/desiutil,desihub/desiutil
|
from __future__ import absolute_import, division, print_function, unicode_literals
# The line above will help with 2to3 support.
def most_recent_tag(tags,username=None):
"""Scan an SVN tags directory and return the most recent tag.
Parameters
----------
tags : str
A URL pointing to an SVN tags directory.
username : str, optional
If set, pass the value to SVN's ``--username`` option.
Returns
-------
most_recent_tag : str
The most recent tag found in ``tags``.
"""
+ from distutils.version import StrictVersion as V
from subprocess import Popen, PIPE
command = ['svn']
if username is not None:
command += ['--username', username]
command += ['ls',tags]
proc = Popen(command,stdout=PIPE,stderr=PIPE)
out, err = proc.communicate()
try:
- mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0])[-1]
+ mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0],
+ key=lambda x: V(x))[-1]
except IndexError:
mrt = '0.0.0'
return mrt
|
Add more careful version checks
|
## Code Before:
from __future__ import absolute_import, division, print_function, unicode_literals
# The line above will help with 2to3 support.
def most_recent_tag(tags,username=None):
"""Scan an SVN tags directory and return the most recent tag.
Parameters
----------
tags : str
A URL pointing to an SVN tags directory.
username : str, optional
If set, pass the value to SVN's ``--username`` option.
Returns
-------
most_recent_tag : str
The most recent tag found in ``tags``.
"""
from subprocess import Popen, PIPE
command = ['svn']
if username is not None:
command += ['--username', username]
command += ['ls',tags]
proc = Popen(command,stdout=PIPE,stderr=PIPE)
out, err = proc.communicate()
try:
mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0])[-1]
except IndexError:
mrt = '0.0.0'
return mrt
## Instruction:
Add more careful version checks
## Code After:
from __future__ import absolute_import, division, print_function, unicode_literals
# The line above will help with 2to3 support.
def most_recent_tag(tags,username=None):
"""Scan an SVN tags directory and return the most recent tag.
Parameters
----------
tags : str
A URL pointing to an SVN tags directory.
username : str, optional
If set, pass the value to SVN's ``--username`` option.
Returns
-------
most_recent_tag : str
The most recent tag found in ``tags``.
"""
from distutils.version import StrictVersion as V
from subprocess import Popen, PIPE
command = ['svn']
if username is not None:
command += ['--username', username]
command += ['ls',tags]
proc = Popen(command,stdout=PIPE,stderr=PIPE)
out, err = proc.communicate()
try:
mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0],
key=lambda x: V(x))[-1]
except IndexError:
mrt = '0.0.0'
return mrt
|
from __future__ import absolute_import, division, print_function, unicode_literals
# The line above will help with 2to3 support.
def most_recent_tag(tags,username=None):
"""Scan an SVN tags directory and return the most recent tag.
Parameters
----------
tags : str
A URL pointing to an SVN tags directory.
username : str, optional
If set, pass the value to SVN's ``--username`` option.
Returns
-------
most_recent_tag : str
The most recent tag found in ``tags``.
"""
+ from distutils.version import StrictVersion as V
from subprocess import Popen, PIPE
command = ['svn']
if username is not None:
command += ['--username', username]
command += ['ls',tags]
proc = Popen(command,stdout=PIPE,stderr=PIPE)
out, err = proc.communicate()
try:
- mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0])[-1]
? ^^^^^
+ mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0],
? ^
+ key=lambda x: V(x))[-1]
except IndexError:
mrt = '0.0.0'
return mrt
|
edd5adc9be2a700421bd8e98af825322796b8714
|
dns/models.py
|
dns/models.py
|
from google.appengine.ext import db
TOP_LEVEL_DOMAINS = 'com net org biz info'.split()
class Lookup(db.Model):
"""
The datastore key name is the domain name, without top level.
IP address fields use 0 (zero) for NXDOMAIN because None is
returned for missing properties.
Updates since 2010-01-01 use negative numbers for 60 bit hashes of
the SOA server name, see tools/update_dns.py.
"""
backwards = db.StringProperty(required=True) # For suffix matching.
timestamp = db.DateTimeProperty(required=True) # Created or updated.
com = db.IntegerProperty(indexed=False)
net = db.IntegerProperty(indexed=False)
org = db.IntegerProperty(indexed=False)
biz = db.IntegerProperty(indexed=False)
info = db.IntegerProperty(indexed=False)
|
from google.appengine.ext import db
TOP_LEVEL_DOMAINS = """
com net org biz info
ag am at
be by
ch ck
de
es eu
fm
in io is it
la li ly
me mobi ms
name
ru
se sh sy
tel th to travel tv
us
""".split()
# Omitting nu, ph, st, ws because they don't seem to have NXDOMAIN.
class UpgradeStringProperty(db.IntegerProperty):
def validate(self, value):
return unicode(value) if value else u''
class Lookup(db.Expando):
"""
The datastore key name is the domain name, without top level.
IP address fields use 0 (zero) for NXDOMAIN because None is
returned for missing properties.
Some updates on 2010-01-01 use negative numbers for 60 bit hashes of
the SOA server name.
Since 2010-01-02, this model inherits from Expando to flexibly add
more top level domains. Each property stores the authority name
server as string backwards, e.g. com.1and1.ns1 for better sorting.
"""
backwards = db.StringProperty(required=True) # For suffix matching.
timestamp = db.DateTimeProperty(required=True) # Created or updated.
com = UpgradeStringProperty()
net = UpgradeStringProperty()
org = UpgradeStringProperty()
biz = UpgradeStringProperty()
info = UpgradeStringProperty()
|
Upgrade Lookup model to Expando and DNS result properties from integer to string.
|
Upgrade Lookup model to Expando and DNS result properties from integer to string.
|
Python
|
mit
|
jcrocholl/nxdom,jcrocholl/nxdom
|
from google.appengine.ext import db
- TOP_LEVEL_DOMAINS = 'com net org biz info'.split()
+ TOP_LEVEL_DOMAINS = """
+ com net org biz info
+ ag am at
+ be by
+ ch ck
+ de
+ es eu
+ fm
+ in io is it
+ la li ly
+ me mobi ms
+ name
+ ru
+ se sh sy
+ tel th to travel tv
+ us
+ """.split()
+
+ # Omitting nu, ph, st, ws because they don't seem to have NXDOMAIN.
+ class UpgradeStringProperty(db.IntegerProperty):
+
+ def validate(self, value):
+ return unicode(value) if value else u''
+
+
+
- class Lookup(db.Model):
+ class Lookup(db.Expando):
"""
The datastore key name is the domain name, without top level.
IP address fields use 0 (zero) for NXDOMAIN because None is
returned for missing properties.
- Updates since 2010-01-01 use negative numbers for 60 bit hashes of
+ Some updates on 2010-01-01 use negative numbers for 60 bit hashes of
- the SOA server name, see tools/update_dns.py.
+ the SOA server name.
+
+ Since 2010-01-02, this model inherits from Expando to flexibly add
+ more top level domains. Each property stores the authority name
+ server as string backwards, e.g. com.1and1.ns1 for better sorting.
"""
backwards = db.StringProperty(required=True) # For suffix matching.
timestamp = db.DateTimeProperty(required=True) # Created or updated.
- com = db.IntegerProperty(indexed=False)
- net = db.IntegerProperty(indexed=False)
- org = db.IntegerProperty(indexed=False)
- biz = db.IntegerProperty(indexed=False)
- info = db.IntegerProperty(indexed=False)
+ com = UpgradeStringProperty()
+ net = UpgradeStringProperty()
+ org = UpgradeStringProperty()
+ biz = UpgradeStringProperty()
+ info = UpgradeStringProperty()
|
Upgrade Lookup model to Expando and DNS result properties from integer to string.
|
## Code Before:
from google.appengine.ext import db
TOP_LEVEL_DOMAINS = 'com net org biz info'.split()
class Lookup(db.Model):
"""
The datastore key name is the domain name, without top level.
IP address fields use 0 (zero) for NXDOMAIN because None is
returned for missing properties.
Updates since 2010-01-01 use negative numbers for 60 bit hashes of
the SOA server name, see tools/update_dns.py.
"""
backwards = db.StringProperty(required=True) # For suffix matching.
timestamp = db.DateTimeProperty(required=True) # Created or updated.
com = db.IntegerProperty(indexed=False)
net = db.IntegerProperty(indexed=False)
org = db.IntegerProperty(indexed=False)
biz = db.IntegerProperty(indexed=False)
info = db.IntegerProperty(indexed=False)
## Instruction:
Upgrade Lookup model to Expando and DNS result properties from integer to string.
## Code After:
from google.appengine.ext import db
TOP_LEVEL_DOMAINS = """
com net org biz info
ag am at
be by
ch ck
de
es eu
fm
in io is it
la li ly
me mobi ms
name
ru
se sh sy
tel th to travel tv
us
""".split()
# Omitting nu, ph, st, ws because they don't seem to have NXDOMAIN.
class UpgradeStringProperty(db.IntegerProperty):
def validate(self, value):
return unicode(value) if value else u''
class Lookup(db.Expando):
"""
The datastore key name is the domain name, without top level.
IP address fields use 0 (zero) for NXDOMAIN because None is
returned for missing properties.
Some updates on 2010-01-01 use negative numbers for 60 bit hashes of
the SOA server name.
Since 2010-01-02, this model inherits from Expando to flexibly add
more top level domains. Each property stores the authority name
server as string backwards, e.g. com.1and1.ns1 for better sorting.
"""
backwards = db.StringProperty(required=True) # For suffix matching.
timestamp = db.DateTimeProperty(required=True) # Created or updated.
com = UpgradeStringProperty()
net = UpgradeStringProperty()
org = UpgradeStringProperty()
biz = UpgradeStringProperty()
info = UpgradeStringProperty()
|
from google.appengine.ext import db
- TOP_LEVEL_DOMAINS = 'com net org biz info'.split()
+ TOP_LEVEL_DOMAINS = """
+ com net org biz info
+ ag am at
+ be by
+ ch ck
+ de
+ es eu
+ fm
+ in io is it
+ la li ly
+ me mobi ms
+ name
+ ru
+ se sh sy
+ tel th to travel tv
+ us
+ """.split()
+
+ # Omitting nu, ph, st, ws because they don't seem to have NXDOMAIN.
+ class UpgradeStringProperty(db.IntegerProperty):
+
+ def validate(self, value):
+ return unicode(value) if value else u''
+
+
+
- class Lookup(db.Model):
? ^ ---
+ class Lookup(db.Expando):
? ^^^^^^
"""
The datastore key name is the domain name, without top level.
IP address fields use 0 (zero) for NXDOMAIN because None is
returned for missing properties.
- Updates since 2010-01-01 use negative numbers for 60 bit hashes of
? ^ ^^ --
+ Some updates on 2010-01-01 use negative numbers for 60 bit hashes of
? ^^^^^^ ^
- the SOA server name, see tools/update_dns.py.
+ the SOA server name.
+
+ Since 2010-01-02, this model inherits from Expando to flexibly add
+ more top level domains. Each property stores the authority name
+ server as string backwards, e.g. com.1and1.ns1 for better sorting.
"""
backwards = db.StringProperty(required=True) # For suffix matching.
timestamp = db.DateTimeProperty(required=True) # Created or updated.
- com = db.IntegerProperty(indexed=False)
- net = db.IntegerProperty(indexed=False)
- org = db.IntegerProperty(indexed=False)
- biz = db.IntegerProperty(indexed=False)
- info = db.IntegerProperty(indexed=False)
+ com = UpgradeStringProperty()
+ net = UpgradeStringProperty()
+ org = UpgradeStringProperty()
+ biz = UpgradeStringProperty()
+ info = UpgradeStringProperty()
|
fb53f2ed0e6337d6f5766f47cb67c204c89c0568
|
src/oauth2client/__init__.py
|
src/oauth2client/__init__.py
|
"""Client library for using OAuth2, especially with Google APIs."""
__version__ = '4.1.3'
GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth'
GOOGLE_DEVICE_URI = 'https://oauth2.googleapis.com/device/code'
GOOGLE_REVOKE_URI = 'https://oauth2.googleapis.com/revoke'
GOOGLE_TOKEN_URI = 'https://oauth2.googleapis.com/token'
GOOGLE_TOKEN_INFO_URI = 'https://oauth2.googleapis.com/tokeninfo'
|
"""Client library for using OAuth2, especially with Google APIs."""
__version__ = '4.1.3'
GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth'
GOOGLE_DEVICE_URI = 'https://oauth2.googleapis.com/device/code'
GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke'
GOOGLE_TOKEN_URI = 'https://oauth2.googleapis.com/token'
GOOGLE_TOKEN_INFO_URI = 'https://oauth2.googleapis.com/tokeninfo'
|
Fix oauth2 revoke URI, new URL doesn't seem to work
|
Fix oauth2 revoke URI, new URL doesn't seem to work
|
Python
|
apache-2.0
|
GAM-team/GAM,GAM-team/GAM
|
"""Client library for using OAuth2, especially with Google APIs."""
__version__ = '4.1.3'
GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth'
GOOGLE_DEVICE_URI = 'https://oauth2.googleapis.com/device/code'
- GOOGLE_REVOKE_URI = 'https://oauth2.googleapis.com/revoke'
+ GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke'
GOOGLE_TOKEN_URI = 'https://oauth2.googleapis.com/token'
GOOGLE_TOKEN_INFO_URI = 'https://oauth2.googleapis.com/tokeninfo'
|
Fix oauth2 revoke URI, new URL doesn't seem to work
|
## Code Before:
"""Client library for using OAuth2, especially with Google APIs."""
__version__ = '4.1.3'
GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth'
GOOGLE_DEVICE_URI = 'https://oauth2.googleapis.com/device/code'
GOOGLE_REVOKE_URI = 'https://oauth2.googleapis.com/revoke'
GOOGLE_TOKEN_URI = 'https://oauth2.googleapis.com/token'
GOOGLE_TOKEN_INFO_URI = 'https://oauth2.googleapis.com/tokeninfo'
## Instruction:
Fix oauth2 revoke URI, new URL doesn't seem to work
## Code After:
"""Client library for using OAuth2, especially with Google APIs."""
__version__ = '4.1.3'
GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth'
GOOGLE_DEVICE_URI = 'https://oauth2.googleapis.com/device/code'
GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke'
GOOGLE_TOKEN_URI = 'https://oauth2.googleapis.com/token'
GOOGLE_TOKEN_INFO_URI = 'https://oauth2.googleapis.com/tokeninfo'
|
"""Client library for using OAuth2, especially with Google APIs."""
__version__ = '4.1.3'
GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth'
GOOGLE_DEVICE_URI = 'https://oauth2.googleapis.com/device/code'
- GOOGLE_REVOKE_URI = 'https://oauth2.googleapis.com/revoke'
? - ^^ ----
+ GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke'
? +++ + ^ +++++++++
GOOGLE_TOKEN_URI = 'https://oauth2.googleapis.com/token'
GOOGLE_TOKEN_INFO_URI = 'https://oauth2.googleapis.com/tokeninfo'
|
1468d6e257d4f22f803549606cbd3e3245c2ce37
|
redash/utils/comparators.py
|
redash/utils/comparators.py
|
from sqlalchemy import func
from sqlalchemy.ext.hybrid import Comparator
class CaseInsensitiveComparator(Comparator):
def __eq__(self, other):
return func.lower(self.__clause_element__()) == func.lower(other)
|
from sqlalchemy import String
class CaseInsensitiveComparator(String.Comparator):
def __eq__(self, other):
return func.lower(self.__clause_element__()) == func.lower(other)
|
Change CaseInsensitiveComparator to support all operations.
|
Change CaseInsensitiveComparator to support all operations.
|
Python
|
bsd-2-clause
|
moritz9/redash,44px/redash,getredash/redash,alexanderlz/redash,alexanderlz/redash,moritz9/redash,44px/redash,44px/redash,denisov-vlad/redash,getredash/redash,getredash/redash,denisov-vlad/redash,getredash/redash,denisov-vlad/redash,chriszs/redash,denisov-vlad/redash,44px/redash,alexanderlz/redash,getredash/redash,chriszs/redash,alexanderlz/redash,moritz9/redash,denisov-vlad/redash,chriszs/redash,chriszs/redash,moritz9/redash
|
- from sqlalchemy import func
+ from sqlalchemy import String
- from sqlalchemy.ext.hybrid import Comparator
- class CaseInsensitiveComparator(Comparator):
+ class CaseInsensitiveComparator(String.Comparator):
def __eq__(self, other):
return func.lower(self.__clause_element__()) == func.lower(other)
|
Change CaseInsensitiveComparator to support all operations.
|
## Code Before:
from sqlalchemy import func
from sqlalchemy.ext.hybrid import Comparator
class CaseInsensitiveComparator(Comparator):
def __eq__(self, other):
return func.lower(self.__clause_element__()) == func.lower(other)
## Instruction:
Change CaseInsensitiveComparator to support all operations.
## Code After:
from sqlalchemy import String
class CaseInsensitiveComparator(String.Comparator):
def __eq__(self, other):
return func.lower(self.__clause_element__()) == func.lower(other)
|
- from sqlalchemy import func
? ^^ ^
+ from sqlalchemy import String
? ^^^^ ^
- from sqlalchemy.ext.hybrid import Comparator
- class CaseInsensitiveComparator(Comparator):
+ class CaseInsensitiveComparator(String.Comparator):
? +++++++
def __eq__(self, other):
return func.lower(self.__clause_element__()) == func.lower(other)
|
d423668902a87c17e73f3521e58571709c9b9283
|
td_biblio/urls.py
|
td_biblio/urls.py
|
from django.conf.urls import url, patterns
from .views import EntryListView
urlpatterns = patterns(
'',
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
)
|
from django.conf.urls import url
from .views import EntryListView
urlpatterns = [
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
]
|
Switch to django new url schema
|
Switch to django new url schema
|
Python
|
mit
|
TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio
|
- from django.conf.urls import url, patterns
+ from django.conf.urls import url
from .views import EntryListView
+ urlpatterns = [
- urlpatterns = patterns(
- '',
-
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
- )
+ ]
|
Switch to django new url schema
|
## Code Before:
from django.conf.urls import url, patterns
from .views import EntryListView
urlpatterns = patterns(
'',
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
)
## Instruction:
Switch to django new url schema
## Code After:
from django.conf.urls import url
from .views import EntryListView
urlpatterns = [
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
]
|
- from django.conf.urls import url, patterns
? ----------
+ from django.conf.urls import url
from .views import EntryListView
+ urlpatterns = [
- urlpatterns = patterns(
- '',
-
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
- )
+ ]
|
66e16d6e3d80ab81967232d5d154c64c8e277def
|
robotpy_ext/misc/periodic_filter.py
|
robotpy_ext/misc/periodic_filter.py
|
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno > logging.INFO
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
self.bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno >= self.bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
Allow user to select bypass level
|
Allow user to select bypass level
|
Python
|
bsd-3-clause
|
Twinters007/robotpy-wpilib-utilities,robotpy/robotpy-wpilib-utilities,robotpy/robotpy-wpilib-utilities,Twinters007/robotpy-wpilib-utilities
|
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
- def __init__(self, period):
+ def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
+ self.bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
- return self.parent.loggingLoop or record.levelno > logging.INFO
+ return self.parent.loggingLoop or record.levelno >= self.bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
Allow user to select bypass level
|
## Code Before:
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno > logging.INFO
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
## Instruction:
Allow user to select bypass level
## Code After:
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
self.bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno >= self.bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
- def __init__(self, period):
+ def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
+ self.bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
- return self.parent.loggingLoop or record.levelno > logging.INFO
? ^^^^^^ ^^^^
+ return self.parent.loggingLoop or record.levelno >= self.bypassLevel
? + ++ ^ ^^^^^^^^^^^
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
1aa050f2d50fb206ffb1a7d06e75cc2ba27cc91b
|
1.py
|
1.py
|
i = input()
floor = 0
for x in range(0, len(i)):
if i[x] == '(':
floor +=1;
elif i[x] == ')':
floor -=1;
print(floor)
|
i = input()
floor = 0
instruction = 0
for x in range(0, len(i)):
if i[x] == '(':
floor +=1
elif i[x] == ')':
floor -=1
if (floor < 0 and instruction == 0):
instruction = x+1
print("floor: %s" % floor)
print("basement entry: %s" % instruction)
|
Add second part of puzzle
|
Add second part of puzzle
|
Python
|
mit
|
Walther/adventofcode,Walther/adventofcode,Walther/adventofcode
|
i = input()
floor = 0
+ instruction = 0
for x in range(0, len(i)):
if i[x] == '(':
- floor +=1;
+ floor +=1
elif i[x] == ')':
- floor -=1;
+ floor -=1
- print(floor)
+ if (floor < 0 and instruction == 0):
+ instruction = x+1
+ print("floor: %s" % floor)
+ print("basement entry: %s" % instruction)
|
Add second part of puzzle
|
## Code Before:
i = input()
floor = 0
for x in range(0, len(i)):
if i[x] == '(':
floor +=1;
elif i[x] == ')':
floor -=1;
print(floor)
## Instruction:
Add second part of puzzle
## Code After:
i = input()
floor = 0
instruction = 0
for x in range(0, len(i)):
if i[x] == '(':
floor +=1
elif i[x] == ')':
floor -=1
if (floor < 0 and instruction == 0):
instruction = x+1
print("floor: %s" % floor)
print("basement entry: %s" % instruction)
|
i = input()
floor = 0
+ instruction = 0
for x in range(0, len(i)):
if i[x] == '(':
- floor +=1;
? -
+ floor +=1
elif i[x] == ')':
- floor -=1;
? -
+ floor -=1
- print(floor)
+ if (floor < 0 and instruction == 0):
+ instruction = x+1
+ print("floor: %s" % floor)
+ print("basement entry: %s" % instruction)
|
d604d17e8286b1c95a0faafd6d4fd79af11441ab
|
nn/util.py
|
nn/util.py
|
import functools
import numpy
import tensorflow as tf
def static_shape(tensor):
return tf.convert_to_tensor(tensor).get_shape().as_list()
def static_rank(tensor):
return len(static_shape(tf.convert_to_tensor(tensor)))
def funcname_scope(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.variable_scope(func.__name__):
return func(*args, **kwargs)
return wrapper
def on_device(device_name):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.device(device_name):
return func(*args, **kwargs)
return wrapper
return decorator
def dimension_indices(tensor, start=0):
return list(range(static_rank(tensor)))[start:]
@funcname_scope
def dtype_min(dtype):
return tf.constant(_numpy_min(dtype.as_numpy_dtype))
def _numpy_min(dtype):
return numpy.finfo(dtype).min
@funcname_scope
def dtype_epsilon(dtype):
return tf.constant(_numpy_epsilon(dtype.as_numpy_dtype))
def _numpy_epsilon(dtype):
return numpy.finfo(dtype).eps
def flatten(x):
return tf.reshape(x, [-1])
|
import functools
import numpy
import tensorflow as tf
def static_shape(tensor):
return tf.convert_to_tensor(tensor).get_shape().as_list()
def static_rank(tensor):
return len(static_shape(tf.convert_to_tensor(tensor)))
def funcname_scope(func_or_name):
if isinstance(func_or_name, str):
def wrapper(func):
func.__name__ = func_or_name
return funcname_scope(func)
return wrapper
func = func_or_name
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.variable_scope(func.__name__):
return func(*args, **kwargs)
return wrapper
def on_device(device_name):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.device(device_name):
return func(*args, **kwargs)
return wrapper
return decorator
def dimension_indices(tensor, start=0):
return list(range(static_rank(tensor)))[start:]
@funcname_scope
def dtype_min(dtype):
return tf.constant(_numpy_min(dtype.as_numpy_dtype))
def _numpy_min(dtype):
return numpy.finfo(dtype).min
@funcname_scope
def dtype_epsilon(dtype):
return tf.constant(_numpy_epsilon(dtype.as_numpy_dtype))
def _numpy_epsilon(dtype):
return numpy.finfo(dtype).eps
def flatten(x):
return tf.reshape(x, [-1])
|
Extend funcname_scope so that it accepts funcnames
|
Extend funcname_scope so that it accepts funcnames
|
Python
|
unlicense
|
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
|
import functools
import numpy
import tensorflow as tf
def static_shape(tensor):
return tf.convert_to_tensor(tensor).get_shape().as_list()
def static_rank(tensor):
return len(static_shape(tf.convert_to_tensor(tensor)))
- def funcname_scope(func):
+ def funcname_scope(func_or_name):
+ if isinstance(func_or_name, str):
+ def wrapper(func):
+ func.__name__ = func_or_name
+ return funcname_scope(func)
+ return wrapper
+
+ func = func_or_name
+
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.variable_scope(func.__name__):
return func(*args, **kwargs)
return wrapper
def on_device(device_name):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.device(device_name):
return func(*args, **kwargs)
return wrapper
return decorator
def dimension_indices(tensor, start=0):
return list(range(static_rank(tensor)))[start:]
@funcname_scope
def dtype_min(dtype):
return tf.constant(_numpy_min(dtype.as_numpy_dtype))
def _numpy_min(dtype):
return numpy.finfo(dtype).min
@funcname_scope
def dtype_epsilon(dtype):
return tf.constant(_numpy_epsilon(dtype.as_numpy_dtype))
def _numpy_epsilon(dtype):
return numpy.finfo(dtype).eps
def flatten(x):
return tf.reshape(x, [-1])
|
Extend funcname_scope so that it accepts funcnames
|
## Code Before:
import functools
import numpy
import tensorflow as tf
def static_shape(tensor):
return tf.convert_to_tensor(tensor).get_shape().as_list()
def static_rank(tensor):
return len(static_shape(tf.convert_to_tensor(tensor)))
def funcname_scope(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.variable_scope(func.__name__):
return func(*args, **kwargs)
return wrapper
def on_device(device_name):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.device(device_name):
return func(*args, **kwargs)
return wrapper
return decorator
def dimension_indices(tensor, start=0):
return list(range(static_rank(tensor)))[start:]
@funcname_scope
def dtype_min(dtype):
return tf.constant(_numpy_min(dtype.as_numpy_dtype))
def _numpy_min(dtype):
return numpy.finfo(dtype).min
@funcname_scope
def dtype_epsilon(dtype):
return tf.constant(_numpy_epsilon(dtype.as_numpy_dtype))
def _numpy_epsilon(dtype):
return numpy.finfo(dtype).eps
def flatten(x):
return tf.reshape(x, [-1])
## Instruction:
Extend funcname_scope so that it accepts funcnames
## Code After:
import functools
import numpy
import tensorflow as tf
def static_shape(tensor):
return tf.convert_to_tensor(tensor).get_shape().as_list()
def static_rank(tensor):
return len(static_shape(tf.convert_to_tensor(tensor)))
def funcname_scope(func_or_name):
if isinstance(func_or_name, str):
def wrapper(func):
func.__name__ = func_or_name
return funcname_scope(func)
return wrapper
func = func_or_name
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.variable_scope(func.__name__):
return func(*args, **kwargs)
return wrapper
def on_device(device_name):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.device(device_name):
return func(*args, **kwargs)
return wrapper
return decorator
def dimension_indices(tensor, start=0):
return list(range(static_rank(tensor)))[start:]
@funcname_scope
def dtype_min(dtype):
return tf.constant(_numpy_min(dtype.as_numpy_dtype))
def _numpy_min(dtype):
return numpy.finfo(dtype).min
@funcname_scope
def dtype_epsilon(dtype):
return tf.constant(_numpy_epsilon(dtype.as_numpy_dtype))
def _numpy_epsilon(dtype):
return numpy.finfo(dtype).eps
def flatten(x):
return tf.reshape(x, [-1])
|
import functools
import numpy
import tensorflow as tf
def static_shape(tensor):
return tf.convert_to_tensor(tensor).get_shape().as_list()
def static_rank(tensor):
return len(static_shape(tf.convert_to_tensor(tensor)))
- def funcname_scope(func):
+ def funcname_scope(func_or_name):
? ++++++++
+ if isinstance(func_or_name, str):
+ def wrapper(func):
+ func.__name__ = func_or_name
+ return funcname_scope(func)
+ return wrapper
+
+ func = func_or_name
+
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.variable_scope(func.__name__):
return func(*args, **kwargs)
return wrapper
def on_device(device_name):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.device(device_name):
return func(*args, **kwargs)
return wrapper
return decorator
def dimension_indices(tensor, start=0):
return list(range(static_rank(tensor)))[start:]
@funcname_scope
def dtype_min(dtype):
return tf.constant(_numpy_min(dtype.as_numpy_dtype))
def _numpy_min(dtype):
return numpy.finfo(dtype).min
@funcname_scope
def dtype_epsilon(dtype):
return tf.constant(_numpy_epsilon(dtype.as_numpy_dtype))
def _numpy_epsilon(dtype):
return numpy.finfo(dtype).eps
def flatten(x):
return tf.reshape(x, [-1])
|
e3dcbe5fb142b7ce564a90cf127de418d0a62db3
|
src/sentry/runner/hacks.py
|
src/sentry/runner/hacks.py
|
from __future__ import absolute_import, print_function
from sentry.http import get_server_hostname
class AllowedHosts(object):
# HACK: This is a fake stub for settings.ALLOWED_HOSTS
# This is needing since ALLOWED_HOSTS is engrained
# in Django internals, so we want this "tuple" to respond
# to runtime changes based on our system.url-prefix Option
def __iter__(self):
yield get_server_hostname() or '*'
|
from __future__ import absolute_import, print_function
from sentry.http import get_server_hostname
class AllowedHosts(object):
# HACK: This is a fake stub for settings.ALLOWED_HOSTS
# This is needing since ALLOWED_HOSTS is engrained
# in Django internals, so we want this "tuple" to respond
# to runtime changes based on our system.url-prefix Option
def __iter__(self):
yield get_server_hostname() or '*'
def __repr__(self):
return repr(tuple(self))
|
Add a nice repr for AllowedHosts object so the admin makes sense
|
Add a nice repr for AllowedHosts object so the admin makes sense
|
Python
|
bsd-3-clause
|
fotinakis/sentry,daevaorn/sentry,beeftornado/sentry,daevaorn/sentry,ifduyue/sentry,alexm92/sentry,ifduyue/sentry,JamesMura/sentry,JamesMura/sentry,alexm92/sentry,jean/sentry,BuildingLink/sentry,JackDanger/sentry,gencer/sentry,nicholasserra/sentry,jean/sentry,ifduyue/sentry,mvaled/sentry,ifduyue/sentry,zenefits/sentry,zenefits/sentry,fotinakis/sentry,JamesMura/sentry,gencer/sentry,nicholasserra/sentry,gencer/sentry,mvaled/sentry,looker/sentry,looker/sentry,BuildingLink/sentry,mvaled/sentry,jean/sentry,beeftornado/sentry,mvaled/sentry,JackDanger/sentry,fotinakis/sentry,JamesMura/sentry,mitsuhiko/sentry,looker/sentry,daevaorn/sentry,JamesMura/sentry,mvaled/sentry,mitsuhiko/sentry,ifduyue/sentry,daevaorn/sentry,jean/sentry,nicholasserra/sentry,BuildingLink/sentry,zenefits/sentry,jean/sentry,zenefits/sentry,BuildingLink/sentry,zenefits/sentry,gencer/sentry,looker/sentry,fotinakis/sentry,mvaled/sentry,looker/sentry,JackDanger/sentry,BuildingLink/sentry,gencer/sentry,beeftornado/sentry,alexm92/sentry
|
from __future__ import absolute_import, print_function
from sentry.http import get_server_hostname
class AllowedHosts(object):
# HACK: This is a fake stub for settings.ALLOWED_HOSTS
# This is needing since ALLOWED_HOSTS is engrained
# in Django internals, so we want this "tuple" to respond
# to runtime changes based on our system.url-prefix Option
def __iter__(self):
yield get_server_hostname() or '*'
+ def __repr__(self):
+ return repr(tuple(self))
+
|
Add a nice repr for AllowedHosts object so the admin makes sense
|
## Code Before:
from __future__ import absolute_import, print_function
from sentry.http import get_server_hostname
class AllowedHosts(object):
# HACK: This is a fake stub for settings.ALLOWED_HOSTS
# This is needing since ALLOWED_HOSTS is engrained
# in Django internals, so we want this "tuple" to respond
# to runtime changes based on our system.url-prefix Option
def __iter__(self):
yield get_server_hostname() or '*'
## Instruction:
Add a nice repr for AllowedHosts object so the admin makes sense
## Code After:
from __future__ import absolute_import, print_function
from sentry.http import get_server_hostname
class AllowedHosts(object):
# HACK: This is a fake stub for settings.ALLOWED_HOSTS
# This is needing since ALLOWED_HOSTS is engrained
# in Django internals, so we want this "tuple" to respond
# to runtime changes based on our system.url-prefix Option
def __iter__(self):
yield get_server_hostname() or '*'
def __repr__(self):
return repr(tuple(self))
|
from __future__ import absolute_import, print_function
from sentry.http import get_server_hostname
class AllowedHosts(object):
# HACK: This is a fake stub for settings.ALLOWED_HOSTS
# This is needing since ALLOWED_HOSTS is engrained
# in Django internals, so we want this "tuple" to respond
# to runtime changes based on our system.url-prefix Option
def __iter__(self):
yield get_server_hostname() or '*'
+
+ def __repr__(self):
+ return repr(tuple(self))
|
0d7db5b134aaed2721dff8f7be300878fffa0c44
|
genestack_client/__init__.py
|
genestack_client/__init__.py
|
import sys
if not ((2, 7, 5) <= sys.version_info < (3, 0)):
sys.stderr.write(
'Python version "%s" is not supported. Required version 2.7.5+, Python 3 is not supported\n' % sys.version
)
exit(1)
from version import __version__
from genestack_exceptions import (GenestackAuthenticationException, GenestackBaseException,
GenestackConnectionFailure, GenestackException,
GenestackResponseError, GenestackServerException,
GenestackVersionException)
from genestack_connection import Connection, Application
from file_types import FileTypes
from file_permissions import Permissions
from metainfo_scalar_values import *
from bio_meta_keys import BioMetaKeys
from genestack_metainfo import Metainfo
from bio_metainfo import BioMetainfo
from data_importer import DataImporter
from file_initializer import FileInitializer
from genome_query import GenomeQuery
from utils import get_connection, get_user, make_connection_parser, validate_constant
from file_filters import *
from share_util import ShareUtil
from files_util import FilesUtil, MatchType, SortOrder, SpecialFolders
from datasets_util import DatasetsUtil
from groups_util import GroupsUtil
from organization_util import OrganizationUtil
from task_log_viewer import TaskLogViewer
from cla import *
from expression_navigator import *
|
import sys
if not ((2, 7, 5) <= sys.version_info < (3, 0)):
sys.stderr.write(
'Python version "%s" is not supported. Required version 2.7.5+, Python 3 is not supported\n' % sys.version
)
exit(1)
from version import __version__
from genestack_exceptions import (GenestackAuthenticationException, GenestackBaseException,
GenestackConnectionFailure, GenestackException,
GenestackResponseError, GenestackServerException,
GenestackVersionException)
from genestack_connection import Connection, Application
from file_types import FileTypes
from file_permissions import Permissions
from metainfo_scalar_values import *
from bio_meta_keys import BioMetaKeys
from genestack_metainfo import Metainfo
from bio_metainfo import BioMetainfo
from data_importer import DataImporter
from file_initializer import FileInitializer
from genome_query import GenomeQuery
from utils import get_connection, get_user, make_connection_parser, validate_constant
from file_filters import *
from share_util import ShareUtil
from files_util import FilesUtil, SortOrder, SpecialFolders
from datasets_util import DatasetsUtil
from groups_util import GroupsUtil
from organization_util import OrganizationUtil
from task_log_viewer import TaskLogViewer
from cla import *
from expression_navigator import *
|
Delete findMetainfoRelatedTerms method from Genestack Core findMetainfoRelatedTerms is deleted
|
[JBKB-996] Delete findMetainfoRelatedTerms method from Genestack Core
findMetainfoRelatedTerms is deleted
|
Python
|
mit
|
genestack/python-client
|
import sys
if not ((2, 7, 5) <= sys.version_info < (3, 0)):
sys.stderr.write(
'Python version "%s" is not supported. Required version 2.7.5+, Python 3 is not supported\n' % sys.version
)
exit(1)
from version import __version__
from genestack_exceptions import (GenestackAuthenticationException, GenestackBaseException,
GenestackConnectionFailure, GenestackException,
GenestackResponseError, GenestackServerException,
GenestackVersionException)
from genestack_connection import Connection, Application
from file_types import FileTypes
from file_permissions import Permissions
from metainfo_scalar_values import *
from bio_meta_keys import BioMetaKeys
from genestack_metainfo import Metainfo
from bio_metainfo import BioMetainfo
from data_importer import DataImporter
from file_initializer import FileInitializer
from genome_query import GenomeQuery
from utils import get_connection, get_user, make_connection_parser, validate_constant
from file_filters import *
from share_util import ShareUtil
- from files_util import FilesUtil, MatchType, SortOrder, SpecialFolders
+ from files_util import FilesUtil, SortOrder, SpecialFolders
from datasets_util import DatasetsUtil
from groups_util import GroupsUtil
from organization_util import OrganizationUtil
from task_log_viewer import TaskLogViewer
from cla import *
from expression_navigator import *
|
Delete findMetainfoRelatedTerms method from Genestack Core findMetainfoRelatedTerms is deleted
|
## Code Before:
import sys
if not ((2, 7, 5) <= sys.version_info < (3, 0)):
sys.stderr.write(
'Python version "%s" is not supported. Required version 2.7.5+, Python 3 is not supported\n' % sys.version
)
exit(1)
from version import __version__
from genestack_exceptions import (GenestackAuthenticationException, GenestackBaseException,
GenestackConnectionFailure, GenestackException,
GenestackResponseError, GenestackServerException,
GenestackVersionException)
from genestack_connection import Connection, Application
from file_types import FileTypes
from file_permissions import Permissions
from metainfo_scalar_values import *
from bio_meta_keys import BioMetaKeys
from genestack_metainfo import Metainfo
from bio_metainfo import BioMetainfo
from data_importer import DataImporter
from file_initializer import FileInitializer
from genome_query import GenomeQuery
from utils import get_connection, get_user, make_connection_parser, validate_constant
from file_filters import *
from share_util import ShareUtil
from files_util import FilesUtil, MatchType, SortOrder, SpecialFolders
from datasets_util import DatasetsUtil
from groups_util import GroupsUtil
from organization_util import OrganizationUtil
from task_log_viewer import TaskLogViewer
from cla import *
from expression_navigator import *
## Instruction:
Delete findMetainfoRelatedTerms method from Genestack Core findMetainfoRelatedTerms is deleted
## Code After:
import sys
if not ((2, 7, 5) <= sys.version_info < (3, 0)):
sys.stderr.write(
'Python version "%s" is not supported. Required version 2.7.5+, Python 3 is not supported\n' % sys.version
)
exit(1)
from version import __version__
from genestack_exceptions import (GenestackAuthenticationException, GenestackBaseException,
GenestackConnectionFailure, GenestackException,
GenestackResponseError, GenestackServerException,
GenestackVersionException)
from genestack_connection import Connection, Application
from file_types import FileTypes
from file_permissions import Permissions
from metainfo_scalar_values import *
from bio_meta_keys import BioMetaKeys
from genestack_metainfo import Metainfo
from bio_metainfo import BioMetainfo
from data_importer import DataImporter
from file_initializer import FileInitializer
from genome_query import GenomeQuery
from utils import get_connection, get_user, make_connection_parser, validate_constant
from file_filters import *
from share_util import ShareUtil
from files_util import FilesUtil, SortOrder, SpecialFolders
from datasets_util import DatasetsUtil
from groups_util import GroupsUtil
from organization_util import OrganizationUtil
from task_log_viewer import TaskLogViewer
from cla import *
from expression_navigator import *
|
import sys
if not ((2, 7, 5) <= sys.version_info < (3, 0)):
sys.stderr.write(
'Python version "%s" is not supported. Required version 2.7.5+, Python 3 is not supported\n' % sys.version
)
exit(1)
from version import __version__
from genestack_exceptions import (GenestackAuthenticationException, GenestackBaseException,
GenestackConnectionFailure, GenestackException,
GenestackResponseError, GenestackServerException,
GenestackVersionException)
from genestack_connection import Connection, Application
from file_types import FileTypes
from file_permissions import Permissions
from metainfo_scalar_values import *
from bio_meta_keys import BioMetaKeys
from genestack_metainfo import Metainfo
from bio_metainfo import BioMetainfo
from data_importer import DataImporter
from file_initializer import FileInitializer
from genome_query import GenomeQuery
from utils import get_connection, get_user, make_connection_parser, validate_constant
from file_filters import *
from share_util import ShareUtil
- from files_util import FilesUtil, MatchType, SortOrder, SpecialFolders
? -----------
+ from files_util import FilesUtil, SortOrder, SpecialFolders
from datasets_util import DatasetsUtil
from groups_util import GroupsUtil
from organization_util import OrganizationUtil
from task_log_viewer import TaskLogViewer
from cla import *
from expression_navigator import *
|
043b5e7026663c8fdae8df4f27d3887ef881d405
|
src/viewsapp/views.py
|
src/viewsapp/views.py
|
from django.shortcuts import (
get_object_or_404, redirect, render)
from django.views.generic import View
from .forms import ExampleForm
from .models import ExampleModel
class ModelDetail(View):
def get(self, request, *args, **kwargs):
request_slug = kwargs.get('slug')
example_obj = get_object_or_404(
ExampleModel, slug=request_slug)
return render(
request,
'viewsapp/detail.html',
{'object': example_obj})
class ModelCreate(View):
context_object_name = 'form'
form_class = ExampleForm
template_name = 'viewsapp/form.html'
def get(self, request, *args, **kwargs):
return render(
request,
self.template_name,
{self.context_object_name:
self.form_class()})
def post(self, request, *args, **kwargs):
bound_form = self.form_class(request.POST)
if bound_form.is_valid():
new_obj = bound_form.save()
return redirect(new_obj)
return render(
request,
self.template_name,
{self.context_object_name:
bound_form})
|
from django.shortcuts import redirect, render
from django.views.generic import DetailView, View
from .forms import ExampleForm
from .models import ExampleModel
class ModelDetail(DetailView):
model = ExampleModel
template_name = 'viewsapp/detail.html'
class ModelCreate(View):
context_object_name = 'form'
form_class = ExampleForm
template_name = 'viewsapp/form.html'
def get(self, request, *args, **kwargs):
return render(
request,
self.template_name,
{self.context_object_name:
self.form_class()})
def post(self, request, *args, **kwargs):
bound_form = self.form_class(request.POST)
if bound_form.is_valid():
new_obj = bound_form.save()
return redirect(new_obj)
return render(
request,
self.template_name,
{self.context_object_name:
bound_form})
|
Refactor ModelDetail to inherit DetailView GCBV.
|
Refactor ModelDetail to inherit DetailView GCBV.
|
Python
|
bsd-2-clause
|
jambonrose/djangocon2015-views,jambonrose/djangocon2015-views
|
- from django.shortcuts import (
+ from django.shortcuts import redirect, render
- get_object_or_404, redirect, render)
- from django.views.generic import View
+ from django.views.generic import DetailView, View
from .forms import ExampleForm
from .models import ExampleModel
- class ModelDetail(View):
+ class ModelDetail(DetailView):
+ model = ExampleModel
+ template_name = 'viewsapp/detail.html'
-
- def get(self, request, *args, **kwargs):
- request_slug = kwargs.get('slug')
- example_obj = get_object_or_404(
- ExampleModel, slug=request_slug)
- return render(
- request,
- 'viewsapp/detail.html',
- {'object': example_obj})
class ModelCreate(View):
context_object_name = 'form'
form_class = ExampleForm
template_name = 'viewsapp/form.html'
def get(self, request, *args, **kwargs):
return render(
request,
self.template_name,
{self.context_object_name:
self.form_class()})
def post(self, request, *args, **kwargs):
bound_form = self.form_class(request.POST)
if bound_form.is_valid():
new_obj = bound_form.save()
return redirect(new_obj)
return render(
request,
self.template_name,
{self.context_object_name:
bound_form})
|
Refactor ModelDetail to inherit DetailView GCBV.
|
## Code Before:
from django.shortcuts import (
get_object_or_404, redirect, render)
from django.views.generic import View
from .forms import ExampleForm
from .models import ExampleModel
class ModelDetail(View):
def get(self, request, *args, **kwargs):
request_slug = kwargs.get('slug')
example_obj = get_object_or_404(
ExampleModel, slug=request_slug)
return render(
request,
'viewsapp/detail.html',
{'object': example_obj})
class ModelCreate(View):
context_object_name = 'form'
form_class = ExampleForm
template_name = 'viewsapp/form.html'
def get(self, request, *args, **kwargs):
return render(
request,
self.template_name,
{self.context_object_name:
self.form_class()})
def post(self, request, *args, **kwargs):
bound_form = self.form_class(request.POST)
if bound_form.is_valid():
new_obj = bound_form.save()
return redirect(new_obj)
return render(
request,
self.template_name,
{self.context_object_name:
bound_form})
## Instruction:
Refactor ModelDetail to inherit DetailView GCBV.
## Code After:
from django.shortcuts import redirect, render
from django.views.generic import DetailView, View
from .forms import ExampleForm
from .models import ExampleModel
class ModelDetail(DetailView):
model = ExampleModel
template_name = 'viewsapp/detail.html'
class ModelCreate(View):
context_object_name = 'form'
form_class = ExampleForm
template_name = 'viewsapp/form.html'
def get(self, request, *args, **kwargs):
return render(
request,
self.template_name,
{self.context_object_name:
self.form_class()})
def post(self, request, *args, **kwargs):
bound_form = self.form_class(request.POST)
if bound_form.is_valid():
new_obj = bound_form.save()
return redirect(new_obj)
return render(
request,
self.template_name,
{self.context_object_name:
bound_form})
|
- from django.shortcuts import (
? ^
+ from django.shortcuts import redirect, render
? ^^^^^^^^^^^^^^^^
- get_object_or_404, redirect, render)
- from django.views.generic import View
+ from django.views.generic import DetailView, View
? ++++++ ++++++
from .forms import ExampleForm
from .models import ExampleModel
- class ModelDetail(View):
+ class ModelDetail(DetailView):
? ++++++
+ model = ExampleModel
+ template_name = 'viewsapp/detail.html'
-
- def get(self, request, *args, **kwargs):
- request_slug = kwargs.get('slug')
- example_obj = get_object_or_404(
- ExampleModel, slug=request_slug)
- return render(
- request,
- 'viewsapp/detail.html',
- {'object': example_obj})
class ModelCreate(View):
context_object_name = 'form'
form_class = ExampleForm
template_name = 'viewsapp/form.html'
def get(self, request, *args, **kwargs):
return render(
request,
self.template_name,
{self.context_object_name:
self.form_class()})
def post(self, request, *args, **kwargs):
bound_form = self.form_class(request.POST)
if bound_form.is_valid():
new_obj = bound_form.save()
return redirect(new_obj)
return render(
request,
self.template_name,
{self.context_object_name:
bound_form})
|
675c05bd685d550e3c46137f2f52dcdb125cefa0
|
tests/test_speed.py
|
tests/test_speed.py
|
import os, sys
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
import glob
import fnmatch
import traceback
import logging
import numpy
import pytest
import lasio
test_dir = os.path.dirname(__file__)
egfn = lambda fn: os.path.join(os.path.dirname(__file__), "examples", fn)
stegfn = lambda vers, fn: os.path.join(os.path.dirname(__file__), "examples", vers, fn)
logger = logging.getLogger(__name__)
def test_read_v12_sample_big():
l = lasio.read(stegfn("1.2", "sample_big.las"))
|
import os, sys
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
import glob
import fnmatch
import traceback
import logging
import numpy
import pytest
import lasio
test_dir = os.path.dirname(__file__)
egfn = lambda fn: os.path.join(os.path.dirname(__file__), "examples", fn)
stegfn = lambda vers, fn: os.path.join(os.path.dirname(__file__), "examples", vers, fn)
logger = logging.getLogger(__name__)
def read_file():
las = lasio.read(stegfn("1.2", "sample_big.las"))
def test_read_v12_sample_big(benchmark):
benchmark(read_file)
|
Add benchmark test for the speed of reading a LAS file
|
Add benchmark test for the speed of reading a LAS file
To run it you need to have `pytest-benchmark` installed, and
run the tests using:
```
$ pytest lasio/tests/tests_speed.py
```
To compare two branches, you need to run and store the benchmark from the first branch e.g. master
and then run and compare the benchmark from the second branch. e.g.
```
$ git checkout master
$ mkdir ..\lasio-benchmarks
$ pytest tests/\test_speed.py --benchmark-autosave --benchmark-storage ..\lasio-benchmarks --benchmark-compare
|
Python
|
mit
|
kwinkunks/lasio,kinverarity1/lasio,kinverarity1/las-reader
|
import os, sys
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
import glob
import fnmatch
import traceback
import logging
import numpy
import pytest
import lasio
test_dir = os.path.dirname(__file__)
egfn = lambda fn: os.path.join(os.path.dirname(__file__), "examples", fn)
stegfn = lambda vers, fn: os.path.join(os.path.dirname(__file__), "examples", vers, fn)
logger = logging.getLogger(__name__)
- def test_read_v12_sample_big():
+ def read_file():
- l = lasio.read(stegfn("1.2", "sample_big.las"))
+ las = lasio.read(stegfn("1.2", "sample_big.las"))
+ def test_read_v12_sample_big(benchmark):
+ benchmark(read_file)
+
|
Add benchmark test for the speed of reading a LAS file
|
## Code Before:
import os, sys
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
import glob
import fnmatch
import traceback
import logging
import numpy
import pytest
import lasio
test_dir = os.path.dirname(__file__)
egfn = lambda fn: os.path.join(os.path.dirname(__file__), "examples", fn)
stegfn = lambda vers, fn: os.path.join(os.path.dirname(__file__), "examples", vers, fn)
logger = logging.getLogger(__name__)
def test_read_v12_sample_big():
l = lasio.read(stegfn("1.2", "sample_big.las"))
## Instruction:
Add benchmark test for the speed of reading a LAS file
## Code After:
import os, sys
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
import glob
import fnmatch
import traceback
import logging
import numpy
import pytest
import lasio
test_dir = os.path.dirname(__file__)
egfn = lambda fn: os.path.join(os.path.dirname(__file__), "examples", fn)
stegfn = lambda vers, fn: os.path.join(os.path.dirname(__file__), "examples", vers, fn)
logger = logging.getLogger(__name__)
def read_file():
las = lasio.read(stegfn("1.2", "sample_big.las"))
def test_read_v12_sample_big(benchmark):
benchmark(read_file)
|
import os, sys
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
import glob
import fnmatch
import traceback
import logging
import numpy
import pytest
import lasio
test_dir = os.path.dirname(__file__)
egfn = lambda fn: os.path.join(os.path.dirname(__file__), "examples", fn)
stegfn = lambda vers, fn: os.path.join(os.path.dirname(__file__), "examples", vers, fn)
logger = logging.getLogger(__name__)
- def test_read_v12_sample_big():
+ def read_file():
- l = lasio.read(stegfn("1.2", "sample_big.las"))
+ las = lasio.read(stegfn("1.2", "sample_big.las"))
? ++
+
+ def test_read_v12_sample_big(benchmark):
+ benchmark(read_file)
|
184cc6448a7bed4c945b0c5cb1e3739c3fb9c7f8
|
examples/list_vmss_pips.py
|
examples/list_vmss_pips.py
|
import azurerm
import json
import sys
# check for single command argument
if len(sys.argv) == 3:
rg = sys.argv[1]
vmss = sys.argv[2]
else:
sys.exit('Expecting resource group name and vmss name as arguments.')
# Load Azure app defaults
try:
with open('azurermconfig.json') as configFile:
configData = json.load(configFile)
except FileNotFoundError:
print("Error: Expecting azurermconfig.json in current folder")
sys.exit()
tenant_id = configData['tenantId']
app_id = configData['appId']
app_secret = configData['appSecret']
subscription_id = configData['subscriptionId']
access_token = azurerm.get_access_token(tenant_id, app_id, app_secret)
public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rg, vmss)
print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': ')))
|
import argparse
import azurerm
import json
import re
import sys
# validate command line arguments
argParser = argparse.ArgumentParser()
argParser.add_argument('--vmssname', '-n', required=True,
action='store', help='VMSS Name')
argParser.add_argument('--rgname', '-g', required=True,
action='store', help='Resource Group Name')
argParser.add_argument('--details', '-a', required=False,
action='store', help='Print all details')
args = argParser.parse_args()
name = args.vmssname
rgname = args.rgname
details = args.details
# Load Azure app defaults
try:
with open('azurermconfig.json') as configFile:
configData = json.load(configFile)
except FileNotFoundError:
print("Error: Expecting azurermconfig.json in current folder")
sys.exit()
tenant_id = configData['tenantId']
app_id = configData['appId']
app_secret = configData['appSecret']
subscription_id = configData['subscriptionId']
access_token = azurerm.get_access_token(tenant_id, app_id, app_secret)
public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rgname, name)
if details is True:
print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': ')))
else:
for ip in public_ips['value']:
vm_id = re.search('Machines/(.*)/networkInt', ip['id']).group(1)
ipaddr = ip['properties']['ipAddress']
print('VM id: ' + vm_id + ', IP: ' + ipaddr)
|
Improve list VMSS public IP example
|
Improve list VMSS public IP example
|
Python
|
mit
|
gbowerman/azurerm
|
+ import argparse
import azurerm
import json
+ import re
import sys
- # check for single command argument
- if len(sys.argv) == 3:
- rg = sys.argv[1]
- vmss = sys.argv[2]
- else:
- sys.exit('Expecting resource group name and vmss name as arguments.')
+ # validate command line arguments
+ argParser = argparse.ArgumentParser()
+
+ argParser.add_argument('--vmssname', '-n', required=True,
+ action='store', help='VMSS Name')
+ argParser.add_argument('--rgname', '-g', required=True,
+ action='store', help='Resource Group Name')
+ argParser.add_argument('--details', '-a', required=False,
+ action='store', help='Print all details')
+
+ args = argParser.parse_args()
+
+ name = args.vmssname
+ rgname = args.rgname
+ details = args.details
+
# Load Azure app defaults
try:
with open('azurermconfig.json') as configFile:
configData = json.load(configFile)
except FileNotFoundError:
print("Error: Expecting azurermconfig.json in current folder")
sys.exit()
tenant_id = configData['tenantId']
app_id = configData['appId']
app_secret = configData['appSecret']
subscription_id = configData['subscriptionId']
access_token = azurerm.get_access_token(tenant_id, app_id, app_secret)
- public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rg, vmss)
+ public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rgname, name)
+
+ if details is True:
- print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': ')))
+ print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': ')))
+ else:
+ for ip in public_ips['value']:
+ vm_id = re.search('Machines/(.*)/networkInt', ip['id']).group(1)
+ ipaddr = ip['properties']['ipAddress']
+ print('VM id: ' + vm_id + ', IP: ' + ipaddr)
|
Improve list VMSS public IP example
|
## Code Before:
import azurerm
import json
import sys
# check for single command argument
if len(sys.argv) == 3:
rg = sys.argv[1]
vmss = sys.argv[2]
else:
sys.exit('Expecting resource group name and vmss name as arguments.')
# Load Azure app defaults
try:
with open('azurermconfig.json') as configFile:
configData = json.load(configFile)
except FileNotFoundError:
print("Error: Expecting azurermconfig.json in current folder")
sys.exit()
tenant_id = configData['tenantId']
app_id = configData['appId']
app_secret = configData['appSecret']
subscription_id = configData['subscriptionId']
access_token = azurerm.get_access_token(tenant_id, app_id, app_secret)
public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rg, vmss)
print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': ')))
## Instruction:
Improve list VMSS public IP example
## Code After:
import argparse
import azurerm
import json
import re
import sys
# validate command line arguments
argParser = argparse.ArgumentParser()
argParser.add_argument('--vmssname', '-n', required=True,
action='store', help='VMSS Name')
argParser.add_argument('--rgname', '-g', required=True,
action='store', help='Resource Group Name')
argParser.add_argument('--details', '-a', required=False,
action='store', help='Print all details')
args = argParser.parse_args()
name = args.vmssname
rgname = args.rgname
details = args.details
# Load Azure app defaults
try:
with open('azurermconfig.json') as configFile:
configData = json.load(configFile)
except FileNotFoundError:
print("Error: Expecting azurermconfig.json in current folder")
sys.exit()
tenant_id = configData['tenantId']
app_id = configData['appId']
app_secret = configData['appSecret']
subscription_id = configData['subscriptionId']
access_token = azurerm.get_access_token(tenant_id, app_id, app_secret)
public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rgname, name)
if details is True:
print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': ')))
else:
for ip in public_ips['value']:
vm_id = re.search('Machines/(.*)/networkInt', ip['id']).group(1)
ipaddr = ip['properties']['ipAddress']
print('VM id: ' + vm_id + ', IP: ' + ipaddr)
|
+ import argparse
import azurerm
import json
+ import re
import sys
- # check for single command argument
- if len(sys.argv) == 3:
- rg = sys.argv[1]
- vmss = sys.argv[2]
- else:
- sys.exit('Expecting resource group name and vmss name as arguments.')
+ # validate command line arguments
+ argParser = argparse.ArgumentParser()
+
+ argParser.add_argument('--vmssname', '-n', required=True,
+ action='store', help='VMSS Name')
+ argParser.add_argument('--rgname', '-g', required=True,
+ action='store', help='Resource Group Name')
+ argParser.add_argument('--details', '-a', required=False,
+ action='store', help='Print all details')
+
+ args = argParser.parse_args()
+
+ name = args.vmssname
+ rgname = args.rgname
+ details = args.details
+
# Load Azure app defaults
try:
with open('azurermconfig.json') as configFile:
configData = json.load(configFile)
except FileNotFoundError:
print("Error: Expecting azurermconfig.json in current folder")
sys.exit()
tenant_id = configData['tenantId']
app_id = configData['appId']
app_secret = configData['appSecret']
subscription_id = configData['subscriptionId']
access_token = azurerm.get_access_token(tenant_id, app_id, app_secret)
- public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rg, vmss)
? ^ ^^
+ public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rgname, name)
? ++++ ^^ ^
- print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': ')))
+ if details is True:
+ print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': ')))
+ else:
+ for ip in public_ips['value']:
+ vm_id = re.search('Machines/(.*)/networkInt', ip['id']).group(1)
+ ipaddr = ip['properties']['ipAddress']
+ print('VM id: ' + vm_id + ', IP: ' + ipaddr)
+
|
65b4c081c3a66ccd373062f8e7c1d63295c8d8d1
|
cache_relation/app_settings.py
|
cache_relation/app_settings.py
|
CACHE_RELATION_DEFAULT_DURATION = 60 * 60 * 24 * 3
|
from django.conf import settings
# Default cache timeout
CACHE_RELATION_DEFAULT_DURATION = getattr(
settings,
'CACHE_RELATION_DEFAULT_DURATION',
60 * 60 * 24 * 3,
)
|
Allow global settings to override.
|
Allow global settings to override.
Signed-off-by: Chris Lamb <[email protected]>
|
Python
|
bsd-3-clause
|
thread/django-sensible-caching,playfire/django-cache-toolbox,lamby/django-sensible-caching,lamby/django-cache-toolbox
|
- CACHE_RELATION_DEFAULT_DURATION = 60 * 60 * 24 * 3
+ from django.conf import settings
+ # Default cache timeout
+ CACHE_RELATION_DEFAULT_DURATION = getattr(
+ settings,
+ 'CACHE_RELATION_DEFAULT_DURATION',
+ 60 * 60 * 24 * 3,
+ )
+
|
Allow global settings to override.
|
## Code Before:
CACHE_RELATION_DEFAULT_DURATION = 60 * 60 * 24 * 3
## Instruction:
Allow global settings to override.
## Code After:
from django.conf import settings
# Default cache timeout
CACHE_RELATION_DEFAULT_DURATION = getattr(
settings,
'CACHE_RELATION_DEFAULT_DURATION',
60 * 60 * 24 * 3,
)
|
- CACHE_RELATION_DEFAULT_DURATION = 60 * 60 * 24 * 3
+ from django.conf import settings
+
+ # Default cache timeout
+ CACHE_RELATION_DEFAULT_DURATION = getattr(
+ settings,
+ 'CACHE_RELATION_DEFAULT_DURATION',
+ 60 * 60 * 24 * 3,
+ )
|
f67746750bdd2a1d6e662b1fc36d5a6fa13098c5
|
scripts/generate.py
|
scripts/generate.py
|
params = [
("dict(dim=250, dim_mlp=250)", "run1"),
("dict(dim=500, dim_mlp=500)", "run2"),
("dict(rank_n_approx=200)", "run3"),
("dict(rank_n_approx=500)", "run4"),
("dict(avg_word=False)", "run5")
]
for options, name in params:
with open("{}.sh".format(name), "w") as script:
log = "{}.log".format(name)
print >>script, template.format(**locals())
|
params = [
("dict(dim=250, dim_mlp=250, prefix='model_run1_')", "run1"),
("dict(dim=500, dim_mlp=500, prefix='model_run2_')", "run2"),
("dict(rank_n_approx=200, prefix='model_run3_')", "run3"),
("dict(rank_n_approx=500, prefix='model_run4_')", "run4"),
("dict(avg_word=False, prefix='model_run5_')", "run5")
]
for options, name in params:
with open("{}.sh".format(name), "w") as script:
log = "{}.log".format(name)
print >>script, template.format(**locals())
|
Add different prefixes for the experiments
|
Add different prefixes for the experiments
|
Python
|
bsd-3-clause
|
rizar/groundhog-private
|
params = [
- ("dict(dim=250, dim_mlp=250)", "run1"),
+ ("dict(dim=250, dim_mlp=250, prefix='model_run1_')", "run1"),
- ("dict(dim=500, dim_mlp=500)", "run2"),
+ ("dict(dim=500, dim_mlp=500, prefix='model_run2_')", "run2"),
- ("dict(rank_n_approx=200)", "run3"),
+ ("dict(rank_n_approx=200, prefix='model_run3_')", "run3"),
- ("dict(rank_n_approx=500)", "run4"),
+ ("dict(rank_n_approx=500, prefix='model_run4_')", "run4"),
- ("dict(avg_word=False)", "run5")
+ ("dict(avg_word=False, prefix='model_run5_')", "run5")
]
for options, name in params:
with open("{}.sh".format(name), "w") as script:
log = "{}.log".format(name)
print >>script, template.format(**locals())
|
Add different prefixes for the experiments
|
## Code Before:
params = [
("dict(dim=250, dim_mlp=250)", "run1"),
("dict(dim=500, dim_mlp=500)", "run2"),
("dict(rank_n_approx=200)", "run3"),
("dict(rank_n_approx=500)", "run4"),
("dict(avg_word=False)", "run5")
]
for options, name in params:
with open("{}.sh".format(name), "w") as script:
log = "{}.log".format(name)
print >>script, template.format(**locals())
## Instruction:
Add different prefixes for the experiments
## Code After:
params = [
("dict(dim=250, dim_mlp=250, prefix='model_run1_')", "run1"),
("dict(dim=500, dim_mlp=500, prefix='model_run2_')", "run2"),
("dict(rank_n_approx=200, prefix='model_run3_')", "run3"),
("dict(rank_n_approx=500, prefix='model_run4_')", "run4"),
("dict(avg_word=False, prefix='model_run5_')", "run5")
]
for options, name in params:
with open("{}.sh".format(name), "w") as script:
log = "{}.log".format(name)
print >>script, template.format(**locals())
|
params = [
- ("dict(dim=250, dim_mlp=250)", "run1"),
+ ("dict(dim=250, dim_mlp=250, prefix='model_run1_')", "run1"),
? ++++++++++++++++++++++
- ("dict(dim=500, dim_mlp=500)", "run2"),
+ ("dict(dim=500, dim_mlp=500, prefix='model_run2_')", "run2"),
? ++++++++++++++++++++++
- ("dict(rank_n_approx=200)", "run3"),
+ ("dict(rank_n_approx=200, prefix='model_run3_')", "run3"),
? ++++++++++++++++++++++
- ("dict(rank_n_approx=500)", "run4"),
+ ("dict(rank_n_approx=500, prefix='model_run4_')", "run4"),
? ++++++++++++++++++++++
- ("dict(avg_word=False)", "run5")
+ ("dict(avg_word=False, prefix='model_run5_')", "run5")
? ++++++++++++++++++++++
]
for options, name in params:
with open("{}.sh".format(name), "w") as script:
log = "{}.log".format(name)
print >>script, template.format(**locals())
|
5d2a4ac0e48d404a16b81d2f290be5ec13bdf8f1
|
logintokens/forms.py
|
logintokens/forms.py
|
from django import forms
from django.contrib.auth import get_user_model
from django.core.mail import EmailMultiAlternatives
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse_lazy
from logintokens.tokens import default_token_generator
USER = get_user_model()
class TokenLoginForm(forms.Form):
email = forms.EmailField(label="Email", max_length=254)
def generate_login_link(self, email, request):
protocol = 'https' if request.is_secure() else 'http'
domain = get_current_site(request).domain
url = reverse_lazy('token_login')
token = default_token_generator.make_token(email)
return '{}://{}{}?token={}'.format(protocol, domain, url, token)
def save(self, request):
"""Generate a login token and send it to the email from the form.
"""
email = self.cleaned_data['email']
body = 'To complete the login process, simply click on this link: {}'
login_link = self.generate_login_link(email, request)
email_message = EmailMultiAlternatives(
'Your login link for ANIAuth',
body.format(login_link),
to=[email]
)
email_message.send()
|
from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import UsernameField
from django.core.mail import EmailMultiAlternatives
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse_lazy
from logintokens.tokens import default_token_generator
USER = get_user_model()
class TokenLoginForm(forms.Form):
email = UsernameField(
max_length=254,
widget=forms.TextInput(attrs={'autofocus': True}),
)
def generate_login_link(self, username, request):
protocol = 'https' if request.is_secure() else 'http'
domain = get_current_site(request).domain
url = reverse_lazy('token_login')
token = default_token_generator.make_token(username)
return '{}://{}{}?token={}'.format(protocol, domain, url, token)
def save(self, request):
"""Generate a login token and send it to the email from the form.
"""
username = self.cleaned_data['email']
try:
user = USER._default_manager.get_by_natural_key(username)
email = getattr(user, USER.EMAIL_FIELD)
except USER.DoesNotExist:
email = username
body = 'To complete the login process, simply click on this link: {}'
login_link = self.generate_login_link(username, request)
email_message = EmailMultiAlternatives(
'Your login link for ANIAuth',
body.format(login_link),
to=[email]
)
email_message.send()
|
Update form to pass new test
|
Update form to pass new test
|
Python
|
mit
|
randomic/aniauth-tdd,randomic/aniauth-tdd
|
from django import forms
from django.contrib.auth import get_user_model
+ from django.contrib.auth.forms import UsernameField
from django.core.mail import EmailMultiAlternatives
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse_lazy
from logintokens.tokens import default_token_generator
USER = get_user_model()
class TokenLoginForm(forms.Form):
- email = forms.EmailField(label="Email", max_length=254)
+ email = UsernameField(
+ max_length=254,
+ widget=forms.TextInput(attrs={'autofocus': True}),
+ )
- def generate_login_link(self, email, request):
+ def generate_login_link(self, username, request):
protocol = 'https' if request.is_secure() else 'http'
domain = get_current_site(request).domain
url = reverse_lazy('token_login')
- token = default_token_generator.make_token(email)
+ token = default_token_generator.make_token(username)
return '{}://{}{}?token={}'.format(protocol, domain, url, token)
def save(self, request):
"""Generate a login token and send it to the email from the form.
"""
- email = self.cleaned_data['email']
+ username = self.cleaned_data['email']
+ try:
+ user = USER._default_manager.get_by_natural_key(username)
+ email = getattr(user, USER.EMAIL_FIELD)
+ except USER.DoesNotExist:
+ email = username
body = 'To complete the login process, simply click on this link: {}'
- login_link = self.generate_login_link(email, request)
+ login_link = self.generate_login_link(username, request)
email_message = EmailMultiAlternatives(
'Your login link for ANIAuth',
body.format(login_link),
to=[email]
)
email_message.send()
|
Update form to pass new test
|
## Code Before:
from django import forms
from django.contrib.auth import get_user_model
from django.core.mail import EmailMultiAlternatives
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse_lazy
from logintokens.tokens import default_token_generator
USER = get_user_model()
class TokenLoginForm(forms.Form):
email = forms.EmailField(label="Email", max_length=254)
def generate_login_link(self, email, request):
protocol = 'https' if request.is_secure() else 'http'
domain = get_current_site(request).domain
url = reverse_lazy('token_login')
token = default_token_generator.make_token(email)
return '{}://{}{}?token={}'.format(protocol, domain, url, token)
def save(self, request):
"""Generate a login token and send it to the email from the form.
"""
email = self.cleaned_data['email']
body = 'To complete the login process, simply click on this link: {}'
login_link = self.generate_login_link(email, request)
email_message = EmailMultiAlternatives(
'Your login link for ANIAuth',
body.format(login_link),
to=[email]
)
email_message.send()
## Instruction:
Update form to pass new test
## Code After:
from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import UsernameField
from django.core.mail import EmailMultiAlternatives
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse_lazy
from logintokens.tokens import default_token_generator
USER = get_user_model()
class TokenLoginForm(forms.Form):
email = UsernameField(
max_length=254,
widget=forms.TextInput(attrs={'autofocus': True}),
)
def generate_login_link(self, username, request):
protocol = 'https' if request.is_secure() else 'http'
domain = get_current_site(request).domain
url = reverse_lazy('token_login')
token = default_token_generator.make_token(username)
return '{}://{}{}?token={}'.format(protocol, domain, url, token)
def save(self, request):
"""Generate a login token and send it to the email from the form.
"""
username = self.cleaned_data['email']
try:
user = USER._default_manager.get_by_natural_key(username)
email = getattr(user, USER.EMAIL_FIELD)
except USER.DoesNotExist:
email = username
body = 'To complete the login process, simply click on this link: {}'
login_link = self.generate_login_link(username, request)
email_message = EmailMultiAlternatives(
'Your login link for ANIAuth',
body.format(login_link),
to=[email]
)
email_message.send()
|
from django import forms
from django.contrib.auth import get_user_model
+ from django.contrib.auth.forms import UsernameField
from django.core.mail import EmailMultiAlternatives
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse_lazy
from logintokens.tokens import default_token_generator
USER = get_user_model()
class TokenLoginForm(forms.Form):
- email = forms.EmailField(label="Email", max_length=254)
+ email = UsernameField(
+ max_length=254,
+ widget=forms.TextInput(attrs={'autofocus': True}),
+ )
- def generate_login_link(self, email, request):
? ^^^
+ def generate_login_link(self, username, request):
? ++ +++ ^
protocol = 'https' if request.is_secure() else 'http'
domain = get_current_site(request).domain
url = reverse_lazy('token_login')
- token = default_token_generator.make_token(email)
? ^^^
+ token = default_token_generator.make_token(username)
? ++ +++ ^
return '{}://{}{}?token={}'.format(protocol, domain, url, token)
def save(self, request):
"""Generate a login token and send it to the email from the form.
"""
- email = self.cleaned_data['email']
? ^^^
+ username = self.cleaned_data['email']
? ++ +++ ^
+ try:
+ user = USER._default_manager.get_by_natural_key(username)
+ email = getattr(user, USER.EMAIL_FIELD)
+ except USER.DoesNotExist:
+ email = username
body = 'To complete the login process, simply click on this link: {}'
- login_link = self.generate_login_link(email, request)
? ^^^
+ login_link = self.generate_login_link(username, request)
? ++ +++ ^
email_message = EmailMultiAlternatives(
'Your login link for ANIAuth',
body.format(login_link),
to=[email]
)
email_message.send()
|
b8ca257a1a2727a9caa043739463e8cdf49c8d5a
|
news/middleware.py
|
news/middleware.py
|
from django.conf import settings
from django_statsd.clients import statsd
from django_statsd.middleware import GraphiteRequestTimingMiddleware
class GraphiteViewHitCountMiddleware(GraphiteRequestTimingMiddleware):
"""add hit counting to statsd's request timer."""
def process_view(self, request, view_func, view_args, view_kwargs):
super(GraphiteViewHitCountMiddleware, self).process_view(
request, view_func, view_args, view_kwargs)
if hasattr(request, '_view_name'):
data = dict(module=request._view_module, name=request._view_name,
method=request.method)
statsd.incr('view.count.{module}.{name}.{method}'.format(**data))
statsd.incr('view.count.{module}.{method}'.format(**data))
statsd.incr('view.count.{method}'.format(**data))
class HostnameMiddleware(object):
def __init__(self):
values = [getattr(settings, x) for x in ['HOSTNAME', 'DEIS_APP',
'DEIS_RELEASE', 'DEIS_DOMAIN']]
self.backend_server = '.'.join(x for x in values if x)
def process_response(self, request, response):
response['X-Backend-Server'] = self.backend_server
return response
|
from django.conf import settings
from django_statsd.clients import statsd
from django_statsd.middleware import GraphiteRequestTimingMiddleware
class GraphiteViewHitCountMiddleware(GraphiteRequestTimingMiddleware):
"""add hit counting to statsd's request timer."""
def process_view(self, request, view_func, view_args, view_kwargs):
super(GraphiteViewHitCountMiddleware, self).process_view(
request, view_func, view_args, view_kwargs)
if hasattr(request, '_view_name'):
secure = 'secure' if request.is_secure() else 'insecure'
data = dict(module=request._view_module, name=request._view_name,
method=request.method, secure=secure)
statsd.incr('view.count.{module}.{name}.{method}.{secure}'.format(**data))
statsd.incr('view.count.{module}.{name}.{method}'.format(**data))
statsd.incr('view.count.{module}.{method}.{secure}'.format(**data))
statsd.incr('view.count.{module}.{method}'.format(**data))
statsd.incr('view.count.{method}.{secure}'.format(**data))
statsd.incr('view.count.{method}'.format(**data))
class HostnameMiddleware(object):
def __init__(self):
values = [getattr(settings, x) for x in ['HOSTNAME', 'DEIS_APP',
'DEIS_RELEASE', 'DEIS_DOMAIN']]
self.backend_server = '.'.join(x for x in values if x)
def process_response(self, request, response):
response['X-Backend-Server'] = self.backend_server
return response
|
Add statsd data for (in)secure requests
|
Add statsd data for (in)secure requests
|
Python
|
mpl-2.0
|
glogiotatidis/basket,glogiotatidis/basket,glogiotatidis/basket
|
from django.conf import settings
from django_statsd.clients import statsd
from django_statsd.middleware import GraphiteRequestTimingMiddleware
class GraphiteViewHitCountMiddleware(GraphiteRequestTimingMiddleware):
"""add hit counting to statsd's request timer."""
def process_view(self, request, view_func, view_args, view_kwargs):
super(GraphiteViewHitCountMiddleware, self).process_view(
request, view_func, view_args, view_kwargs)
if hasattr(request, '_view_name'):
+ secure = 'secure' if request.is_secure() else 'insecure'
data = dict(module=request._view_module, name=request._view_name,
- method=request.method)
+ method=request.method, secure=secure)
+ statsd.incr('view.count.{module}.{name}.{method}.{secure}'.format(**data))
statsd.incr('view.count.{module}.{name}.{method}'.format(**data))
+ statsd.incr('view.count.{module}.{method}.{secure}'.format(**data))
statsd.incr('view.count.{module}.{method}'.format(**data))
+ statsd.incr('view.count.{method}.{secure}'.format(**data))
statsd.incr('view.count.{method}'.format(**data))
class HostnameMiddleware(object):
def __init__(self):
values = [getattr(settings, x) for x in ['HOSTNAME', 'DEIS_APP',
'DEIS_RELEASE', 'DEIS_DOMAIN']]
self.backend_server = '.'.join(x for x in values if x)
def process_response(self, request, response):
response['X-Backend-Server'] = self.backend_server
return response
|
Add statsd data for (in)secure requests
|
## Code Before:
from django.conf import settings
from django_statsd.clients import statsd
from django_statsd.middleware import GraphiteRequestTimingMiddleware
class GraphiteViewHitCountMiddleware(GraphiteRequestTimingMiddleware):
"""add hit counting to statsd's request timer."""
def process_view(self, request, view_func, view_args, view_kwargs):
super(GraphiteViewHitCountMiddleware, self).process_view(
request, view_func, view_args, view_kwargs)
if hasattr(request, '_view_name'):
data = dict(module=request._view_module, name=request._view_name,
method=request.method)
statsd.incr('view.count.{module}.{name}.{method}'.format(**data))
statsd.incr('view.count.{module}.{method}'.format(**data))
statsd.incr('view.count.{method}'.format(**data))
class HostnameMiddleware(object):
def __init__(self):
values = [getattr(settings, x) for x in ['HOSTNAME', 'DEIS_APP',
'DEIS_RELEASE', 'DEIS_DOMAIN']]
self.backend_server = '.'.join(x for x in values if x)
def process_response(self, request, response):
response['X-Backend-Server'] = self.backend_server
return response
## Instruction:
Add statsd data for (in)secure requests
## Code After:
from django.conf import settings
from django_statsd.clients import statsd
from django_statsd.middleware import GraphiteRequestTimingMiddleware
class GraphiteViewHitCountMiddleware(GraphiteRequestTimingMiddleware):
"""add hit counting to statsd's request timer."""
def process_view(self, request, view_func, view_args, view_kwargs):
super(GraphiteViewHitCountMiddleware, self).process_view(
request, view_func, view_args, view_kwargs)
if hasattr(request, '_view_name'):
secure = 'secure' if request.is_secure() else 'insecure'
data = dict(module=request._view_module, name=request._view_name,
method=request.method, secure=secure)
statsd.incr('view.count.{module}.{name}.{method}.{secure}'.format(**data))
statsd.incr('view.count.{module}.{name}.{method}'.format(**data))
statsd.incr('view.count.{module}.{method}.{secure}'.format(**data))
statsd.incr('view.count.{module}.{method}'.format(**data))
statsd.incr('view.count.{method}.{secure}'.format(**data))
statsd.incr('view.count.{method}'.format(**data))
class HostnameMiddleware(object):
def __init__(self):
values = [getattr(settings, x) for x in ['HOSTNAME', 'DEIS_APP',
'DEIS_RELEASE', 'DEIS_DOMAIN']]
self.backend_server = '.'.join(x for x in values if x)
def process_response(self, request, response):
response['X-Backend-Server'] = self.backend_server
return response
|
from django.conf import settings
from django_statsd.clients import statsd
from django_statsd.middleware import GraphiteRequestTimingMiddleware
class GraphiteViewHitCountMiddleware(GraphiteRequestTimingMiddleware):
"""add hit counting to statsd's request timer."""
def process_view(self, request, view_func, view_args, view_kwargs):
super(GraphiteViewHitCountMiddleware, self).process_view(
request, view_func, view_args, view_kwargs)
if hasattr(request, '_view_name'):
+ secure = 'secure' if request.is_secure() else 'insecure'
data = dict(module=request._view_module, name=request._view_name,
- method=request.method)
+ method=request.method, secure=secure)
? +++++++++++++++
+ statsd.incr('view.count.{module}.{name}.{method}.{secure}'.format(**data))
statsd.incr('view.count.{module}.{name}.{method}'.format(**data))
+ statsd.incr('view.count.{module}.{method}.{secure}'.format(**data))
statsd.incr('view.count.{module}.{method}'.format(**data))
+ statsd.incr('view.count.{method}.{secure}'.format(**data))
statsd.incr('view.count.{method}'.format(**data))
class HostnameMiddleware(object):
def __init__(self):
values = [getattr(settings, x) for x in ['HOSTNAME', 'DEIS_APP',
'DEIS_RELEASE', 'DEIS_DOMAIN']]
self.backend_server = '.'.join(x for x in values if x)
def process_response(self, request, response):
response['X-Backend-Server'] = self.backend_server
return response
|
39cc30f2f6c74d3a506c5d1a46cf0ccc6377b80f
|
pylibscrypt/__init__.py
|
pylibscrypt/__init__.py
|
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, get the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
|
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, try the scrypt module
if not _done:
try:
from pyscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work either, the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
|
Use pyscrypt.py in package import if libscrypt isn't available
|
Use pyscrypt.py in package import if libscrypt isn't available
|
Python
|
isc
|
jvarho/pylibscrypt,jvarho/pylibscrypt
|
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
+ # If that didn't work, try the scrypt module
+ if not _done:
+ try:
+ from pyscrypt import *
+ except ImportError:
+ pass
+ else:
+ _done = True
+
- # If that didn't work, get the inlined Python version
+ # If that didn't work either, the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
|
Use pyscrypt.py in package import if libscrypt isn't available
|
## Code Before:
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, get the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
## Instruction:
Use pyscrypt.py in package import if libscrypt isn't available
## Code After:
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, try the scrypt module
if not _done:
try:
from pyscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work either, the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
|
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
+ # If that didn't work, try the scrypt module
+ if not _done:
+ try:
+ from pyscrypt import *
+ except ImportError:
+ pass
+ else:
+ _done = True
+
- # If that didn't work, get the inlined Python version
? ----
+ # If that didn't work either, the inlined Python version
? +++++++
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
|
5f945f5335cd5d989401fe99b0752e98595748c0
|
chainer/functions/evaluation/binary_accuracy.py
|
chainer/functions/evaluation/binary_accuracy.py
|
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
count = (t != self.ignore_label).sum()
if int(count) == 0:
count = 1
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, correspondig ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
|
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
count = xp.maximum(1, (t != self.ignore_label).sum())
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, correspondig ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
|
Use maximum instead of if-statement
|
Use maximum instead of if-statement
|
Python
|
mit
|
cupy/cupy,keisuke-umezawa/chainer,benob/chainer,ktnyt/chainer,anaruse/chainer,AlpacaDB/chainer,ktnyt/chainer,rezoo/chainer,niboshi/chainer,ysekky/chainer,jnishi/chainer,keisuke-umezawa/chainer,jnishi/chainer,niboshi/chainer,hvy/chainer,chainer/chainer,okuta/chainer,okuta/chainer,wkentaro/chainer,keisuke-umezawa/chainer,ktnyt/chainer,niboshi/chainer,pfnet/chainer,wkentaro/chainer,aonotas/chainer,cupy/cupy,wkentaro/chainer,kashif/chainer,tkerola/chainer,delta2323/chainer,benob/chainer,AlpacaDB/chainer,hvy/chainer,hvy/chainer,kikusu/chainer,niboshi/chainer,kikusu/chainer,okuta/chainer,jnishi/chainer,cupy/cupy,chainer/chainer,wkentaro/chainer,kiyukuta/chainer,ktnyt/chainer,hvy/chainer,chainer/chainer,jnishi/chainer,cupy/cupy,chainer/chainer,okuta/chainer,ronekko/chainer,cemoody/chainer,keisuke-umezawa/chainer
|
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
- count = (t != self.ignore_label).sum()
+ count = xp.maximum(1, (t != self.ignore_label).sum())
- if int(count) == 0:
- count = 1
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, correspondig ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
|
Use maximum instead of if-statement
|
## Code Before:
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
count = (t != self.ignore_label).sum()
if int(count) == 0:
count = 1
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, correspondig ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
## Instruction:
Use maximum instead of if-statement
## Code After:
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
count = xp.maximum(1, (t != self.ignore_label).sum())
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, correspondig ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
|
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
- count = (t != self.ignore_label).sum()
+ count = xp.maximum(1, (t != self.ignore_label).sum())
? ++++++++++++++ +
- if int(count) == 0:
- count = 1
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, corresponding ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
|
7506e93942333a28f1e66c95016071760382a071
|
packages/Python/lldbsuite/test/repl/pounwrapping/TestPOUnwrapping.py
|
packages/Python/lldbsuite/test/repl/pounwrapping/TestPOUnwrapping.py
|
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
@decorators.expectedFailureAll(
oslist=[
"macosx",
"linux"],
bugnumber="rdar://35264910")
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
|
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
|
Revert "Disable test that fails on bot"
|
Revert "Disable test that fails on bot"
This reverts commit e214e46e748881e6418ffac374a87d6ad30fcfea.
I have reverted the swift commit that was causing this failure.
rdar://35264910
|
Python
|
apache-2.0
|
apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb
|
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
+
- @decorators.expectedFailureAll(
- oslist=[
- "macosx",
- "linux"],
- bugnumber="rdar://35264910")
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
|
Revert "Disable test that fails on bot"
|
## Code Before:
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
@decorators.expectedFailureAll(
oslist=[
"macosx",
"linux"],
bugnumber="rdar://35264910")
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
## Instruction:
Revert "Disable test that fails on bot"
## Code After:
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
|
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
+
- @decorators.expectedFailureAll(
- oslist=[
- "macosx",
- "linux"],
- bugnumber="rdar://35264910")
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.