commit
stringlengths 40
40
| old_file
stringlengths 4
106
| new_file
stringlengths 4
106
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
2.95k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
43k
| ndiff
stringlengths 52
3.31k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| diff
stringlengths 49
3.61k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
83908d3d8d3de0db5f8af26155137bf382afa24a
|
lettuce_webdriver/django.py
|
lettuce_webdriver/django.py
|
def site_url(url):
"""
Determine the server URL.
"""
base_url = 'http://%s' % socket.gethostname()
if server.port is not 80:
base_url += ':%d' % server.port
return urlparse.urljoin(base_url, url)
@step(r'I visit site page "([^"]*)"')
def visit_page(step, page):
"""
Visit the specific page of the site.
"""
step.given('I visit "%s"' % site_url(page))
|
import socket
import urlparse
from lettuce import step
from lettuce.django import server
# make sure the steps are loaded
import lettuce_webdriver.webdriver # pylint:disable=unused-import
def site_url(url):
"""
Determine the server URL.
"""
base_url = 'http://%s' % socket.gethostname()
if server.port is not 80:
base_url += ':%d' % server.port
return urlparse.urljoin(base_url, url)
@step(r'I visit site page "([^"]*)"')
def visit_page(self, page):
"""
Visit the specific page of the site.
"""
self.given('I visit "%s"' % site_url(page))
|
Fix bugs with lettuce-webdriver Django steps
|
Fix bugs with lettuce-webdriver Django steps
|
Python
|
mit
|
infoxchange/lettuce_webdriver,infoxchange/lettuce_webdriver,aloetesting/aloe_webdriver,infoxchange/aloe_webdriver,koterpillar/aloe_webdriver,macndesign/lettuce_webdriver,ponsfrilus/lettuce_webdriver,aloetesting/aloe_webdriver,ponsfrilus/lettuce_webdriver,bbangert/lettuce_webdriver,aloetesting/aloe_webdriver,bbangert/lettuce_webdriver,koterpillar/aloe_webdriver,macndesign/lettuce_webdriver,infoxchange/aloe_webdriver
|
+
+ import socket
+ import urlparse
+
+ from lettuce import step
+ from lettuce.django import server
+
+ # make sure the steps are loaded
+ import lettuce_webdriver.webdriver # pylint:disable=unused-import
def site_url(url):
"""
Determine the server URL.
"""
base_url = 'http://%s' % socket.gethostname()
if server.port is not 80:
base_url += ':%d' % server.port
return urlparse.urljoin(base_url, url)
@step(r'I visit site page "([^"]*)"')
- def visit_page(step, page):
+ def visit_page(self, page):
"""
Visit the specific page of the site.
"""
- step.given('I visit "%s"' % site_url(page))
+ self.given('I visit "%s"' % site_url(page))
|
Fix bugs with lettuce-webdriver Django steps
|
## Code Before:
def site_url(url):
"""
Determine the server URL.
"""
base_url = 'http://%s' % socket.gethostname()
if server.port is not 80:
base_url += ':%d' % server.port
return urlparse.urljoin(base_url, url)
@step(r'I visit site page "([^"]*)"')
def visit_page(step, page):
"""
Visit the specific page of the site.
"""
step.given('I visit "%s"' % site_url(page))
## Instruction:
Fix bugs with lettuce-webdriver Django steps
## Code After:
import socket
import urlparse
from lettuce import step
from lettuce.django import server
# make sure the steps are loaded
import lettuce_webdriver.webdriver # pylint:disable=unused-import
def site_url(url):
"""
Determine the server URL.
"""
base_url = 'http://%s' % socket.gethostname()
if server.port is not 80:
base_url += ':%d' % server.port
return urlparse.urljoin(base_url, url)
@step(r'I visit site page "([^"]*)"')
def visit_page(self, page):
"""
Visit the specific page of the site.
"""
self.given('I visit "%s"' % site_url(page))
|
+
+ import socket
+ import urlparse
+
+ from lettuce import step
+ from lettuce.django import server
+
+ # make sure the steps are loaded
+ import lettuce_webdriver.webdriver # pylint:disable=unused-import
def site_url(url):
"""
Determine the server URL.
"""
base_url = 'http://%s' % socket.gethostname()
if server.port is not 80:
base_url += ':%d' % server.port
return urlparse.urljoin(base_url, url)
@step(r'I visit site page "([^"]*)"')
- def visit_page(step, page):
? - ^
+ def visit_page(self, page):
? ^^
"""
Visit the specific page of the site.
"""
- step.given('I visit "%s"' % site_url(page))
? - ^
+ self.given('I visit "%s"' % site_url(page))
? ^^
|
27ee536137a98a317f2cfbb2010fa5fe31037e99
|
txircd/modules/cmd_user.py
|
txircd/modules/cmd_user.py
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class UserCommand(Command):
def onUse(self, user, params):
if user.registered == 0:
self.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return
if params and len(params) < 4:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Not enough parameters")
if not user.username:
user.registered -= 1
user.username = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
if not user.username:
user.registered += 1
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Your username is not valid")
return
user.realname = params[3]
if user.registered == 0:
user.register()
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn():
return {
"commands": {
"USER": UserCommand()
}
}
def cleanup():
del self.ircd.commands["USER"]
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class UserCommand(Command):
def onUse(self, user, data):
if not user.username:
user.registered -= 1
user.username = data["ident"]
user.realname = data["gecos"]
if user.registered == 0:
user.register()
def processParams(self, user, params):
if user.registered == 0:
user.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return {}
if params and len(params) < 4:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Not enough parameters")
return {}
ident = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
if not ident:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Your username is not valid")
return {}
return {
"user": user,
"ident": ident,
"gecos": params[3]
}
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn():
return {
"commands": {
"USER": UserCommand()
}
}
def cleanup():
del self.ircd.commands["USER"]
|
Update the USER command to take advantage of core capabilities as well
|
Update the USER command to take advantage of core capabilities as well
|
Python
|
bsd-3-clause
|
DesertBus/txircd,Heufneutje/txircd,ElementalAlchemist/txircd
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class UserCommand(Command):
- def onUse(self, user, params):
+ def onUse(self, user, data):
+ if not user.username:
+ user.registered -= 1
+ user.username = data["ident"]
+ user.realname = data["gecos"]
if user.registered == 0:
+ user.register()
+
+ def processParams(self, user, params):
+ if user.registered == 0:
- self.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
+ user.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
- return
+ return {}
if params and len(params) < 4:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Not enough parameters")
+ return {}
- if not user.username:
- user.registered -= 1
- user.username = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
+ ident = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
+ if not ident:
- if not user.username:
- user.registered += 1
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Your username is not valid")
- return
+ return {}
- user.realname = params[3]
- if user.registered == 0:
- user.register()
+ return {
+ "user": user,
+ "ident": ident,
+ "gecos": params[3]
+ }
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn():
return {
"commands": {
"USER": UserCommand()
}
}
def cleanup():
del self.ircd.commands["USER"]
|
Update the USER command to take advantage of core capabilities as well
|
## Code Before:
from twisted.words.protocols import irc
from txircd.modbase import Command
class UserCommand(Command):
def onUse(self, user, params):
if user.registered == 0:
self.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return
if params and len(params) < 4:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Not enough parameters")
if not user.username:
user.registered -= 1
user.username = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
if not user.username:
user.registered += 1
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Your username is not valid")
return
user.realname = params[3]
if user.registered == 0:
user.register()
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn():
return {
"commands": {
"USER": UserCommand()
}
}
def cleanup():
del self.ircd.commands["USER"]
## Instruction:
Update the USER command to take advantage of core capabilities as well
## Code After:
from twisted.words.protocols import irc
from txircd.modbase import Command
class UserCommand(Command):
def onUse(self, user, data):
if not user.username:
user.registered -= 1
user.username = data["ident"]
user.realname = data["gecos"]
if user.registered == 0:
user.register()
def processParams(self, user, params):
if user.registered == 0:
user.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return {}
if params and len(params) < 4:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Not enough parameters")
return {}
ident = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
if not ident:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Your username is not valid")
return {}
return {
"user": user,
"ident": ident,
"gecos": params[3]
}
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn():
return {
"commands": {
"USER": UserCommand()
}
}
def cleanup():
del self.ircd.commands["USER"]
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class UserCommand(Command):
- def onUse(self, user, params):
? ^ ^ --
+ def onUse(self, user, data):
? ^ ^
+ if not user.username:
+ user.registered -= 1
+ user.username = data["ident"]
+ user.realname = data["gecos"]
if user.registered == 0:
+ user.register()
+
+ def processParams(self, user, params):
+ if user.registered == 0:
- self.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
? ^^
+ user.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
? + ^
- return
+ return {}
? +++
if params and len(params) < 4:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Not enough parameters")
+ return {}
- if not user.username:
- user.registered -= 1
- user.username = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
? ^^ ------ ^^^
+ ident = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
? ^^ ^
+ if not ident:
- if not user.username:
- user.registered += 1
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Your username is not valid")
- return
+ return {}
? +++
- user.realname = params[3]
- if user.registered == 0:
- user.register()
+ return {
+ "user": user,
+ "ident": ident,
+ "gecos": params[3]
+ }
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn():
return {
"commands": {
"USER": UserCommand()
}
}
def cleanup():
del self.ircd.commands["USER"]
|
ab55f28592956cc6c9abbea31c2b0d66e13cddc1
|
src/pygrapes/adapter/__init__.py
|
src/pygrapes/adapter/__init__.py
|
__author__ = "mib"
__date__ = "$2011-01-22 12:02:41$"
from abstract import Abstract
from local import Local
__all__ = ['Abstract', 'Local']
|
__author__ = "mib"
__date__ = "$2011-01-22 12:02:41$"
from pygrapes.util import not_implemented
from pygrapes.adapter.abstract import Abstract
from pygrapes.adapter.local import Local
try:
from pygrapes.adapter.zeromq import Zmq
except ImportError:
Zmq = not_implemented('A working pyzmq lib is required!')
try:
from pygrapes.adapter.amqp import Amqp
except ImportError:
Amqp = not_implemented('A working amqplib lib is required!')
__all__ = ['Abstract', 'Amqp', 'Local', 'Zmq']
|
Load conditionally all available adapters in order to make them available right inside pygrapes.adapter module
|
Load conditionally all available adapters in order to make them available right inside pygrapes.adapter module
|
Python
|
bsd-3-clause
|
michalbachowski/pygrapes,michalbachowski/pygrapes,michalbachowski/pygrapes
|
__author__ = "mib"
__date__ = "$2011-01-22 12:02:41$"
+ from pygrapes.util import not_implemented
- from abstract import Abstract
+ from pygrapes.adapter.abstract import Abstract
- from local import Local
+ from pygrapes.adapter.local import Local
- __all__ = ['Abstract', 'Local']
+ try:
+ from pygrapes.adapter.zeromq import Zmq
+ except ImportError:
+ Zmq = not_implemented('A working pyzmq lib is required!')
+ try:
+ from pygrapes.adapter.amqp import Amqp
+ except ImportError:
+ Amqp = not_implemented('A working amqplib lib is required!')
+
+
+ __all__ = ['Abstract', 'Amqp', 'Local', 'Zmq']
+
|
Load conditionally all available adapters in order to make them available right inside pygrapes.adapter module
|
## Code Before:
__author__ = "mib"
__date__ = "$2011-01-22 12:02:41$"
from abstract import Abstract
from local import Local
__all__ = ['Abstract', 'Local']
## Instruction:
Load conditionally all available adapters in order to make them available right inside pygrapes.adapter module
## Code After:
__author__ = "mib"
__date__ = "$2011-01-22 12:02:41$"
from pygrapes.util import not_implemented
from pygrapes.adapter.abstract import Abstract
from pygrapes.adapter.local import Local
try:
from pygrapes.adapter.zeromq import Zmq
except ImportError:
Zmq = not_implemented('A working pyzmq lib is required!')
try:
from pygrapes.adapter.amqp import Amqp
except ImportError:
Amqp = not_implemented('A working amqplib lib is required!')
__all__ = ['Abstract', 'Amqp', 'Local', 'Zmq']
|
__author__ = "mib"
__date__ = "$2011-01-22 12:02:41$"
+ from pygrapes.util import not_implemented
- from abstract import Abstract
+ from pygrapes.adapter.abstract import Abstract
? +++++++++++++++++
- from local import Local
+ from pygrapes.adapter.local import Local
+ try:
+ from pygrapes.adapter.zeromq import Zmq
+ except ImportError:
+ Zmq = not_implemented('A working pyzmq lib is required!')
+
+ try:
+ from pygrapes.adapter.amqp import Amqp
+ except ImportError:
+ Amqp = not_implemented('A working amqplib lib is required!')
+
+
- __all__ = ['Abstract', 'Local']
+ __all__ = ['Abstract', 'Amqp', 'Local', 'Zmq']
? ++++++++ +++++++
|
833f8ce0673701eb64fb20ee067ccd8c58e473c6
|
child_sync_typo3/wizard/child_depart_wizard.py
|
child_sync_typo3/wizard/child_depart_wizard.py
|
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class end_sponsorship_wizard(orm.TransientModel):
_inherit = 'end.sponsorship.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(end_sponsorship_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class child_depart_wizard(orm.TransientModel):
_inherit = 'child.depart.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(child_depart_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
Correct wrong inheritance on sponsorship_typo3 child_depart wizard.
|
Correct wrong inheritance on sponsorship_typo3 child_depart wizard.
|
Python
|
agpl-3.0
|
MickSandoz/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland,ndtran/compassion-switzerland,ndtran/compassion-switzerland,CompassionCH/compassion-switzerland,MickSandoz/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,Secheron/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,Secheron/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland
|
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
- class end_sponsorship_wizard(orm.TransientModel):
+ class child_depart_wizard(orm.TransientModel):
- _inherit = 'end.sponsorship.wizard'
+ _inherit = 'child.depart.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
- res = super(end_sponsorship_wizard, self).child_depart(
+ res = super(child_depart_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
Correct wrong inheritance on sponsorship_typo3 child_depart wizard.
|
## Code Before:
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class end_sponsorship_wizard(orm.TransientModel):
_inherit = 'end.sponsorship.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(end_sponsorship_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
## Instruction:
Correct wrong inheritance on sponsorship_typo3 child_depart wizard.
## Code After:
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class child_depart_wizard(orm.TransientModel):
_inherit = 'child.depart.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(child_depart_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
- class end_sponsorship_wizard(orm.TransientModel):
? ^^ ^ ^^^^ ^^^^
+ class child_depart_wizard(orm.TransientModel):
? ^^^^ ^^ ^ ^
- _inherit = 'end.sponsorship.wizard'
+ _inherit = 'child.depart.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
- res = super(end_sponsorship_wizard, self).child_depart(
? ^^ ^ ^^^^ ^^^^
+ res = super(child_depart_wizard, self).child_depart(
? ^^^^ ^^ ^ ^
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
d1cc22a5ab94b6df503679cc8b6a19948f4049c9
|
maildump/web_realtime.py
|
maildump/web_realtime.py
|
from flask import current_app
from gevent.queue import Queue
clients = set()
def broadcast(event, data=None):
for q in clients:
q.put((event, data))
def handle_sse_request():
return current_app.response_class(_gen(), mimetype='text/event-stream')
def _gen():
yield _sse('connected')
q = Queue()
clients.add(q)
while True:
msg = q.get()
try:
yield _sse(*msg)
except GeneratorExit:
clients.remove(q)
raise
def _sse(event, data=None):
parts = [f'event: {event}', f'data: {data or ""}']
return ('\r\n'.join(parts) + '\r\n\r\n').encode()
|
from flask import current_app
from gevent.queue import Empty, Queue
clients = set()
def broadcast(event, data=None):
for q in clients:
q.put((event, data))
def handle_sse_request():
return current_app.response_class(_gen(), mimetype='text/event-stream')
def _gen():
yield _sse('connected')
q = Queue()
clients.add(q)
while True:
try:
msg = q.get(timeout=60)
except Empty:
yield _sse('ping')
continue
try:
yield _sse(*msg)
except GeneratorExit:
clients.remove(q)
raise
def _sse(event, data=None):
parts = [f'event: {event}', f'data: {data or ""}']
return ('\r\n'.join(parts) + '\r\n\r\n').encode()
|
Send regular ping on SSE channel
|
Send regular ping on SSE channel
Otherwise dead clients stay around for a very long time (they are
only detected and removed when sending data fails, and that used to
depend on new emails arriving).
|
Python
|
mit
|
ThiefMaster/maildump,ThiefMaster/maildump,ThiefMaster/maildump,ThiefMaster/maildump
|
from flask import current_app
- from gevent.queue import Queue
+ from gevent.queue import Empty, Queue
clients = set()
def broadcast(event, data=None):
for q in clients:
q.put((event, data))
def handle_sse_request():
return current_app.response_class(_gen(), mimetype='text/event-stream')
def _gen():
yield _sse('connected')
q = Queue()
clients.add(q)
while True:
+ try:
- msg = q.get()
+ msg = q.get(timeout=60)
+ except Empty:
+ yield _sse('ping')
+ continue
try:
yield _sse(*msg)
except GeneratorExit:
clients.remove(q)
raise
def _sse(event, data=None):
parts = [f'event: {event}', f'data: {data or ""}']
return ('\r\n'.join(parts) + '\r\n\r\n').encode()
|
Send regular ping on SSE channel
|
## Code Before:
from flask import current_app
from gevent.queue import Queue
clients = set()
def broadcast(event, data=None):
for q in clients:
q.put((event, data))
def handle_sse_request():
return current_app.response_class(_gen(), mimetype='text/event-stream')
def _gen():
yield _sse('connected')
q = Queue()
clients.add(q)
while True:
msg = q.get()
try:
yield _sse(*msg)
except GeneratorExit:
clients.remove(q)
raise
def _sse(event, data=None):
parts = [f'event: {event}', f'data: {data or ""}']
return ('\r\n'.join(parts) + '\r\n\r\n').encode()
## Instruction:
Send regular ping on SSE channel
## Code After:
from flask import current_app
from gevent.queue import Empty, Queue
clients = set()
def broadcast(event, data=None):
for q in clients:
q.put((event, data))
def handle_sse_request():
return current_app.response_class(_gen(), mimetype='text/event-stream')
def _gen():
yield _sse('connected')
q = Queue()
clients.add(q)
while True:
try:
msg = q.get(timeout=60)
except Empty:
yield _sse('ping')
continue
try:
yield _sse(*msg)
except GeneratorExit:
clients.remove(q)
raise
def _sse(event, data=None):
parts = [f'event: {event}', f'data: {data or ""}']
return ('\r\n'.join(parts) + '\r\n\r\n').encode()
|
from flask import current_app
- from gevent.queue import Queue
+ from gevent.queue import Empty, Queue
? +++++++
clients = set()
def broadcast(event, data=None):
for q in clients:
q.put((event, data))
def handle_sse_request():
return current_app.response_class(_gen(), mimetype='text/event-stream')
def _gen():
yield _sse('connected')
q = Queue()
clients.add(q)
while True:
+ try:
- msg = q.get()
+ msg = q.get(timeout=60)
? ++++ ++++++++++
+ except Empty:
+ yield _sse('ping')
+ continue
try:
yield _sse(*msg)
except GeneratorExit:
clients.remove(q)
raise
def _sse(event, data=None):
parts = [f'event: {event}', f'data: {data or ""}']
return ('\r\n'.join(parts) + '\r\n\r\n').encode()
|
a69bd95c2e732f22aac555884904bbe7d9d0a1b9
|
src/dynamic_fixtures/management/commands/load_dynamic_fixtures.py
|
src/dynamic_fixtures/management/commands/load_dynamic_fixtures.py
|
from django.core.management.base import BaseCommand
from dynamic_fixtures.fixtures.runner import LoadFixtureRunner
class Command(BaseCommand):
help_text = 'Load fixtures while keeping dependencies in mind.'
args = '[app_label] [fixture_name]'
def handle(self, *args, **options):
runner = LoadFixtureRunner()
nodes = None
if len(args) == 1:
nodes = runner.get_app_nodes(app_label=args[0])
elif len(args) == 2:
nodes = runner.get_fixture_node(app_label=args[0],
fixture_prefix=args[1])
fixture_count = runner.load_fixtures(
nodes=nodes,
progress_callback=self.progress_callback
)
self.stdout.write('Loaded {} fixtures'.format(fixture_count))
def progress_callback(self, action, node):
if action == 'load_start':
self.stdout.write('Loading fixture {}.{}...'.format(*node),
ending='')
self.stdout.flush()
elif action == 'load_success':
self.stdout.write('SUCCESS')
|
from django.core.management.base import BaseCommand
from dynamic_fixtures.fixtures.runner import LoadFixtureRunner
class Command(BaseCommand):
help_text = 'Load fixtures while keeping dependencies in mind.'
args = '[app_label] [fixture_name]'
def add_arguments(self, parser):
parser.add_argument('app_label', type=str)
parser.add_argument('fixture_name', default=None, nargs='?', type=str)
def handle(self, *args, **options):
runner = LoadFixtureRunner()
nodes = None
if len(args) == 0:
if options['fixture_name'] is None:
args = (options['app_label'], )
else:
args = (options['app_label'], options['fixture_name'])
if len(args) == 1:
nodes = runner.get_app_nodes(app_label=args[0])
elif len(args) == 2:
nodes = runner.get_fixture_node(app_label=args[0],
fixture_prefix=args[1])
fixture_count = runner.load_fixtures(
nodes=nodes,
progress_callback=self.progress_callback
)
self.stdout.write('Loaded {} fixtures'.format(fixture_count))
def progress_callback(self, action, node):
if action == 'load_start':
self.stdout.write('Loading fixture {}.{}...'.format(*node),
ending='')
self.stdout.flush()
elif action == 'load_success':
self.stdout.write('SUCCESS')
|
Fix Command compatibility with Django>= 1.8
|
Fix Command compatibility with Django>= 1.8
|
Python
|
mit
|
Peter-Slump/django-factory-boy-fixtures,Peter-Slump/django-dynamic-fixtures
|
from django.core.management.base import BaseCommand
from dynamic_fixtures.fixtures.runner import LoadFixtureRunner
class Command(BaseCommand):
help_text = 'Load fixtures while keeping dependencies in mind.'
args = '[app_label] [fixture_name]'
+ def add_arguments(self, parser):
+ parser.add_argument('app_label', type=str)
+ parser.add_argument('fixture_name', default=None, nargs='?', type=str)
+
def handle(self, *args, **options):
runner = LoadFixtureRunner()
nodes = None
+
+ if len(args) == 0:
+ if options['fixture_name'] is None:
+ args = (options['app_label'], )
+ else:
+ args = (options['app_label'], options['fixture_name'])
+
if len(args) == 1:
nodes = runner.get_app_nodes(app_label=args[0])
elif len(args) == 2:
nodes = runner.get_fixture_node(app_label=args[0],
fixture_prefix=args[1])
fixture_count = runner.load_fixtures(
nodes=nodes,
progress_callback=self.progress_callback
)
self.stdout.write('Loaded {} fixtures'.format(fixture_count))
def progress_callback(self, action, node):
if action == 'load_start':
self.stdout.write('Loading fixture {}.{}...'.format(*node),
ending='')
self.stdout.flush()
elif action == 'load_success':
self.stdout.write('SUCCESS')
|
Fix Command compatibility with Django>= 1.8
|
## Code Before:
from django.core.management.base import BaseCommand
from dynamic_fixtures.fixtures.runner import LoadFixtureRunner
class Command(BaseCommand):
help_text = 'Load fixtures while keeping dependencies in mind.'
args = '[app_label] [fixture_name]'
def handle(self, *args, **options):
runner = LoadFixtureRunner()
nodes = None
if len(args) == 1:
nodes = runner.get_app_nodes(app_label=args[0])
elif len(args) == 2:
nodes = runner.get_fixture_node(app_label=args[0],
fixture_prefix=args[1])
fixture_count = runner.load_fixtures(
nodes=nodes,
progress_callback=self.progress_callback
)
self.stdout.write('Loaded {} fixtures'.format(fixture_count))
def progress_callback(self, action, node):
if action == 'load_start':
self.stdout.write('Loading fixture {}.{}...'.format(*node),
ending='')
self.stdout.flush()
elif action == 'load_success':
self.stdout.write('SUCCESS')
## Instruction:
Fix Command compatibility with Django>= 1.8
## Code After:
from django.core.management.base import BaseCommand
from dynamic_fixtures.fixtures.runner import LoadFixtureRunner
class Command(BaseCommand):
help_text = 'Load fixtures while keeping dependencies in mind.'
args = '[app_label] [fixture_name]'
def add_arguments(self, parser):
parser.add_argument('app_label', type=str)
parser.add_argument('fixture_name', default=None, nargs='?', type=str)
def handle(self, *args, **options):
runner = LoadFixtureRunner()
nodes = None
if len(args) == 0:
if options['fixture_name'] is None:
args = (options['app_label'], )
else:
args = (options['app_label'], options['fixture_name'])
if len(args) == 1:
nodes = runner.get_app_nodes(app_label=args[0])
elif len(args) == 2:
nodes = runner.get_fixture_node(app_label=args[0],
fixture_prefix=args[1])
fixture_count = runner.load_fixtures(
nodes=nodes,
progress_callback=self.progress_callback
)
self.stdout.write('Loaded {} fixtures'.format(fixture_count))
def progress_callback(self, action, node):
if action == 'load_start':
self.stdout.write('Loading fixture {}.{}...'.format(*node),
ending='')
self.stdout.flush()
elif action == 'load_success':
self.stdout.write('SUCCESS')
|
from django.core.management.base import BaseCommand
from dynamic_fixtures.fixtures.runner import LoadFixtureRunner
class Command(BaseCommand):
help_text = 'Load fixtures while keeping dependencies in mind.'
args = '[app_label] [fixture_name]'
+ def add_arguments(self, parser):
+ parser.add_argument('app_label', type=str)
+ parser.add_argument('fixture_name', default=None, nargs='?', type=str)
+
def handle(self, *args, **options):
runner = LoadFixtureRunner()
nodes = None
+
+ if len(args) == 0:
+ if options['fixture_name'] is None:
+ args = (options['app_label'], )
+ else:
+ args = (options['app_label'], options['fixture_name'])
+
if len(args) == 1:
nodes = runner.get_app_nodes(app_label=args[0])
elif len(args) == 2:
nodes = runner.get_fixture_node(app_label=args[0],
fixture_prefix=args[1])
fixture_count = runner.load_fixtures(
nodes=nodes,
progress_callback=self.progress_callback
)
self.stdout.write('Loaded {} fixtures'.format(fixture_count))
def progress_callback(self, action, node):
if action == 'load_start':
self.stdout.write('Loading fixture {}.{}...'.format(*node),
ending='')
self.stdout.flush()
elif action == 'load_success':
self.stdout.write('SUCCESS')
|
d9024a447ab097e2becd9444d42b7639389e2846
|
mail/app/handlers/amqp.py
|
mail/app/handlers/amqp.py
|
import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message})
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
|
import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message}, serializer="pickle")
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
|
Change carrot serialization from JSON to 'pickle'
|
Change carrot serialization from JSON to 'pickle'
|
Python
|
apache-2.0
|
leotrubach/sourceforge-allura,Bitergia/allura,apache/allura,Bitergia/allura,lym/allura-git,apache/allura,apache/allura,apache/incubator-allura,leotrubach/sourceforge-allura,heiths/allura,apache/allura,Bitergia/allura,leotrubach/sourceforge-allura,Bitergia/allura,heiths/allura,leotrubach/sourceforge-allura,Bitergia/allura,lym/allura-git,apache/allura,heiths/allura,heiths/allura,lym/allura-git,apache/incubator-allura,lym/allura-git,lym/allura-git,apache/incubator-allura,apache/incubator-allura,heiths/allura
|
import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
- publisher.send({"message": message})
+ publisher.send({"message": message}, serializer="pickle")
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
|
Change carrot serialization from JSON to 'pickle'
|
## Code Before:
import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message})
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
## Instruction:
Change carrot serialization from JSON to 'pickle'
## Code After:
import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message}, serializer="pickle")
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
|
import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
- publisher.send({"message": message})
+ publisher.send({"message": message}, serializer="pickle")
? +++++++++++++++++++++
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
|
f2ef48c3b1753e4b53b86c1f9d7a3da517a6d136
|
web/impact/impact/models/utils.py
|
web/impact/impact/models/utils.py
|
import re
LABEL_LENGTH = 255
def is_managed(db_table):
return False
def model_name_to_snake(value):
original_model_string = re.findall('[A-Z][^A-Z]*', value)
holder = ""
for word in original_model_string:
holder += word.lower() + "_"
new_model_string = holder[:-1]
return new_model_string
def snake_to_model_name(value):
old_value = value.split('_')
new_value = ""
for word in old_value:
new_value += (word[0].upper() + word[1:])
return new_value
|
import re
from django.utils.text import camel_case_to_spaces
LABEL_LENGTH = 255
def is_managed(db_table):
return False
def model_name_to_snake(value):
original_model_string = camel_case_to_spaces(value)
new_model_string = original_model_string.replace(" ", "_")
return new_model_string
def snake_to_model_name(value):
old_value = value.split('_')
new_value = ""
for word in old_value:
new_value += (word[0].upper() + word[1:])
return new_value
|
Remove Custom Reegex And Use Django Util For Case Conversion
|
[AC-5010] Remove Custom Reegex And Use Django Util For Case Conversion
This commit uses the django built in to switch from camel case to lower case. Then the loop was removed in favor of replace().
|
Python
|
mit
|
masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api
|
import re
+ from django.utils.text import camel_case_to_spaces
LABEL_LENGTH = 255
def is_managed(db_table):
return False
def model_name_to_snake(value):
+ original_model_string = camel_case_to_spaces(value)
+ new_model_string = original_model_string.replace(" ", "_")
- original_model_string = re.findall('[A-Z][^A-Z]*', value)
- holder = ""
- for word in original_model_string:
- holder += word.lower() + "_"
- new_model_string = holder[:-1]
return new_model_string
def snake_to_model_name(value):
old_value = value.split('_')
new_value = ""
for word in old_value:
new_value += (word[0].upper() + word[1:])
return new_value
|
Remove Custom Reegex And Use Django Util For Case Conversion
|
## Code Before:
import re
LABEL_LENGTH = 255
def is_managed(db_table):
return False
def model_name_to_snake(value):
original_model_string = re.findall('[A-Z][^A-Z]*', value)
holder = ""
for word in original_model_string:
holder += word.lower() + "_"
new_model_string = holder[:-1]
return new_model_string
def snake_to_model_name(value):
old_value = value.split('_')
new_value = ""
for word in old_value:
new_value += (word[0].upper() + word[1:])
return new_value
## Instruction:
Remove Custom Reegex And Use Django Util For Case Conversion
## Code After:
import re
from django.utils.text import camel_case_to_spaces
LABEL_LENGTH = 255
def is_managed(db_table):
return False
def model_name_to_snake(value):
original_model_string = camel_case_to_spaces(value)
new_model_string = original_model_string.replace(" ", "_")
return new_model_string
def snake_to_model_name(value):
old_value = value.split('_')
new_value = ""
for word in old_value:
new_value += (word[0].upper() + word[1:])
return new_value
|
import re
+ from django.utils.text import camel_case_to_spaces
LABEL_LENGTH = 255
def is_managed(db_table):
return False
def model_name_to_snake(value):
+ original_model_string = camel_case_to_spaces(value)
+ new_model_string = original_model_string.replace(" ", "_")
- original_model_string = re.findall('[A-Z][^A-Z]*', value)
- holder = ""
- for word in original_model_string:
- holder += word.lower() + "_"
- new_model_string = holder[:-1]
return new_model_string
def snake_to_model_name(value):
old_value = value.split('_')
new_value = ""
for word in old_value:
new_value += (word[0].upper() + word[1:])
return new_value
|
a0420b066e0a5064ebe0944a16348debf107a9a4
|
speech.py
|
speech.py
|
import time
from say import say
def introduction(tts):
"""Make Nao introduce itself.
Keyword arguments:
tts - Nao proxy.
"""
say("Hello world!", tts)
say("Computer Science is one of the coolest subjects to study in the\
modern world.", tts)
say("Programming is a tool used by Scientists and Engineers to create\
all kinds of interesting things.", tts)
say("For example, you can program mobile phones, laptops, cars, the\
Internet and more. Every day more things are being enhanced by\
Artificial Intelligence, like me.", tts)
say("Now I want to talk about what programming is like.", tts)
say("Programming is about solving puzzles of the real world and helping\
people deal with important problems to make the world a better\
place.", tts)
time.sleep(2)
say("Now, what topic would you like to learn about? Show me a number to\
choose the activity.", tts)
say("Number one to practice input and output.", tts)
|
import time
from say import say
def introduction(tts):
"""Make Nao introduce itself.
Keyword arguments:
tts - Nao proxy.
"""
say("Hello world!", tts)
say("I'm Leyva and I will teach you how to program in a programming\
language called python", tts)
say("Computer Science is one of the coolest subjects to study in the\
modern world.", tts)
say("Programming is a tool used by Scientists and Engineers to create\
all kinds of interesting things.", tts)
say("For example, you can program mobile phones, laptops, cars, the\
Internet and more. Every day more things are being enhanced by\
Artificial Intelligence, like me.", tts)
say("Now I want to talk about what programming is like.", tts)
say("Programming is about solving puzzles of the real world and helping\
people deal with important problems to make the world a better\
place.", tts)
time.sleep(2)
say("Now, what topic would you like to learn about? Show me a number to\
choose the activity.", tts)
say("Number one to practice input and output.", tts)
|
Make Nao say its name and the programming language
|
Make Nao say its name and the programming language
|
Python
|
mit
|
AliGhahraei/nao-classroom
|
import time
from say import say
def introduction(tts):
"""Make Nao introduce itself.
Keyword arguments:
tts - Nao proxy.
"""
say("Hello world!", tts)
+ say("I'm Leyva and I will teach you how to program in a programming\
+ language called python", tts)
say("Computer Science is one of the coolest subjects to study in the\
modern world.", tts)
say("Programming is a tool used by Scientists and Engineers to create\
all kinds of interesting things.", tts)
say("For example, you can program mobile phones, laptops, cars, the\
Internet and more. Every day more things are being enhanced by\
Artificial Intelligence, like me.", tts)
say("Now I want to talk about what programming is like.", tts)
say("Programming is about solving puzzles of the real world and helping\
people deal with important problems to make the world a better\
place.", tts)
time.sleep(2)
say("Now, what topic would you like to learn about? Show me a number to\
choose the activity.", tts)
say("Number one to practice input and output.", tts)
|
Make Nao say its name and the programming language
|
## Code Before:
import time
from say import say
def introduction(tts):
"""Make Nao introduce itself.
Keyword arguments:
tts - Nao proxy.
"""
say("Hello world!", tts)
say("Computer Science is one of the coolest subjects to study in the\
modern world.", tts)
say("Programming is a tool used by Scientists and Engineers to create\
all kinds of interesting things.", tts)
say("For example, you can program mobile phones, laptops, cars, the\
Internet and more. Every day more things are being enhanced by\
Artificial Intelligence, like me.", tts)
say("Now I want to talk about what programming is like.", tts)
say("Programming is about solving puzzles of the real world and helping\
people deal with important problems to make the world a better\
place.", tts)
time.sleep(2)
say("Now, what topic would you like to learn about? Show me a number to\
choose the activity.", tts)
say("Number one to practice input and output.", tts)
## Instruction:
Make Nao say its name and the programming language
## Code After:
import time
from say import say
def introduction(tts):
"""Make Nao introduce itself.
Keyword arguments:
tts - Nao proxy.
"""
say("Hello world!", tts)
say("I'm Leyva and I will teach you how to program in a programming\
language called python", tts)
say("Computer Science is one of the coolest subjects to study in the\
modern world.", tts)
say("Programming is a tool used by Scientists and Engineers to create\
all kinds of interesting things.", tts)
say("For example, you can program mobile phones, laptops, cars, the\
Internet and more. Every day more things are being enhanced by\
Artificial Intelligence, like me.", tts)
say("Now I want to talk about what programming is like.", tts)
say("Programming is about solving puzzles of the real world and helping\
people deal with important problems to make the world a better\
place.", tts)
time.sleep(2)
say("Now, what topic would you like to learn about? Show me a number to\
choose the activity.", tts)
say("Number one to practice input and output.", tts)
|
import time
from say import say
def introduction(tts):
"""Make Nao introduce itself.
Keyword arguments:
tts - Nao proxy.
"""
say("Hello world!", tts)
+ say("I'm Leyva and I will teach you how to program in a programming\
+ language called python", tts)
say("Computer Science is one of the coolest subjects to study in the\
modern world.", tts)
say("Programming is a tool used by Scientists and Engineers to create\
all kinds of interesting things.", tts)
say("For example, you can program mobile phones, laptops, cars, the\
Internet and more. Every day more things are being enhanced by\
Artificial Intelligence, like me.", tts)
say("Now I want to talk about what programming is like.", tts)
say("Programming is about solving puzzles of the real world and helping\
people deal with important problems to make the world a better\
place.", tts)
time.sleep(2)
say("Now, what topic would you like to learn about? Show me a number to\
choose the activity.", tts)
say("Number one to practice input and output.", tts)
|
3934ed699cdb0b472c09ad238ee4284b0050869c
|
prime-factors/prime_factors.py
|
prime-factors/prime_factors.py
|
def prime_factors(n):
factors = []
factor = 2
while n != 1:
while n % factor == 0:
factors += [factor]
n //= factor
factor += 1
return factors
|
def prime_factors(n):
factors = []
while n % 2 == 0:
factors += [2]
n //= 2
factor = 3
while n != 1:
while n % factor == 0:
factors += [factor]
n //= factor
factor += 2
return factors
|
Make solution more efficient by only testing odd numbers
|
Make solution more efficient by only testing odd numbers
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
def prime_factors(n):
factors = []
+ while n % 2 == 0:
+ factors += [2]
+ n //= 2
- factor = 2
+ factor = 3
while n != 1:
while n % factor == 0:
factors += [factor]
n //= factor
- factor += 1
+ factor += 2
return factors
|
Make solution more efficient by only testing odd numbers
|
## Code Before:
def prime_factors(n):
factors = []
factor = 2
while n != 1:
while n % factor == 0:
factors += [factor]
n //= factor
factor += 1
return factors
## Instruction:
Make solution more efficient by only testing odd numbers
## Code After:
def prime_factors(n):
factors = []
while n % 2 == 0:
factors += [2]
n //= 2
factor = 3
while n != 1:
while n % factor == 0:
factors += [factor]
n //= factor
factor += 2
return factors
|
def prime_factors(n):
factors = []
+ while n % 2 == 0:
+ factors += [2]
+ n //= 2
- factor = 2
? ^
+ factor = 3
? ^
while n != 1:
while n % factor == 0:
factors += [factor]
n //= factor
- factor += 1
? ^
+ factor += 2
? ^
return factors
|
29f6a260e49a6955dd12d354400d9ee6cfd6ddc7
|
tests/qtcore/qstatemachine_test.py
|
tests/qtcore/qstatemachine_test.py
|
import unittest
from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation
class QStateMachineTest(unittest.TestCase):
def cb(self, *args):
self.assertEqual(self.machine.defaultAnimations(), [self.anim])
def testBasic(self):
app = QCoreApplication([])
self.machine = QStateMachine()
s1 = QState()
s2 = QState()
s3 = QFinalState()
QObject.connect(self.machine, SIGNAL("started()"), self.cb)
self.anim = QParallelAnimationGroup()
self.machine.addState(s1)
self.machine.addState(s2)
self.machine.addState(s3)
self.machine.setInitialState(s1)
self.machine.addDefaultAnimation(self.anim)
self.machine.start()
QTimer.singleShot(100, app.quit)
app.exec_()
if __name__ == '__main__':
unittest.main()
|
import unittest
from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation
from helper import UsesQCoreApplication
class QStateMachineTest(UsesQCoreApplication):
def cb(self, *args):
self.assertEqual(self.machine.defaultAnimations(), [self.anim])
def testBasic(self):
self.machine = QStateMachine()
s1 = QState()
s2 = QState()
s3 = QFinalState()
QObject.connect(self.machine, SIGNAL("started()"), self.cb)
self.anim = QParallelAnimationGroup()
self.machine.addState(s1)
self.machine.addState(s2)
self.machine.addState(s3)
self.machine.setInitialState(s1)
self.machine.addDefaultAnimation(self.anim)
self.machine.start()
QTimer.singleShot(100, self.app.quit)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
|
Add UsesQCoreApplication in state machine test
|
Add UsesQCoreApplication in state machine test
|
Python
|
lgpl-2.1
|
M4rtinK/pyside-bb10,enthought/pyside,M4rtinK/pyside-android,PySide/PySide,IronManMark20/pyside2,PySide/PySide,M4rtinK/pyside-bb10,RobinD42/pyside,BadSingleton/pyside2,PySide/PySide,qtproject/pyside-pyside,enthought/pyside,pankajp/pyside,pankajp/pyside,M4rtinK/pyside-android,PySide/PySide,BadSingleton/pyside2,gbaty/pyside2,qtproject/pyside-pyside,enthought/pyside,enthought/pyside,RobinD42/pyside,pankajp/pyside,pankajp/pyside,enthought/pyside,M4rtinK/pyside-android,M4rtinK/pyside-bb10,enthought/pyside,gbaty/pyside2,qtproject/pyside-pyside,PySide/PySide,M4rtinK/pyside-bb10,M4rtinK/pyside-bb10,M4rtinK/pyside-android,qtproject/pyside-pyside,gbaty/pyside2,RobinD42/pyside,BadSingleton/pyside2,RobinD42/pyside,enthought/pyside,RobinD42/pyside,gbaty/pyside2,IronManMark20/pyside2,M4rtinK/pyside-bb10,IronManMark20/pyside2,RobinD42/pyside,IronManMark20/pyside2,BadSingleton/pyside2,pankajp/pyside,M4rtinK/pyside-android,BadSingleton/pyside2,IronManMark20/pyside2,M4rtinK/pyside-android,gbaty/pyside2,RobinD42/pyside,qtproject/pyside-pyside
|
import unittest
from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation
- class QStateMachineTest(unittest.TestCase):
+ from helper import UsesQCoreApplication
+
+ class QStateMachineTest(UsesQCoreApplication):
def cb(self, *args):
self.assertEqual(self.machine.defaultAnimations(), [self.anim])
def testBasic(self):
- app = QCoreApplication([])
-
self.machine = QStateMachine()
s1 = QState()
s2 = QState()
s3 = QFinalState()
QObject.connect(self.machine, SIGNAL("started()"), self.cb)
self.anim = QParallelAnimationGroup()
self.machine.addState(s1)
self.machine.addState(s2)
self.machine.addState(s3)
self.machine.setInitialState(s1)
self.machine.addDefaultAnimation(self.anim)
self.machine.start()
- QTimer.singleShot(100, app.quit)
+ QTimer.singleShot(100, self.app.quit)
- app.exec_()
+ self.app.exec_()
if __name__ == '__main__':
unittest.main()
|
Add UsesQCoreApplication in state machine test
|
## Code Before:
import unittest
from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation
class QStateMachineTest(unittest.TestCase):
def cb(self, *args):
self.assertEqual(self.machine.defaultAnimations(), [self.anim])
def testBasic(self):
app = QCoreApplication([])
self.machine = QStateMachine()
s1 = QState()
s2 = QState()
s3 = QFinalState()
QObject.connect(self.machine, SIGNAL("started()"), self.cb)
self.anim = QParallelAnimationGroup()
self.machine.addState(s1)
self.machine.addState(s2)
self.machine.addState(s3)
self.machine.setInitialState(s1)
self.machine.addDefaultAnimation(self.anim)
self.machine.start()
QTimer.singleShot(100, app.quit)
app.exec_()
if __name__ == '__main__':
unittest.main()
## Instruction:
Add UsesQCoreApplication in state machine test
## Code After:
import unittest
from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation
from helper import UsesQCoreApplication
class QStateMachineTest(UsesQCoreApplication):
def cb(self, *args):
self.assertEqual(self.machine.defaultAnimations(), [self.anim])
def testBasic(self):
self.machine = QStateMachine()
s1 = QState()
s2 = QState()
s3 = QFinalState()
QObject.connect(self.machine, SIGNAL("started()"), self.cb)
self.anim = QParallelAnimationGroup()
self.machine.addState(s1)
self.machine.addState(s2)
self.machine.addState(s3)
self.machine.setInitialState(s1)
self.machine.addDefaultAnimation(self.anim)
self.machine.start()
QTimer.singleShot(100, self.app.quit)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
|
import unittest
from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation
- class QStateMachineTest(unittest.TestCase):
+ from helper import UsesQCoreApplication
+
+ class QStateMachineTest(UsesQCoreApplication):
def cb(self, *args):
self.assertEqual(self.machine.defaultAnimations(), [self.anim])
def testBasic(self):
- app = QCoreApplication([])
-
self.machine = QStateMachine()
s1 = QState()
s2 = QState()
s3 = QFinalState()
QObject.connect(self.machine, SIGNAL("started()"), self.cb)
self.anim = QParallelAnimationGroup()
self.machine.addState(s1)
self.machine.addState(s2)
self.machine.addState(s3)
self.machine.setInitialState(s1)
self.machine.addDefaultAnimation(self.anim)
self.machine.start()
- QTimer.singleShot(100, app.quit)
+ QTimer.singleShot(100, self.app.quit)
? +++++
- app.exec_()
+ self.app.exec_()
? +++++
if __name__ == '__main__':
unittest.main()
|
cf026dbabffd92cb51baeb63c1e1e88045e946b9
|
netfields/forms.py
|
netfields/forms.py
|
import re
from IPy import IP
from django import forms
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
class NetInput(forms.Widget):
input_type = 'text'
def render(self, name, value, attrs=None):
# Default forms.Widget compares value != '' which breaks IP...
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if value:
final_attrs['value'] = force_unicode(value)
return mark_safe(u'<input%s />' % forms.util.flatatt(final_attrs))
class NetAddressFormField(forms.Field):
widget = NetInput
default_error_messages = {
'invalid': u'Enter a valid IP Address.',
}
def __init__(self, *args, **kwargs):
super(NetAddressFormField, self).__init__(*args, **kwargs)
def to_python(self, value):
if not value:
return None
if isinstance(value, IP):
return value
return self.python_type(value)
MAC_RE = re.compile(r'^(([A-F0-9]{2}:){5}[A-F0-9]{2})$')
class MACAddressFormField(forms.RegexField):
default_error_messages = {
'invalid': u'Enter a valid MAC address.',
}
def __init__(self, *args, **kwargs):
super(MACAddressFormField, self).__init__(MAC_RE, *args, **kwargs)
|
import re
from IPy import IP
from django import forms
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
class NetInput(forms.Widget):
input_type = 'text'
def render(self, name, value, attrs=None):
# Default forms.Widget compares value != '' which breaks IP...
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if value:
final_attrs['value'] = force_unicode(value)
return mark_safe(u'<input%s />' % forms.util.flatatt(final_attrs))
class NetAddressFormField(forms.Field):
widget = NetInput
default_error_messages = {
'invalid': u'Enter a valid IP Address.',
}
def __init__(self, *args, **kwargs):
super(NetAddressFormField, self).__init__(*args, **kwargs)
def to_python(self, value):
if not value:
return None
if isinstance(value, IP):
return value
return IP(value)
MAC_RE = re.compile(r'^(([A-F0-9]{2}:){5}[A-F0-9]{2})$')
class MACAddressFormField(forms.RegexField):
default_error_messages = {
'invalid': u'Enter a valid MAC address.',
}
def __init__(self, *args, **kwargs):
super(MACAddressFormField, self).__init__(MAC_RE, *args, **kwargs)
|
Fix casting in form to_python() method
|
Fix casting in form to_python() method
NetAddressFormField.to_python() was calling "self.python_type()" to
cast the form value to an IP() object. Unfortunately, for is no such
method defined here, or in the Django forms.Field() class, at least
in 1.4 and up
|
Python
|
bsd-3-clause
|
jmacul2/django-postgresql-netfields
|
import re
from IPy import IP
from django import forms
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
class NetInput(forms.Widget):
input_type = 'text'
def render(self, name, value, attrs=None):
# Default forms.Widget compares value != '' which breaks IP...
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if value:
final_attrs['value'] = force_unicode(value)
return mark_safe(u'<input%s />' % forms.util.flatatt(final_attrs))
class NetAddressFormField(forms.Field):
widget = NetInput
default_error_messages = {
'invalid': u'Enter a valid IP Address.',
}
def __init__(self, *args, **kwargs):
super(NetAddressFormField, self).__init__(*args, **kwargs)
def to_python(self, value):
if not value:
return None
if isinstance(value, IP):
return value
- return self.python_type(value)
+ return IP(value)
MAC_RE = re.compile(r'^(([A-F0-9]{2}:){5}[A-F0-9]{2})$')
class MACAddressFormField(forms.RegexField):
default_error_messages = {
'invalid': u'Enter a valid MAC address.',
}
def __init__(self, *args, **kwargs):
super(MACAddressFormField, self).__init__(MAC_RE, *args, **kwargs)
|
Fix casting in form to_python() method
|
## Code Before:
import re
from IPy import IP
from django import forms
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
class NetInput(forms.Widget):
input_type = 'text'
def render(self, name, value, attrs=None):
# Default forms.Widget compares value != '' which breaks IP...
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if value:
final_attrs['value'] = force_unicode(value)
return mark_safe(u'<input%s />' % forms.util.flatatt(final_attrs))
class NetAddressFormField(forms.Field):
widget = NetInput
default_error_messages = {
'invalid': u'Enter a valid IP Address.',
}
def __init__(self, *args, **kwargs):
super(NetAddressFormField, self).__init__(*args, **kwargs)
def to_python(self, value):
if not value:
return None
if isinstance(value, IP):
return value
return self.python_type(value)
MAC_RE = re.compile(r'^(([A-F0-9]{2}:){5}[A-F0-9]{2})$')
class MACAddressFormField(forms.RegexField):
default_error_messages = {
'invalid': u'Enter a valid MAC address.',
}
def __init__(self, *args, **kwargs):
super(MACAddressFormField, self).__init__(MAC_RE, *args, **kwargs)
## Instruction:
Fix casting in form to_python() method
## Code After:
import re
from IPy import IP
from django import forms
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
class NetInput(forms.Widget):
input_type = 'text'
def render(self, name, value, attrs=None):
# Default forms.Widget compares value != '' which breaks IP...
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if value:
final_attrs['value'] = force_unicode(value)
return mark_safe(u'<input%s />' % forms.util.flatatt(final_attrs))
class NetAddressFormField(forms.Field):
widget = NetInput
default_error_messages = {
'invalid': u'Enter a valid IP Address.',
}
def __init__(self, *args, **kwargs):
super(NetAddressFormField, self).__init__(*args, **kwargs)
def to_python(self, value):
if not value:
return None
if isinstance(value, IP):
return value
return IP(value)
MAC_RE = re.compile(r'^(([A-F0-9]{2}:){5}[A-F0-9]{2})$')
class MACAddressFormField(forms.RegexField):
default_error_messages = {
'invalid': u'Enter a valid MAC address.',
}
def __init__(self, *args, **kwargs):
super(MACAddressFormField, self).__init__(MAC_RE, *args, **kwargs)
|
import re
from IPy import IP
from django import forms
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
class NetInput(forms.Widget):
input_type = 'text'
def render(self, name, value, attrs=None):
# Default forms.Widget compares value != '' which breaks IP...
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if value:
final_attrs['value'] = force_unicode(value)
return mark_safe(u'<input%s />' % forms.util.flatatt(final_attrs))
class NetAddressFormField(forms.Field):
widget = NetInput
default_error_messages = {
'invalid': u'Enter a valid IP Address.',
}
def __init__(self, *args, **kwargs):
super(NetAddressFormField, self).__init__(*args, **kwargs)
def to_python(self, value):
if not value:
return None
if isinstance(value, IP):
return value
- return self.python_type(value)
+ return IP(value)
MAC_RE = re.compile(r'^(([A-F0-9]{2}:){5}[A-F0-9]{2})$')
class MACAddressFormField(forms.RegexField):
default_error_messages = {
'invalid': u'Enter a valid MAC address.',
}
def __init__(self, *args, **kwargs):
super(MACAddressFormField, self).__init__(MAC_RE, *args, **kwargs)
|
bc6001d6c25bdb5d83830e5a65fe5aea9fc1eb99
|
ume/cmd.py
|
ume/cmd.py
|
import logging as l
import argparse
from ume.utils import (
save_mat,
dynamic_load,
)
def parse_args():
p = argparse.ArgumentParser(
description='CLI interface UME')
p.add_argument('--config', dest='inifile', default='config.ini')
subparsers = p.add_subparsers(
dest='subparser_name',
help='sub-commands for instant action')
f_parser = subparsers.add_parser('feature')
f_parser.add_argument('-n', '--name', type=str, required=True)
subparsers.add_parser('validation')
subparsers.add_parser('prediction')
return p.parse_args()
def run_feature(args):
klass = dynamic_load(args.name)
result = klass()
save_mat(args.name, result)
def main():
l.basicConfig(format='%(asctime)s %(message)s', level=l.INFO)
args = parse_args()
if args.subparser_name == 'validate':
pass
elif args.subparser_name == 'predict':
pass
elif args.subparser_name == 'feature':
run_feature(args)
else:
raise RuntimeError("No such sub-command.")
|
import logging as l
import argparse
import os
from ume.utils import (
save_mat,
dynamic_load,
)
def parse_args():
p = argparse.ArgumentParser(
description='CLI interface UME')
p.add_argument('--config', dest='inifile', default='config.ini')
subparsers = p.add_subparsers(
dest='subparser_name',
help='sub-commands for instant action')
f_parser = subparsers.add_parser('feature')
f_parser.add_argument('-n', '--name', type=str, required=True)
i_parser = subparsers.add_parser('init')
subparsers.add_parser('validation')
subparsers.add_parser('prediction')
return p.parse_args()
def run_feature(args):
klass = dynamic_load(args.name)
result = klass()
save_mat(args.name, result)
def run_initialize(args):
pwd = os.getcwd()
os.makedirs(os.path.join(pwd, "data/input"))
os.makedirs(os.path.join(pwd, "data/output"))
os.makedirs(os.path.join(pwd, "data/working"))
os.makedirs(os.path.join(pwd, "note"))
os.makedirs(os.path.join(pwd, "trunk"))
def main():
l.basicConfig(format='%(asctime)s %(message)s', level=l.INFO)
args = parse_args()
if args.subparser_name == 'validate':
pass
elif args.subparser_name == 'predict':
pass
elif args.subparser_name == 'feature':
run_feature(args)
elif args.subparser_name == 'init':
run_initialize(args)
else:
raise RuntimeError("No such sub-command.")
|
Add init function to create directories
|
Add init function to create directories
|
Python
|
mit
|
smly/ume,smly/ume,smly/ume,smly/ume
|
import logging as l
import argparse
+ import os
from ume.utils import (
save_mat,
dynamic_load,
)
def parse_args():
p = argparse.ArgumentParser(
description='CLI interface UME')
p.add_argument('--config', dest='inifile', default='config.ini')
subparsers = p.add_subparsers(
dest='subparser_name',
help='sub-commands for instant action')
f_parser = subparsers.add_parser('feature')
f_parser.add_argument('-n', '--name', type=str, required=True)
+ i_parser = subparsers.add_parser('init')
subparsers.add_parser('validation')
subparsers.add_parser('prediction')
return p.parse_args()
def run_feature(args):
klass = dynamic_load(args.name)
result = klass()
save_mat(args.name, result)
+ def run_initialize(args):
+ pwd = os.getcwd()
+ os.makedirs(os.path.join(pwd, "data/input"))
+ os.makedirs(os.path.join(pwd, "data/output"))
+ os.makedirs(os.path.join(pwd, "data/working"))
+ os.makedirs(os.path.join(pwd, "note"))
+ os.makedirs(os.path.join(pwd, "trunk"))
+
+
def main():
l.basicConfig(format='%(asctime)s %(message)s', level=l.INFO)
args = parse_args()
if args.subparser_name == 'validate':
pass
elif args.subparser_name == 'predict':
pass
elif args.subparser_name == 'feature':
run_feature(args)
+ elif args.subparser_name == 'init':
+ run_initialize(args)
else:
raise RuntimeError("No such sub-command.")
|
Add init function to create directories
|
## Code Before:
import logging as l
import argparse
from ume.utils import (
save_mat,
dynamic_load,
)
def parse_args():
p = argparse.ArgumentParser(
description='CLI interface UME')
p.add_argument('--config', dest='inifile', default='config.ini')
subparsers = p.add_subparsers(
dest='subparser_name',
help='sub-commands for instant action')
f_parser = subparsers.add_parser('feature')
f_parser.add_argument('-n', '--name', type=str, required=True)
subparsers.add_parser('validation')
subparsers.add_parser('prediction')
return p.parse_args()
def run_feature(args):
klass = dynamic_load(args.name)
result = klass()
save_mat(args.name, result)
def main():
l.basicConfig(format='%(asctime)s %(message)s', level=l.INFO)
args = parse_args()
if args.subparser_name == 'validate':
pass
elif args.subparser_name == 'predict':
pass
elif args.subparser_name == 'feature':
run_feature(args)
else:
raise RuntimeError("No such sub-command.")
## Instruction:
Add init function to create directories
## Code After:
import logging as l
import argparse
import os
from ume.utils import (
save_mat,
dynamic_load,
)
def parse_args():
p = argparse.ArgumentParser(
description='CLI interface UME')
p.add_argument('--config', dest='inifile', default='config.ini')
subparsers = p.add_subparsers(
dest='subparser_name',
help='sub-commands for instant action')
f_parser = subparsers.add_parser('feature')
f_parser.add_argument('-n', '--name', type=str, required=True)
i_parser = subparsers.add_parser('init')
subparsers.add_parser('validation')
subparsers.add_parser('prediction')
return p.parse_args()
def run_feature(args):
klass = dynamic_load(args.name)
result = klass()
save_mat(args.name, result)
def run_initialize(args):
pwd = os.getcwd()
os.makedirs(os.path.join(pwd, "data/input"))
os.makedirs(os.path.join(pwd, "data/output"))
os.makedirs(os.path.join(pwd, "data/working"))
os.makedirs(os.path.join(pwd, "note"))
os.makedirs(os.path.join(pwd, "trunk"))
def main():
l.basicConfig(format='%(asctime)s %(message)s', level=l.INFO)
args = parse_args()
if args.subparser_name == 'validate':
pass
elif args.subparser_name == 'predict':
pass
elif args.subparser_name == 'feature':
run_feature(args)
elif args.subparser_name == 'init':
run_initialize(args)
else:
raise RuntimeError("No such sub-command.")
|
import logging as l
import argparse
+ import os
from ume.utils import (
save_mat,
dynamic_load,
)
def parse_args():
p = argparse.ArgumentParser(
description='CLI interface UME')
p.add_argument('--config', dest='inifile', default='config.ini')
subparsers = p.add_subparsers(
dest='subparser_name',
help='sub-commands for instant action')
f_parser = subparsers.add_parser('feature')
f_parser.add_argument('-n', '--name', type=str, required=True)
+ i_parser = subparsers.add_parser('init')
subparsers.add_parser('validation')
subparsers.add_parser('prediction')
return p.parse_args()
def run_feature(args):
klass = dynamic_load(args.name)
result = klass()
save_mat(args.name, result)
+ def run_initialize(args):
+ pwd = os.getcwd()
+ os.makedirs(os.path.join(pwd, "data/input"))
+ os.makedirs(os.path.join(pwd, "data/output"))
+ os.makedirs(os.path.join(pwd, "data/working"))
+ os.makedirs(os.path.join(pwd, "note"))
+ os.makedirs(os.path.join(pwd, "trunk"))
+
+
def main():
l.basicConfig(format='%(asctime)s %(message)s', level=l.INFO)
args = parse_args()
if args.subparser_name == 'validate':
pass
elif args.subparser_name == 'predict':
pass
elif args.subparser_name == 'feature':
run_feature(args)
+ elif args.subparser_name == 'init':
+ run_initialize(args)
else:
raise RuntimeError("No such sub-command.")
|
c5fc667a6d50677936d8ae457734562d207a034b
|
bluesky/tests/test_vertical_integration.py
|
bluesky/tests/test_vertical_integration.py
|
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
from bluesky.examples import *
from bluesky.standard_config import RE
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
uid = RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
ev = db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
|
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
from bluesky.examples import stepscan, det, motor
from bluesky.standard_config import gs
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
uid = gs.RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
|
Update test after RE -> gs.RE change.
|
TST: Update test after RE -> gs.RE change.
|
Python
|
bsd-3-clause
|
sameera2004/bluesky,ericdill/bluesky,klauer/bluesky,klauer/bluesky,ericdill/bluesky,dchabot/bluesky,sameera2004/bluesky,dchabot/bluesky
|
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
- from bluesky.examples import *
+ from bluesky.examples import stepscan, det, motor
- from bluesky.standard_config import RE
+ from bluesky.standard_config import gs
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
- uid = RE(stepscan(det, motor), group='foo', beamline_id='testing',
+ uid = gs.RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
- ev = db.fetch_events(hdr)
+ db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
|
Update test after RE -> gs.RE change.
|
## Code Before:
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
from bluesky.examples import *
from bluesky.standard_config import RE
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
uid = RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
ev = db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
## Instruction:
Update test after RE -> gs.RE change.
## Code After:
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
from bluesky.examples import stepscan, det, motor
from bluesky.standard_config import gs
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
uid = gs.RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
|
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
- from bluesky.examples import *
+ from bluesky.examples import stepscan, det, motor
- from bluesky.standard_config import RE
? ^^
+ from bluesky.standard_config import gs
? ^^
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
- uid = RE(stepscan(det, motor), group='foo', beamline_id='testing',
+ uid = gs.RE(stepscan(det, motor), group='foo', beamline_id='testing',
? +++
config={})
hdr = db[uid]
- ev = db.fetch_events(hdr)
? -----
+ db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
|
53234eb1ab0bafe49b8e198336d7958fed3e3f61
|
awx/main/managers.py
|
awx/main/managers.py
|
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
Provides "table-level" methods including getting the currently active
instance or role.
"""
def me(self):
"""Return the currently active instance."""
return self.get(uuid=settings.SYSTEM_UUID)
def my_role(self):
"""Return the role of the currently active instance, as a string
('primary' or 'secondary').
"""
if self.me().primary:
return 'primary'
return 'secondary'
def primary(self):
"""Return the primary instance."""
return self.get(primary=True)
|
import sys
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
Provides "table-level" methods including getting the currently active
instance or role.
"""
def me(self):
"""Return the currently active instance."""
# If we are running unit tests, return a stub record.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return self.model(id=1, primary=True,
uuid='00000000-0000-0000-0000-000000000000')
# Return the appropriate record from the database.
return self.get(uuid=settings.SYSTEM_UUID)
def my_role(self):
"""Return the role of the currently active instance, as a string
('primary' or 'secondary').
"""
# If we are running unit tests, we are primary, because reasons.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return 'primary'
# Check if this instance is primary; if so, return "primary", otherwise
# "secondary".
if self.me().primary:
return 'primary'
return 'secondary'
def primary(self):
"""Return the primary instance."""
# If we are running unit tests, return a stub record.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return self.model(id=1, primary=True,
uuid='00000000-0000-0000-0000-000000000000')
# Return the appropriate record from the database.
return self.get(primary=True)
|
Return stub records in testing.
|
Return stub records in testing.
|
Python
|
apache-2.0
|
snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx
|
+
+ import sys
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
Provides "table-level" methods including getting the currently active
instance or role.
"""
def me(self):
"""Return the currently active instance."""
+ # If we are running unit tests, return a stub record.
+ if len(sys.argv) >= 2 and sys.argv[1] == 'test':
+ return self.model(id=1, primary=True,
+ uuid='00000000-0000-0000-0000-000000000000')
+
+ # Return the appropriate record from the database.
return self.get(uuid=settings.SYSTEM_UUID)
def my_role(self):
"""Return the role of the currently active instance, as a string
('primary' or 'secondary').
"""
+ # If we are running unit tests, we are primary, because reasons.
+ if len(sys.argv) >= 2 and sys.argv[1] == 'test':
+ return 'primary'
+
+ # Check if this instance is primary; if so, return "primary", otherwise
+ # "secondary".
if self.me().primary:
return 'primary'
return 'secondary'
def primary(self):
"""Return the primary instance."""
+ # If we are running unit tests, return a stub record.
+ if len(sys.argv) >= 2 and sys.argv[1] == 'test':
+ return self.model(id=1, primary=True,
+ uuid='00000000-0000-0000-0000-000000000000')
+
+ # Return the appropriate record from the database.
return self.get(primary=True)
|
Return stub records in testing.
|
## Code Before:
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
Provides "table-level" methods including getting the currently active
instance or role.
"""
def me(self):
"""Return the currently active instance."""
return self.get(uuid=settings.SYSTEM_UUID)
def my_role(self):
"""Return the role of the currently active instance, as a string
('primary' or 'secondary').
"""
if self.me().primary:
return 'primary'
return 'secondary'
def primary(self):
"""Return the primary instance."""
return self.get(primary=True)
## Instruction:
Return stub records in testing.
## Code After:
import sys
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
Provides "table-level" methods including getting the currently active
instance or role.
"""
def me(self):
"""Return the currently active instance."""
# If we are running unit tests, return a stub record.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return self.model(id=1, primary=True,
uuid='00000000-0000-0000-0000-000000000000')
# Return the appropriate record from the database.
return self.get(uuid=settings.SYSTEM_UUID)
def my_role(self):
"""Return the role of the currently active instance, as a string
('primary' or 'secondary').
"""
# If we are running unit tests, we are primary, because reasons.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return 'primary'
# Check if this instance is primary; if so, return "primary", otherwise
# "secondary".
if self.me().primary:
return 'primary'
return 'secondary'
def primary(self):
"""Return the primary instance."""
# If we are running unit tests, return a stub record.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return self.model(id=1, primary=True,
uuid='00000000-0000-0000-0000-000000000000')
# Return the appropriate record from the database.
return self.get(primary=True)
|
+
+ import sys
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
Provides "table-level" methods including getting the currently active
instance or role.
"""
def me(self):
"""Return the currently active instance."""
+ # If we are running unit tests, return a stub record.
+ if len(sys.argv) >= 2 and sys.argv[1] == 'test':
+ return self.model(id=1, primary=True,
+ uuid='00000000-0000-0000-0000-000000000000')
+
+ # Return the appropriate record from the database.
return self.get(uuid=settings.SYSTEM_UUID)
def my_role(self):
"""Return the role of the currently active instance, as a string
('primary' or 'secondary').
"""
+ # If we are running unit tests, we are primary, because reasons.
+ if len(sys.argv) >= 2 and sys.argv[1] == 'test':
+ return 'primary'
+
+ # Check if this instance is primary; if so, return "primary", otherwise
+ # "secondary".
if self.me().primary:
return 'primary'
return 'secondary'
def primary(self):
"""Return the primary instance."""
+ # If we are running unit tests, return a stub record.
+ if len(sys.argv) >= 2 and sys.argv[1] == 'test':
+ return self.model(id=1, primary=True,
+ uuid='00000000-0000-0000-0000-000000000000')
+
+ # Return the appropriate record from the database.
return self.get(primary=True)
|
fe32099bf1b6aa387c98dd6afdfc31557fc4e1f9
|
volpy/__init__.py
|
volpy/__init__.py
|
from .camera import Camera
from .scene import Scene, Element, Light
from .version import __version__
from .grid import Grid
from .homogeneous import (translate, scale, rotatex, rotatey, rotatez, rotatexyz,
rotate_axis, cross)
from .geometry import Geometry, BBox
|
'''
Volpy
=====
A fast volume rendering implementation for Python. Volpy has support for:
1. Multithreading or multiprocessing at the rendering step
2. Native implementation of ray casting
3. Native access to NumPy arrays during rendering
4. Support for ambient and diffuse lighting terms
How to use this package
-----------------------
Volpy is organized into several different modules but the API is imported into
the root of the package. Therefore, you should write your code like this:
>>> import volpy
>>> scene = volpy.Scene(ambient=my_func)
'''
from .camera import Camera
from .scene import Scene, Element, Light
from .version import __version__
from .grid import Grid
from .homogeneous import (translate, scale, rotatex, rotatey, rotatez, rotatexyz,
rotate_axis, cross)
from .geometry import Geometry, BBox
|
Write a docstring for the package
|
Write a docstring for the package
|
Python
|
mit
|
OEP/volpy,OEP/volpy
|
+ '''
+ Volpy
+ =====
+
+ A fast volume rendering implementation for Python. Volpy has support for:
+
+ 1. Multithreading or multiprocessing at the rendering step
+ 2. Native implementation of ray casting
+ 3. Native access to NumPy arrays during rendering
+ 4. Support for ambient and diffuse lighting terms
+
+ How to use this package
+ -----------------------
+
+ Volpy is organized into several different modules but the API is imported into
+ the root of the package. Therefore, you should write your code like this:
+
+ >>> import volpy
+ >>> scene = volpy.Scene(ambient=my_func)
+
+ '''
from .camera import Camera
from .scene import Scene, Element, Light
from .version import __version__
from .grid import Grid
from .homogeneous import (translate, scale, rotatex, rotatey, rotatez, rotatexyz,
rotate_axis, cross)
from .geometry import Geometry, BBox
|
Write a docstring for the package
|
## Code Before:
from .camera import Camera
from .scene import Scene, Element, Light
from .version import __version__
from .grid import Grid
from .homogeneous import (translate, scale, rotatex, rotatey, rotatez, rotatexyz,
rotate_axis, cross)
from .geometry import Geometry, BBox
## Instruction:
Write a docstring for the package
## Code After:
'''
Volpy
=====
A fast volume rendering implementation for Python. Volpy has support for:
1. Multithreading or multiprocessing at the rendering step
2. Native implementation of ray casting
3. Native access to NumPy arrays during rendering
4. Support for ambient and diffuse lighting terms
How to use this package
-----------------------
Volpy is organized into several different modules but the API is imported into
the root of the package. Therefore, you should write your code like this:
>>> import volpy
>>> scene = volpy.Scene(ambient=my_func)
'''
from .camera import Camera
from .scene import Scene, Element, Light
from .version import __version__
from .grid import Grid
from .homogeneous import (translate, scale, rotatex, rotatey, rotatez, rotatexyz,
rotate_axis, cross)
from .geometry import Geometry, BBox
|
+ '''
+ Volpy
+ =====
+
+ A fast volume rendering implementation for Python. Volpy has support for:
+
+ 1. Multithreading or multiprocessing at the rendering step
+ 2. Native implementation of ray casting
+ 3. Native access to NumPy arrays during rendering
+ 4. Support for ambient and diffuse lighting terms
+
+ How to use this package
+ -----------------------
+
+ Volpy is organized into several different modules but the API is imported into
+ the root of the package. Therefore, you should write your code like this:
+
+ >>> import volpy
+ >>> scene = volpy.Scene(ambient=my_func)
+
+ '''
from .camera import Camera
from .scene import Scene, Element, Light
from .version import __version__
from .grid import Grid
from .homogeneous import (translate, scale, rotatex, rotatey, rotatez, rotatexyz,
rotate_axis, cross)
from .geometry import Geometry, BBox
|
c7b7e62cb2585f6109d70b27564617b0be4c8c33
|
tests/test_daterange.py
|
tests/test_daterange.py
|
import os
import time
try:
import unittest2 as unittest
except ImportError:
import unittest
class DateRangeTest(unittest.TestCase):
def setUp(self):
self.openlicensefile = os.path.join(os.path.dirname(__file__), '../LICENSE.txt')
self.pattern = 'Copyright (c) 2012 - %s SendGrid, Inc.' % (time.strftime("%Y"))
self.licensefile = open(self.openlicensefile).read()
def test__daterange(self):
self.assertTrue(self.pattern in self.licensefile)
|
import os
import time
try:
import unittest2 as unittest
except ImportError:
import unittest
class DateRangeTest(unittest.TestCase):
def setUp(self):
self.openlicensefile = os.path.join(
os.path.dirname(__file__),
'../LICENSE.txt')
self.pattern = 'Copyright (c) 2012 - %s SendGrid, Inc.' % (
time.strftime("%Y"))
self.licensefile = open(self.openlicensefile).read()
def test__daterange(self):
self.assertTrue(self.pattern in self.licensefile)
|
Update code for PEP8 compliance
|
Update code for PEP8 compliance
|
Python
|
mit
|
sendgrid/python-http-client,sendgrid/python-http-client
|
import os
import time
try:
import unittest2 as unittest
except ImportError:
import unittest
+
class DateRangeTest(unittest.TestCase):
def setUp(self):
- self.openlicensefile = os.path.join(os.path.dirname(__file__), '../LICENSE.txt')
+ self.openlicensefile = os.path.join(
+ os.path.dirname(__file__),
+ '../LICENSE.txt')
- self.pattern = 'Copyright (c) 2012 - %s SendGrid, Inc.' % (time.strftime("%Y"))
+ self.pattern = 'Copyright (c) 2012 - %s SendGrid, Inc.' % (
+ time.strftime("%Y"))
self.licensefile = open(self.openlicensefile).read()
def test__daterange(self):
self.assertTrue(self.pattern in self.licensefile)
|
Update code for PEP8 compliance
|
## Code Before:
import os
import time
try:
import unittest2 as unittest
except ImportError:
import unittest
class DateRangeTest(unittest.TestCase):
def setUp(self):
self.openlicensefile = os.path.join(os.path.dirname(__file__), '../LICENSE.txt')
self.pattern = 'Copyright (c) 2012 - %s SendGrid, Inc.' % (time.strftime("%Y"))
self.licensefile = open(self.openlicensefile).read()
def test__daterange(self):
self.assertTrue(self.pattern in self.licensefile)
## Instruction:
Update code for PEP8 compliance
## Code After:
import os
import time
try:
import unittest2 as unittest
except ImportError:
import unittest
class DateRangeTest(unittest.TestCase):
def setUp(self):
self.openlicensefile = os.path.join(
os.path.dirname(__file__),
'../LICENSE.txt')
self.pattern = 'Copyright (c) 2012 - %s SendGrid, Inc.' % (
time.strftime("%Y"))
self.licensefile = open(self.openlicensefile).read()
def test__daterange(self):
self.assertTrue(self.pattern in self.licensefile)
|
import os
import time
try:
import unittest2 as unittest
except ImportError:
import unittest
+
class DateRangeTest(unittest.TestCase):
def setUp(self):
- self.openlicensefile = os.path.join(os.path.dirname(__file__), '../LICENSE.txt')
+ self.openlicensefile = os.path.join(
+ os.path.dirname(__file__),
+ '../LICENSE.txt')
- self.pattern = 'Copyright (c) 2012 - %s SendGrid, Inc.' % (time.strftime("%Y"))
? --------------------
+ self.pattern = 'Copyright (c) 2012 - %s SendGrid, Inc.' % (
+ time.strftime("%Y"))
self.licensefile = open(self.openlicensefile).read()
def test__daterange(self):
self.assertTrue(self.pattern in self.licensefile)
|
bf5ec5a459dc9dbe38a6806b513616aa769134a2
|
amqpy/tests/test_version.py
|
amqpy/tests/test_version.py
|
import re
def get_field(doc: str, name: str):
match = re.search(':{}: (.*)$'.format(name), doc, re.IGNORECASE | re.MULTILINE)
if match:
return match.group(1).strip()
class TestVersion:
def test_version_is_consistent(self):
from .. import VERSION
with open('README.rst') as f:
readme = f.read()
version = get_field(readme, 'version')
version = version.split('.')
version = tuple([int(i) for i in version])
assert VERSION == version
|
import re
def get_field(doc: str, name: str):
match = re.search(':{}: (.*)$'.format(name), doc, re.IGNORECASE | re.MULTILINE)
if match:
return match.group(1).strip()
class TestVersion:
def test_version_is_consistent(self):
from .. import VERSION
with open('README.rst') as f:
readme = f.read()
version = get_field(readme, 'version')
version = tuple(map(int, version.split('.')))
assert VERSION == version
|
Use `map` to test version
|
Use `map` to test version
|
Python
|
mit
|
veegee/amqpy,gst/amqpy
|
import re
def get_field(doc: str, name: str):
match = re.search(':{}: (.*)$'.format(name), doc, re.IGNORECASE | re.MULTILINE)
if match:
return match.group(1).strip()
class TestVersion:
def test_version_is_consistent(self):
from .. import VERSION
with open('README.rst') as f:
readme = f.read()
version = get_field(readme, 'version')
- version = version.split('.')
+ version = tuple(map(int, version.split('.')))
- version = tuple([int(i) for i in version])
assert VERSION == version
|
Use `map` to test version
|
## Code Before:
import re
def get_field(doc: str, name: str):
match = re.search(':{}: (.*)$'.format(name), doc, re.IGNORECASE | re.MULTILINE)
if match:
return match.group(1).strip()
class TestVersion:
def test_version_is_consistent(self):
from .. import VERSION
with open('README.rst') as f:
readme = f.read()
version = get_field(readme, 'version')
version = version.split('.')
version = tuple([int(i) for i in version])
assert VERSION == version
## Instruction:
Use `map` to test version
## Code After:
import re
def get_field(doc: str, name: str):
match = re.search(':{}: (.*)$'.format(name), doc, re.IGNORECASE | re.MULTILINE)
if match:
return match.group(1).strip()
class TestVersion:
def test_version_is_consistent(self):
from .. import VERSION
with open('README.rst') as f:
readme = f.read()
version = get_field(readme, 'version')
version = tuple(map(int, version.split('.')))
assert VERSION == version
|
import re
def get_field(doc: str, name: str):
match = re.search(':{}: (.*)$'.format(name), doc, re.IGNORECASE | re.MULTILINE)
if match:
return match.group(1).strip()
class TestVersion:
def test_version_is_consistent(self):
from .. import VERSION
with open('README.rst') as f:
readme = f.read()
version = get_field(readme, 'version')
- version = version.split('.')
+ version = tuple(map(int, version.split('.')))
? +++++++++++++++ ++
- version = tuple([int(i) for i in version])
assert VERSION == version
|
4d8b0fefa420efd60da0c6a29968f2fd441b9e09
|
openbox/configuration_builder/transformations.py
|
openbox/configuration_builder/transformations.py
|
def to_int(value):
return int(value)
def identity(value):
return value
|
def to_int(value, num=None):
return int(value)
def to_float(value, num=None):
return float(value)
def identity(value, num=None):
return value
|
Add to_float function and add num keyword
|
Transformations: Add to_float function and add num keyword
|
Python
|
apache-2.0
|
DeepnessLab/obsi,OpenBoxProject/obsi,DeepnessLab/obsi,pavel-lazar/obsi,pavel-lazar/obsi,OpenBoxProject/obsi,DeepnessLab/obsi,pavel-lazar/obsi,DeepnessLab/obsi,pavel-lazar/obsi,OpenBoxProject/obsi,OpenBoxProject/obsi
|
- def to_int(value):
+ def to_int(value, num=None):
return int(value)
+
+ def to_float(value, num=None):
+ return float(value)
+
+
- def identity(value):
+ def identity(value, num=None):
return value
|
Add to_float function and add num keyword
|
## Code Before:
def to_int(value):
return int(value)
def identity(value):
return value
## Instruction:
Add to_float function and add num keyword
## Code After:
def to_int(value, num=None):
return int(value)
def to_float(value, num=None):
return float(value)
def identity(value, num=None):
return value
|
- def to_int(value):
+ def to_int(value, num=None):
? ++++++++++
return int(value)
+
+ def to_float(value, num=None):
+ return float(value)
+
+
- def identity(value):
+ def identity(value, num=None):
? ++++++++++
return value
|
f551d23531ec4aab041494ac8af921eb77d6b2a0
|
nb_conda/__init__.py
|
nb_conda/__init__.py
|
from ._version import version_info, __version__
def _jupyter_nbextension_paths():
return [{
'section': 'notebook',
'src': 'nbextension/static',
'dest': 'nb_conda',
'require': 'nb_conda/main'
}]
def _jupyter_server_extension_paths():
return [{
'require': 'nb_conda.nbextension'
}]
|
from ._version import version_info, __version__
def _jupyter_nbextension_paths():
return [dict(section="notebook",
src="nbextension/static",
dest="nb_conda",
require="nb_conda/main")]
def _jupyter_server_extension_paths():
return [dict(module='nb_conda.nbextension')]
|
Update to the latest way to offer metadata
|
Update to the latest way to offer metadata
|
Python
|
bsd-3-clause
|
Anaconda-Server/nb_conda,Anaconda-Server/nb_conda,Anaconda-Server/nb_conda,Anaconda-Server/nb_conda
|
from ._version import version_info, __version__
+
def _jupyter_nbextension_paths():
+ return [dict(section="notebook",
- return [{
- 'section': 'notebook',
- 'src': 'nbextension/static',
+ src="nbextension/static",
- 'dest': 'nb_conda',
- 'require': 'nb_conda/main'
- }]
+ dest="nb_conda",
+ require="nb_conda/main")]
+
def _jupyter_server_extension_paths():
+ return [dict(module='nb_conda.nbextension')]
- return [{
- 'require': 'nb_conda.nbextension'
- }]
|
Update to the latest way to offer metadata
|
## Code Before:
from ._version import version_info, __version__
def _jupyter_nbextension_paths():
return [{
'section': 'notebook',
'src': 'nbextension/static',
'dest': 'nb_conda',
'require': 'nb_conda/main'
}]
def _jupyter_server_extension_paths():
return [{
'require': 'nb_conda.nbextension'
}]
## Instruction:
Update to the latest way to offer metadata
## Code After:
from ._version import version_info, __version__
def _jupyter_nbextension_paths():
return [dict(section="notebook",
src="nbextension/static",
dest="nb_conda",
require="nb_conda/main")]
def _jupyter_server_extension_paths():
return [dict(module='nb_conda.nbextension')]
|
from ._version import version_info, __version__
+
def _jupyter_nbextension_paths():
+ return [dict(section="notebook",
- return [{
- 'section': 'notebook',
- 'src': 'nbextension/static',
? ^ ^^^^ ^
+ src="nbextension/static",
? ^^^^^^^^^ ^^ ^
- 'dest': 'nb_conda',
- 'require': 'nb_conda/main'
- }]
+ dest="nb_conda",
+ require="nb_conda/main")]
+
def _jupyter_server_extension_paths():
+ return [dict(module='nb_conda.nbextension')]
- return [{
- 'require': 'nb_conda.nbextension'
- }]
|
8178bf161d39976405690d68d9ffe6c4dfd9d705
|
web/view_athena/views.py
|
web/view_athena/views.py
|
from django.shortcuts import render
from elasticsearch import Elasticsearch
from django.http import HttpResponse
def search(request):
if request.method == 'GET':
term = request.GET.get('term_search')
if term == None:
term = ""
response = search_term(term)
pages = []
for hit in response['hits']['hits']:
x = {'source': hit["_source"], 'highlight': hit["highlight"]["text"][0]}
pages.append(x)
return render(request, 'view_athena/index.html', {'pages':pages,'term_search':term})
def search_term(term):
es = Elasticsearch()
res = es.search(index="athena", body={"query": {"bool": {"should": [ { "match": { "title": "\"" + str(term) + "\"" }},
{ "match": { "text": "\"" + str(term) + "\"" }},
{ "match": { "description": "\"" + str(term) + "\"" }}]}},"highlight": {"fields" : {"text" : {}}}})
return res
|
from django.shortcuts import render
from elasticsearch import Elasticsearch
from django.http import HttpResponse
def search(request):
if request.method == 'GET':
term = request.GET.get('term_search')
if term == None:
term = ""
response = search_term(term)
pages = []
for hit in response['hits']['hits']:
x = {'source': hit["_source"], 'highlight': hit["highlight"]["text"][0]}
pages.append(x)
return render(request, 'view_athena/index.html', {'pages':pages,'term_search':term})
def search_term(term):
es = Elasticsearch()
res = es.search(index="athena", body={"query": {"bool": {"should": [ { "match_phrase": { "title": "\"" + str(term) + "\"" }},
{ "match_phrase": { "text": "\"" + str(term) + "\"" }},
{ "match_phrase": { "description": "\"" + str(term) + "\"" }}]}},"highlight": {"fields" : {"text" : {}}}})
return res
|
Update 'search_term' functon. Add 'match_phrase' function.
|
Update 'search_term' functon. Add 'match_phrase' function.
|
Python
|
mit
|
pattyvader/athena,pattyvader/athena,pattyvader/athena
|
from django.shortcuts import render
from elasticsearch import Elasticsearch
from django.http import HttpResponse
def search(request):
if request.method == 'GET':
term = request.GET.get('term_search')
if term == None:
term = ""
response = search_term(term)
pages = []
for hit in response['hits']['hits']:
x = {'source': hit["_source"], 'highlight': hit["highlight"]["text"][0]}
pages.append(x)
return render(request, 'view_athena/index.html', {'pages':pages,'term_search':term})
def search_term(term):
es = Elasticsearch()
- res = es.search(index="athena", body={"query": {"bool": {"should": [ { "match": { "title": "\"" + str(term) + "\"" }},
+ res = es.search(index="athena", body={"query": {"bool": {"should": [ { "match_phrase": { "title": "\"" + str(term) + "\"" }},
- { "match": { "text": "\"" + str(term) + "\"" }},
+ { "match_phrase": { "text": "\"" + str(term) + "\"" }},
- { "match": { "description": "\"" + str(term) + "\"" }}]}},"highlight": {"fields" : {"text" : {}}}})
+ { "match_phrase": { "description": "\"" + str(term) + "\"" }}]}},"highlight": {"fields" : {"text" : {}}}})
return res
|
Update 'search_term' function. Add 'match_phrase' function.
|
## Code Before:
from django.shortcuts import render
from elasticsearch import Elasticsearch
from django.http import HttpResponse
def search(request):
if request.method == 'GET':
term = request.GET.get('term_search')
if term == None:
term = ""
response = search_term(term)
pages = []
for hit in response['hits']['hits']:
x = {'source': hit["_source"], 'highlight': hit["highlight"]["text"][0]}
pages.append(x)
return render(request, 'view_athena/index.html', {'pages':pages,'term_search':term})
def search_term(term):
es = Elasticsearch()
res = es.search(index="athena", body={"query": {"bool": {"should": [ { "match": { "title": "\"" + str(term) + "\"" }},
{ "match": { "text": "\"" + str(term) + "\"" }},
{ "match": { "description": "\"" + str(term) + "\"" }}]}},"highlight": {"fields" : {"text" : {}}}})
return res
## Instruction:
Update 'search_term' function. Add 'match_phrase' function.
## Code After:
from django.shortcuts import render
from elasticsearch import Elasticsearch
from django.http import HttpResponse
def search(request):
if request.method == 'GET':
term = request.GET.get('term_search')
if term == None:
term = ""
response = search_term(term)
pages = []
for hit in response['hits']['hits']:
x = {'source': hit["_source"], 'highlight': hit["highlight"]["text"][0]}
pages.append(x)
return render(request, 'view_athena/index.html', {'pages':pages,'term_search':term})
def search_term(term):
es = Elasticsearch()
res = es.search(index="athena", body={"query": {"bool": {"should": [ { "match_phrase": { "title": "\"" + str(term) + "\"" }},
{ "match_phrase": { "text": "\"" + str(term) + "\"" }},
{ "match_phrase": { "description": "\"" + str(term) + "\"" }}]}},"highlight": {"fields" : {"text" : {}}}})
return res
|
from django.shortcuts import render
from elasticsearch import Elasticsearch
from django.http import HttpResponse
def search(request):
if request.method == 'GET':
term = request.GET.get('term_search')
if term == None:
term = ""
response = search_term(term)
pages = []
for hit in response['hits']['hits']:
x = {'source': hit["_source"], 'highlight': hit["highlight"]["text"][0]}
pages.append(x)
return render(request, 'view_athena/index.html', {'pages':pages,'term_search':term})
def search_term(term):
es = Elasticsearch()
- res = es.search(index="athena", body={"query": {"bool": {"should": [ { "match": { "title": "\"" + str(term) + "\"" }},
+ res = es.search(index="athena", body={"query": {"bool": {"should": [ { "match_phrase": { "title": "\"" + str(term) + "\"" }},
? +++++++
- { "match": { "text": "\"" + str(term) + "\"" }},
+ { "match_phrase": { "text": "\"" + str(term) + "\"" }},
? +++++++
- { "match": { "description": "\"" + str(term) + "\"" }}]}},"highlight": {"fields" : {"text" : {}}}})
+ { "match_phrase": { "description": "\"" + str(term) + "\"" }}]}},"highlight": {"fields" : {"text" : {}}}})
? +++++++
return res
|
6aea2f1c3a478be0c6926f442924e1f263955430
|
pip_run/__init__.py
|
pip_run/__init__.py
|
import sys
from . import deps
from . import commands
from . import launch
from . import scripts
def run(args=None):
if args is None:
args = sys.argv[1:]
pip_args, params = commands.parse_script_args(args)
commands.intercept(pip_args)
pip_args.extend(scripts.DepsReader.search(params))
with deps.load(*deps.not_installed(pip_args)) as home:
raise SystemExit(launch.with_path(home, params))
|
import sys
from . import deps
from . import commands
from . import launch
from . import scripts
def run(args=None):
    """
    Main entry point for pip-run.

    Splits the command line into installer arguments and script parameters,
    loads any not-yet-installed requirements into a temporary target, and
    exits with the launched script's return code.
    """
    argv = sys.argv[1:] if args is None else args
    installer_args, script_params = commands.parse_script_args(argv)
    commands.intercept(installer_args)
    # Requirements declared inside the script itself are appended too.
    installer_args.extend(scripts.DepsReader.search(script_params))
    with deps.load(*deps.not_installed(installer_args)) as target:
        raise SystemExit(launch.with_path(target, script_params))
|
Add docstring to run function.
|
Add docstring to run function.
|
Python
|
mit
|
jaraco/rwt
|
import sys
from . import deps
from . import commands
from . import launch
from . import scripts
def run(args=None):
+ """
+ Main entry point for pip-run.
+ """
if args is None:
args = sys.argv[1:]
pip_args, params = commands.parse_script_args(args)
commands.intercept(pip_args)
pip_args.extend(scripts.DepsReader.search(params))
with deps.load(*deps.not_installed(pip_args)) as home:
raise SystemExit(launch.with_path(home, params))
|
Add docstring to run function.
|
## Code Before:
import sys
from . import deps
from . import commands
from . import launch
from . import scripts
def run(args=None):
if args is None:
args = sys.argv[1:]
pip_args, params = commands.parse_script_args(args)
commands.intercept(pip_args)
pip_args.extend(scripts.DepsReader.search(params))
with deps.load(*deps.not_installed(pip_args)) as home:
raise SystemExit(launch.with_path(home, params))
## Instruction:
Add docstring to run function.
## Code After:
import sys
from . import deps
from . import commands
from . import launch
from . import scripts
def run(args=None):
"""
Main entry point for pip-run.
"""
if args is None:
args = sys.argv[1:]
pip_args, params = commands.parse_script_args(args)
commands.intercept(pip_args)
pip_args.extend(scripts.DepsReader.search(params))
with deps.load(*deps.not_installed(pip_args)) as home:
raise SystemExit(launch.with_path(home, params))
|
import sys
from . import deps
from . import commands
from . import launch
from . import scripts
def run(args=None):
+ """
+ Main entry point for pip-run.
+ """
if args is None:
args = sys.argv[1:]
pip_args, params = commands.parse_script_args(args)
commands.intercept(pip_args)
pip_args.extend(scripts.DepsReader.search(params))
with deps.load(*deps.not_installed(pip_args)) as home:
raise SystemExit(launch.with_path(home, params))
|
f17a70980f1964e40a22fad5e54f4cafcdcf9d52
|
useless_passport_validator/ulibrary.py
|
useless_passport_validator/ulibrary.py
|
from collections import namedtuple
"""Document constants"""
countries = ["Mordor", "Gondor", "Lorien", "Shire"]
genders = ["Male", "Female"]
cities = {
'Mordor': 'Minas Morgul,Barad Dur',
'Gondor': 'Minas Tirith,Isengard,Osgiliath',
'Lorien': 'Lorien',
'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough'
}
purpose = ["Visit", "Transit", "Work", "Immigrate"]
"""Store user input here"""
UPassport = namedtuple("UPassport", "country name gender isscity expdate serial")
UPass = namedtuple("UPass", "name gender purpose duration serial expires")
UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires")
URecord = namedtuple("URecord", "purpose duration")
|
from collections import namedtuple
def init():
    """Populate the module-level document constants and record types."""
    global countries, genders, cities, purpose
    global UPassport, UPass, UWorkVisa, URecord

    # Document constants
    countries = ["Mordor", "Gondor", "Lorien", "Shire"]
    genders = ["Male", "Female"]
    cities = {
        'Mordor': 'Minas Morgul,Barad Dur',
        'Gondor': 'Minas Tirith,Isengard,Osgiliath',
        'Lorien': 'Lorien',
        'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough',
    }
    purpose = ["Visit", "Transit", "Work", "Immigrate"]

    # Record types that store user input
    UPassport = namedtuple("UPassport", "country name gender isscity expdate serial")
    UPass = namedtuple("UPass", "name gender purpose duration serial expires")
    UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires")
    URecord = namedtuple("URecord", "purpose duration")
|
Define init function. Make variables actually global
|
Define init function. Make variables actually global
|
Python
|
mit
|
Hethurin/UApp
|
from collections import namedtuple
+ def init():
- """Document constants"""
+ """Document constants"""
+ global countries
- countries = ["Mordor", "Gondor", "Lorien", "Shire"]
+ countries = ["Mordor", "Gondor", "Lorien", "Shire"]
+ global genders
- genders = ["Male", "Female"]
+ genders = ["Male", "Female"]
+ global cities
- cities = {
+ cities = {
- 'Mordor': 'Minas Morgul,Barad Dur',
+ 'Mordor': 'Minas Morgul,Barad Dur',
- 'Gondor': 'Minas Tirith,Isengard,Osgiliath',
+ 'Gondor': 'Minas Tirith,Isengard,Osgiliath',
- 'Lorien': 'Lorien',
+ 'Lorien': 'Lorien',
- 'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough'
+ 'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough'
}
+ global purpose
- purpose = ["Visit", "Transit", "Work", "Immigrate"]
+ purpose = ["Visit", "Transit", "Work", "Immigrate"]
- """Store user input here"""
+ """Store user input here"""
+ global UPassport
- UPassport = namedtuple("UPassport", "country name gender isscity expdate serial")
+ UPassport = namedtuple("UPassport", "country name gender isscity expdate serial")
+ global UPass
- UPass = namedtuple("UPass", "name gender purpose duration serial expires")
+ UPass = namedtuple("UPass", "name gender purpose duration serial expires")
+ global UWorkVisa
- UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires")
+ UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires")
+ global URecord
- URecord = namedtuple("URecord", "purpose duration")
+ URecord = namedtuple("URecord", "purpose duration")
|
Define init function. Make variables actually global
|
## Code Before:
from collections import namedtuple
"""Document constants"""
countries = ["Mordor", "Gondor", "Lorien", "Shire"]
genders = ["Male", "Female"]
cities = {
'Mordor': 'Minas Morgul,Barad Dur',
'Gondor': 'Minas Tirith,Isengard,Osgiliath',
'Lorien': 'Lorien',
'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough'
}
purpose = ["Visit", "Transit", "Work", "Immigrate"]
"""Store user input here"""
UPassport = namedtuple("UPassport", "country name gender isscity expdate serial")
UPass = namedtuple("UPass", "name gender purpose duration serial expires")
UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires")
URecord = namedtuple("URecord", "purpose duration")
## Instruction:
Define init function. Make variables actually global
## Code After:
from collections import namedtuple
def init():
"""Document constants"""
global countries
countries = ["Mordor", "Gondor", "Lorien", "Shire"]
global genders
genders = ["Male", "Female"]
global cities
cities = {
'Mordor': 'Minas Morgul,Barad Dur',
'Gondor': 'Minas Tirith,Isengard,Osgiliath',
'Lorien': 'Lorien',
'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough'
}
global purpose
purpose = ["Visit", "Transit", "Work", "Immigrate"]
"""Store user input here"""
global UPassport
UPassport = namedtuple("UPassport", "country name gender isscity expdate serial")
global UPass
UPass = namedtuple("UPass", "name gender purpose duration serial expires")
global UWorkVisa
UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires")
global URecord
URecord = namedtuple("URecord", "purpose duration")
|
from collections import namedtuple
+ def init():
- """Document constants"""
+ """Document constants"""
? ++++
+ global countries
- countries = ["Mordor", "Gondor", "Lorien", "Shire"]
+ countries = ["Mordor", "Gondor", "Lorien", "Shire"]
? ++++
+ global genders
- genders = ["Male", "Female"]
+ genders = ["Male", "Female"]
? ++++
+ global cities
- cities = {
+ cities = {
? ++++
- 'Mordor': 'Minas Morgul,Barad Dur',
+ 'Mordor': 'Minas Morgul,Barad Dur',
? ++++
- 'Gondor': 'Minas Tirith,Isengard,Osgiliath',
+ 'Gondor': 'Minas Tirith,Isengard,Osgiliath',
? ++++
- 'Lorien': 'Lorien',
+ 'Lorien': 'Lorien',
? ++++
- 'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough'
+ 'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough'
? ++++
}
+ global purpose
- purpose = ["Visit", "Transit", "Work", "Immigrate"]
+ purpose = ["Visit", "Transit", "Work", "Immigrate"]
? ++++
- """Store user input here"""
+ """Store user input here"""
? ++++
+ global UPassport
- UPassport = namedtuple("UPassport", "country name gender isscity expdate serial")
+ UPassport = namedtuple("UPassport", "country name gender isscity expdate serial")
? ++++
+ global UPass
- UPass = namedtuple("UPass", "name gender purpose duration serial expires")
+ UPass = namedtuple("UPass", "name gender purpose duration serial expires")
? ++++
+ global UWorkVisa
- UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires")
+ UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires")
? ++++
+ global URecord
- URecord = namedtuple("URecord", "purpose duration")
+ URecord = namedtuple("URecord", "purpose duration")
? ++++
|
c65e2b3bb2d43a5d12d50cf79636e94a6a1f1dc5
|
Algo.py
|
Algo.py
|
from abc import ABC, abstractmethod
from Color import RandomColor
class Algo(ABC):
def __init__(self, population_size):
self.population_size = population_size
self.set_random_population()
def set_random_population(self):
self.population = []
for i in range(self.population_size):
self.population.append(RandomColor())
def dump(self):
for color in self.population:
print('{} '.format(color), end='')
print(flush=True)
@abstractmethod
def tick(self, deltas):
pass
|
from abc import ABC, abstractmethod
from Color import RandomColor
class Algo(ABC):
    """Abstract base for color-search algorithms over a random population."""

    def __init__(self, population_size):

        # Pairing-based (genetic) algorithms need an even population.
        assert population_size % 2 == 0

        self.half_population_size = population_size // 2
        self.population_size = population_size

        self.set_random_population()

    def set_random_population(self):
        """Replace the population with freshly generated random colors."""
        self.population = [RandomColor() for _ in range(self.population_size)]

    def dump(self):
        """Print the whole population on a single flushed line."""
        rendered = ''.join('{} '.format(color) for color in self.population)
        print(rendered, flush=True)

    @abstractmethod
    def tick(self, deltas):
        pass
|
Check that population_size is even, and set half_population_size.
|
Check that population_size is even, and set half_population_size.
This will be used by genetic algorithms.
|
Python
|
isc
|
dargor/python-guess-random-color,dargor/python-guess-random-color
|
from abc import ABC, abstractmethod
from Color import RandomColor
class Algo(ABC):
def __init__(self, population_size):
+
+ assert population_size % 2 == 0
+
+ self.half_population_size = population_size // 2
self.population_size = population_size
+
self.set_random_population()
def set_random_population(self):
self.population = []
for i in range(self.population_size):
self.population.append(RandomColor())
def dump(self):
for color in self.population:
print('{} '.format(color), end='')
print(flush=True)
@abstractmethod
def tick(self, deltas):
pass
|
Check that population_size is even, and set half_population_size.
|
## Code Before:
from abc import ABC, abstractmethod
from Color import RandomColor
class Algo(ABC):
def __init__(self, population_size):
self.population_size = population_size
self.set_random_population()
def set_random_population(self):
self.population = []
for i in range(self.population_size):
self.population.append(RandomColor())
def dump(self):
for color in self.population:
print('{} '.format(color), end='')
print(flush=True)
@abstractmethod
def tick(self, deltas):
pass
## Instruction:
Check that population_size is even, and set half_population_size.
## Code After:
from abc import ABC, abstractmethod
from Color import RandomColor
class Algo(ABC):
def __init__(self, population_size):
assert population_size % 2 == 0
self.half_population_size = population_size // 2
self.population_size = population_size
self.set_random_population()
def set_random_population(self):
self.population = []
for i in range(self.population_size):
self.population.append(RandomColor())
def dump(self):
for color in self.population:
print('{} '.format(color), end='')
print(flush=True)
@abstractmethod
def tick(self, deltas):
pass
|
from abc import ABC, abstractmethod
from Color import RandomColor
class Algo(ABC):
def __init__(self, population_size):
+
+ assert population_size % 2 == 0
+
+ self.half_population_size = population_size // 2
self.population_size = population_size
+
self.set_random_population()
def set_random_population(self):
self.population = []
for i in range(self.population_size):
self.population.append(RandomColor())
def dump(self):
for color in self.population:
print('{} '.format(color), end='')
print(flush=True)
@abstractmethod
def tick(self, deltas):
pass
|
41a0fa6412427dadfb33c77da45bc88c576fa67c
|
rdo/drivers/base.py
|
rdo/drivers/base.py
|
from subprocess import call
class BaseDriver(object):
def __init__(self, config):
self.config = config
def do(self, cmd):
cmd = self.command(cmd)
call(cmd)
def command(self):
raise NotImplementedError()
|
from subprocess import call
class BaseDriver(object):
    """Base class for rdo command drivers.

    Subclasses implement ``command`` to translate a command list into the
    concrete invocation for their transport.
    """

    def __init__(self, config):
        self.config = config

    def working_dir(self, cmd):
        """Join *cmd* into a string, prefixing ``cd`` when a working
        directory is configured."""
        joined = ' '.join(cmd)
        directory = self.config.get('directory')
        return 'cd %s && %s' % (directory, joined) if directory else joined

    def do(self, cmd):
        """Build the concrete command via ``command`` and execute it."""
        call(self.command(cmd))

    def command(self):
        raise NotImplementedError()
|
Add a common function for deriving the working dir.
|
Add a common function for deriving the working dir.
|
Python
|
bsd-3-clause
|
ionrock/rdo
|
from subprocess import call
class BaseDriver(object):
def __init__(self, config):
self.config = config
+ def working_dir(self, cmd):
+ command = ' '.join(cmd)
+ working_dir = self.config.get('directory')
+ if working_dir:
+ command = 'cd %s && %s' % (working_dir, command)
+ return command
+
def do(self, cmd):
cmd = self.command(cmd)
call(cmd)
def command(self):
raise NotImplementedError()
|
Add a common function for deriving the working dir.
|
## Code Before:
from subprocess import call
class BaseDriver(object):
def __init__(self, config):
self.config = config
def do(self, cmd):
cmd = self.command(cmd)
call(cmd)
def command(self):
raise NotImplementedError()
## Instruction:
Add a common function for deriving the working dir.
## Code After:
from subprocess import call
class BaseDriver(object):
def __init__(self, config):
self.config = config
def working_dir(self, cmd):
command = ' '.join(cmd)
working_dir = self.config.get('directory')
if working_dir:
command = 'cd %s && %s' % (working_dir, command)
return command
def do(self, cmd):
cmd = self.command(cmd)
call(cmd)
def command(self):
raise NotImplementedError()
|
from subprocess import call
class BaseDriver(object):
def __init__(self, config):
self.config = config
+ def working_dir(self, cmd):
+ command = ' '.join(cmd)
+ working_dir = self.config.get('directory')
+ if working_dir:
+ command = 'cd %s && %s' % (working_dir, command)
+ return command
+
def do(self, cmd):
cmd = self.command(cmd)
call(cmd)
def command(self):
raise NotImplementedError()
|
a9711705a8c122bf7e7f1edbf9b640c3be5f8510
|
integration-test/552-water-boundary-sort-key.py
|
integration-test/552-water-boundary-sort-key.py
|
assert_has_feature(
19, 83900, 202617, "water",
{"kind": "ocean", "boundary": True, "sort_rank": 205})
|
# NOTE(review): tile (z=16, x=10487, y=25327) -- presumably chosen to hit an
# ocean boundary feature; confirm against the dataset. The boundary must
# carry sort_rank 205.
assert_has_feature(
    16, 10487, 25327, "water",
    {"kind": "ocean", "boundary": True, "sort_rank": 205})
|
Update water boundary sort key test zooms
|
Update water boundary sort key test zooms
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
assert_has_feature(
- 19, 83900, 202617, "water",
+ 16, 10487, 25327, "water",
{"kind": "ocean", "boundary": True, "sort_rank": 205})
|
Update water boundary sort key test zooms
|
## Code Before:
assert_has_feature(
19, 83900, 202617, "water",
{"kind": "ocean", "boundary": True, "sort_rank": 205})
## Instruction:
Update water boundary sort key test zooms
## Code After:
assert_has_feature(
16, 10487, 25327, "water",
{"kind": "ocean", "boundary": True, "sort_rank": 205})
|
assert_has_feature(
- 19, 83900, 202617, "water",
? ^ ^^^^ ^ --
+ 16, 10487, 25327, "water",
? ^ +++ ^ ^^
{"kind": "ocean", "boundary": True, "sort_rank": 205})
|
eb79cce84fbb9d801d6f5087b9216e66d56bfa51
|
scripts/generate_global_kwargs_doc.py
|
scripts/generate_global_kwargs_doc.py
|
from os import path
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
|
from os import path
from pyinfra.api import Config
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
    """Generate ``docs/_deploy_globals.rst`` from ``OPERATION_KWARGS``.

    Walks the kwargs by category and writes one bullet per kwarg; when a
    kwarg config declares a default, it is appended to the key name
    (callable defaults are resolved against a fresh ``Config``).
    """
    pyinfra_config = Config()
    this_dir = path.dirname(path.realpath(__file__))
    docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
    lines = []
    for category, kwarg_configs in OPERATION_KWARGS.items():
        # Uncategorised kwargs are intentionally excluded from the docs.
        if category is None:
            continue
        lines.append('{0}:'.format(category))
        for key, config in kwarg_configs.items():
            # A plain (non-dict) config value is itself the description.
            description = config
            if isinstance(config, dict):
                description = config.get('description')
                default = config.get('default')
                # Callable defaults are computed from the config object.
                if callable(default):
                    default = default(pyinfra_config)
                # Falsy defaults (None, '', 0, False) are not rendered.
                if default:
                    key = '{0}={1}'.format(key, default)
            lines.append(' + ``{0}``: {1}'.format(key, description))
    module_filename = path.join(docs_dir, '_deploy_globals.rst')
    print('--> Writing {0}'.format(module_filename))
    out = '\n'.join(lines)
    with open(module_filename, 'w') as outfile:
        outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
|
Include defaults in generated global args list.
|
Include defaults in generated global args list.
|
Python
|
mit
|
Fizzadar/pyinfra,Fizzadar/pyinfra
|
from os import path
+ from pyinfra.api import Config
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
+ pyinfra_config = Config()
+
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
+ default = config.get('default')
+ if callable(default):
+ default = default(pyinfra_config)
+ if default:
+ key = '{0}={1}'.format(key, default)
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
|
Include defaults in generated global args list.
|
## Code Before:
from os import path
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
## Instruction:
Include defaults in generated global args list.
## Code After:
from os import path
from pyinfra.api import Config
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
pyinfra_config = Config()
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
default = config.get('default')
if callable(default):
default = default(pyinfra_config)
if default:
key = '{0}={1}'.format(key, default)
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
|
from os import path
+ from pyinfra.api import Config
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
+ pyinfra_config = Config()
+
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
+ default = config.get('default')
+ if callable(default):
+ default = default(pyinfra_config)
+ if default:
+ key = '{0}={1}'.format(key, default)
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
|
6518911dad0d22e878d618f9a9a1472de7a7ee1e
|
config/fuzz_pox_mesh.py
|
config/fuzz_pox_mesh.py
|
from experiment_config_lib import ControllerConfig
from sts.topology import MeshTopology
from sts.control_flow import Fuzzer, Interactive
from sts.input_traces.input_logger import InputLogger
from sts.invariant_checker import InvariantChecker
from sts.simulation_state import SimulationConfig
# Use POX as our controller
command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer '''
'''openflow.discovery forwarding.l2_multi '''
'''sts.util.socket_mux.pox_monkeypatcher '''
'''openflow.of_01 --address=__address__ --port=__port__''')
controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")]
topology_class = MeshTopology
topology_params = "num_switches=2"
dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace"
simulation_config = SimulationConfig(controller_configs=controllers,
topology_class=topology_class,
topology_params=topology_params,
dataplane_trace=dataplane_trace,
multiplex_sockets=True)
control_flow = Fuzzer(simulation_config, check_interval=80,
halt_on_violation=False,
input_logger=InputLogger(),
invariant_check=InvariantChecker.check_loops)
#control_flow = Interactive(simulation_config, input_logger=InputLogger())
|
# STS experiment config: fuzz POX's l2_multi forwarding on a 2-switch mesh.
from experiment_config_lib import ControllerConfig
from sts.topology import MeshTopology
from sts.control_flow import Fuzzer, Interactive
from sts.input_traces.input_logger import InputLogger
from sts.invariant_checker import InvariantChecker
from sts.simulation_state import SimulationConfig

# Use POX as our controller
# mock_discovery (rather than openflow.discovery) lets the harness drive link
# discovery itself; it is paired with mock_link_discovery=True below.
command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer '''
                '''openflow.mock_discovery forwarding.l2_multi '''
                '''sts.util.socket_mux.pox_monkeypatcher '''
                '''openflow.of_01 --address=__address__ --port=__port__''')
controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")]
topology_class = MeshTopology
topology_params = "num_switches=2"
# Replay a fixed ping-pong dataplane trace between hosts on one subnet.
dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace"

simulation_config = SimulationConfig(controller_configs=controllers,
                                     topology_class=topology_class,
                                     topology_params=topology_params,
                                     dataplane_trace=dataplane_trace,
                                     multiplex_sockets=True)

# Fuzz, checking for forwarding loops every 80 rounds; inputs are logged so
# runs can be replayed, and violations do not halt the run.
control_flow = Fuzzer(simulation_config, check_interval=80,
                      halt_on_violation=False,
                      input_logger=InputLogger(),
                      invariant_check=InvariantChecker.check_loops,
                      mock_link_discovery=True)
#control_flow = Interactive(simulation_config, input_logger=InputLogger())
|
Use the mock discovery module
|
Use the mock discovery module
|
Python
|
apache-2.0
|
jmiserez/sts,ucb-sts/sts,ucb-sts/sts,jmiserez/sts
|
from experiment_config_lib import ControllerConfig
from sts.topology import MeshTopology
from sts.control_flow import Fuzzer, Interactive
from sts.input_traces.input_logger import InputLogger
from sts.invariant_checker import InvariantChecker
from sts.simulation_state import SimulationConfig
# Use POX as our controller
command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer '''
- '''openflow.discovery forwarding.l2_multi '''
+ '''openflow.mock_discovery forwarding.l2_multi '''
'''sts.util.socket_mux.pox_monkeypatcher '''
'''openflow.of_01 --address=__address__ --port=__port__''')
controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")]
topology_class = MeshTopology
topology_params = "num_switches=2"
dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace"
simulation_config = SimulationConfig(controller_configs=controllers,
topology_class=topology_class,
topology_params=topology_params,
dataplane_trace=dataplane_trace,
multiplex_sockets=True)
control_flow = Fuzzer(simulation_config, check_interval=80,
halt_on_violation=False,
input_logger=InputLogger(),
- invariant_check=InvariantChecker.check_loops)
+ invariant_check=InvariantChecker.check_loops,
+ mock_link_discovery=True)
#control_flow = Interactive(simulation_config, input_logger=InputLogger())
|
Use the mock discovery module
|
## Code Before:
from experiment_config_lib import ControllerConfig
from sts.topology import MeshTopology
from sts.control_flow import Fuzzer, Interactive
from sts.input_traces.input_logger import InputLogger
from sts.invariant_checker import InvariantChecker
from sts.simulation_state import SimulationConfig
# Use POX as our controller
command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer '''
'''openflow.discovery forwarding.l2_multi '''
'''sts.util.socket_mux.pox_monkeypatcher '''
'''openflow.of_01 --address=__address__ --port=__port__''')
controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")]
topology_class = MeshTopology
topology_params = "num_switches=2"
dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace"
simulation_config = SimulationConfig(controller_configs=controllers,
topology_class=topology_class,
topology_params=topology_params,
dataplane_trace=dataplane_trace,
multiplex_sockets=True)
control_flow = Fuzzer(simulation_config, check_interval=80,
halt_on_violation=False,
input_logger=InputLogger(),
invariant_check=InvariantChecker.check_loops)
#control_flow = Interactive(simulation_config, input_logger=InputLogger())
## Instruction:
Use the mock discovery module
## Code After:
from experiment_config_lib import ControllerConfig
from sts.topology import MeshTopology
from sts.control_flow import Fuzzer, Interactive
from sts.input_traces.input_logger import InputLogger
from sts.invariant_checker import InvariantChecker
from sts.simulation_state import SimulationConfig
# Use POX as our controller
command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer '''
'''openflow.mock_discovery forwarding.l2_multi '''
'''sts.util.socket_mux.pox_monkeypatcher '''
'''openflow.of_01 --address=__address__ --port=__port__''')
controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")]
topology_class = MeshTopology
topology_params = "num_switches=2"
dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace"
simulation_config = SimulationConfig(controller_configs=controllers,
topology_class=topology_class,
topology_params=topology_params,
dataplane_trace=dataplane_trace,
multiplex_sockets=True)
control_flow = Fuzzer(simulation_config, check_interval=80,
halt_on_violation=False,
input_logger=InputLogger(),
invariant_check=InvariantChecker.check_loops,
mock_link_discovery=True)
#control_flow = Interactive(simulation_config, input_logger=InputLogger())
|
from experiment_config_lib import ControllerConfig
from sts.topology import MeshTopology
from sts.control_flow import Fuzzer, Interactive
from sts.input_traces.input_logger import InputLogger
from sts.invariant_checker import InvariantChecker
from sts.simulation_state import SimulationConfig
# Use POX as our controller
command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer '''
- '''openflow.discovery forwarding.l2_multi '''
+ '''openflow.mock_discovery forwarding.l2_multi '''
? +++++
'''sts.util.socket_mux.pox_monkeypatcher '''
'''openflow.of_01 --address=__address__ --port=__port__''')
controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")]
topology_class = MeshTopology
topology_params = "num_switches=2"
dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace"
simulation_config = SimulationConfig(controller_configs=controllers,
topology_class=topology_class,
topology_params=topology_params,
dataplane_trace=dataplane_trace,
multiplex_sockets=True)
control_flow = Fuzzer(simulation_config, check_interval=80,
halt_on_violation=False,
input_logger=InputLogger(),
- invariant_check=InvariantChecker.check_loops)
? ^
+ invariant_check=InvariantChecker.check_loops,
? ^
+ mock_link_discovery=True)
#control_flow = Interactive(simulation_config, input_logger=InputLogger())
|
ead2f795480ae7e671c93550e55cf9e106b2f306
|
hubblestack_nova/pkgng_audit.py
|
hubblestack_nova/pkgng_audit.py
|
'''
Hubble Nova plugin for FreeBSD pkgng audit
:maintainer: HubbleStack
:maturity: 20160421
:platform: FreeBSD
:requires: SaltStack
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
__tags__ = None
def __virtual__():
if 'FreeBSD' not in __grains__['os']:
return False, 'This audit module only runs on FreeBSD'
global __tags__
__tags__ = ['freebsd-pkg-audit']
return True
def audit(tags, verbose=False):
'''
Run the pkg.audit command
'''
ret = {'Success': [], 'Failure': []}
salt_ret = __salt__['pkg.audit']()
if '0 problem(s)' not in salt_ret:
ret['Failure'].append(salt_ret)
else:
ret['Success'].append(salt_ret)
return ret
|
'''
Hubble Nova plugin for FreeBSD pkgng audit
:maintainer: HubbleStack
:maturity: 20160421
:platform: FreeBSD
:requires: SaltStack
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
def __virtual__():
if 'FreeBSD' not in __grains__['os']:
return False, 'This audit module only runs on FreeBSD'
return True
def audit(data_list, tags, verbose=False):
'''
Run the pkg.audit command
'''
ret = {'Success': [], 'Failure': []}
__tags__ = []
for data in data_list:
if 'freebsd-pkg' in data:
__tags__ = ['freebsd-pkg-audit']
break
if not __tags__:
# No yaml data found, don't do any work
return ret
salt_ret = __salt__['pkg.audit']()
if '0 problem(s)' not in salt_ret:
ret['Failure'].append(salt_ret)
else:
ret['Success'].append(salt_ret)
return ret
|
Update frebsd-pkg-audit to rely on yaml data and take data from hubble.py
|
Update frebsd-pkg-audit to rely on yaml data and take data from hubble.py
|
Python
|
apache-2.0
|
HubbleStack/Nova,avb76/Nova,SaltyCharles/Nova,cedwards/Nova
|
'''
Hubble Nova plugin for FreeBSD pkgng audit
:maintainer: HubbleStack
:maturity: 20160421
:platform: FreeBSD
:requires: SaltStack
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
- __tags__ = None
-
def __virtual__():
if 'FreeBSD' not in __grains__['os']:
return False, 'This audit module only runs on FreeBSD'
- global __tags__
- __tags__ = ['freebsd-pkg-audit']
return True
- def audit(tags, verbose=False):
+ def audit(data_list, tags, verbose=False):
'''
Run the pkg.audit command
'''
ret = {'Success': [], 'Failure': []}
+
+ __tags__ = []
+ for data in data_list:
+ if 'freebsd-pkg' in data:
+ __tags__ = ['freebsd-pkg-audit']
+ break
+
+ if not __tags__:
+ # No yaml data found, don't do any work
+ return ret
salt_ret = __salt__['pkg.audit']()
if '0 problem(s)' not in salt_ret:
ret['Failure'].append(salt_ret)
else:
ret['Success'].append(salt_ret)
return ret
|
Update frebsd-pkg-audit to rely on yaml data and take data from hubble.py
|
## Code Before:
'''
Hubble Nova plugin for FreeBSD pkgng audit
:maintainer: HubbleStack
:maturity: 20160421
:platform: FreeBSD
:requires: SaltStack
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
__tags__ = None
def __virtual__():
if 'FreeBSD' not in __grains__['os']:
return False, 'This audit module only runs on FreeBSD'
global __tags__
__tags__ = ['freebsd-pkg-audit']
return True
def audit(tags, verbose=False):
'''
Run the pkg.audit command
'''
ret = {'Success': [], 'Failure': []}
salt_ret = __salt__['pkg.audit']()
if '0 problem(s)' not in salt_ret:
ret['Failure'].append(salt_ret)
else:
ret['Success'].append(salt_ret)
return ret
## Instruction:
Update frebsd-pkg-audit to rely on yaml data and take data from hubble.py
## Code After:
'''
Hubble Nova plugin for FreeBSD pkgng audit
:maintainer: HubbleStack
:maturity: 20160421
:platform: FreeBSD
:requires: SaltStack
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
def __virtual__():
if 'FreeBSD' not in __grains__['os']:
return False, 'This audit module only runs on FreeBSD'
return True
def audit(data_list, tags, verbose=False):
'''
Run the pkg.audit command
'''
ret = {'Success': [], 'Failure': []}
__tags__ = []
for data in data_list:
if 'freebsd-pkg' in data:
__tags__ = ['freebsd-pkg-audit']
break
if not __tags__:
# No yaml data found, don't do any work
return ret
salt_ret = __salt__['pkg.audit']()
if '0 problem(s)' not in salt_ret:
ret['Failure'].append(salt_ret)
else:
ret['Success'].append(salt_ret)
return ret
|
'''
Hubble Nova plugin for FreeBSD pkgng audit
:maintainer: HubbleStack
:maturity: 20160421
:platform: FreeBSD
:requires: SaltStack
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
- __tags__ = None
-
def __virtual__():
if 'FreeBSD' not in __grains__['os']:
return False, 'This audit module only runs on FreeBSD'
- global __tags__
- __tags__ = ['freebsd-pkg-audit']
return True
- def audit(tags, verbose=False):
+ def audit(data_list, tags, verbose=False):
? +++++++++++
'''
Run the pkg.audit command
'''
ret = {'Success': [], 'Failure': []}
+
+ __tags__ = []
+ for data in data_list:
+ if 'freebsd-pkg' in data:
+ __tags__ = ['freebsd-pkg-audit']
+ break
+
+ if not __tags__:
+ # No yaml data found, don't do any work
+ return ret
salt_ret = __salt__['pkg.audit']()
if '0 problem(s)' not in salt_ret:
ret['Failure'].append(salt_ret)
else:
ret['Success'].append(salt_ret)
return ret
|
6d663d1d0172b716e0dccc1f617b5a09b2905b67
|
script/upload-windows-pdb.py
|
script/upload-windows-pdb.py
|
import os
import glob
from lib.util import execute, rm_rf, safe_mkdir, s3put, s3_config
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SYMBOLS_DIR = 'dist\\symbols'
PDB_LIST = [
'out\\Release\\atom.exe.pdb',
'vendor\\brightray\\vendor\\download\\libchromiumcontent\\Release\\chromiumcontent.dll.pdb',
]
def main():
os.chdir(SOURCE_ROOT)
rm_rf(SYMBOLS_DIR)
safe_mkdir(SYMBOLS_DIR)
for pdb in PDB_LIST:
run_symstore(pdb, SYMBOLS_DIR, 'AtomShell')
bucket, access_key, secret_key = s3_config()
files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
def upload_symbols(bucket, access_key, secret_key, files):
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols', files)
if __name__ == '__main__':
import sys
sys.exit(main())
|
import os
import glob
from lib.util import execute, rm_rf, safe_mkdir, s3put, s3_config
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SYMBOLS_DIR = 'dist\\symbols'
PDB_LIST = [
'out\\Release\\atom.exe.pdb',
'vendor\\brightray\\vendor\\download\\libchromiumcontent\\Release\\chromiumcontent.dll.pdb',
]
def main():
os.chdir(SOURCE_ROOT)
rm_rf(SYMBOLS_DIR)
safe_mkdir(SYMBOLS_DIR)
for pdb in PDB_LIST:
run_symstore(pdb, SYMBOLS_DIR, 'AtomShell')
bucket, access_key, secret_key = s3_config()
files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
files = [f.lower() for f in files]
upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
def upload_symbols(bucket, access_key, secret_key, files):
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols', files)
if __name__ == '__main__':
import sys
sys.exit(main())
|
Use lowercase for symbol paths
|
Use lowercase for symbol paths
|
Python
|
mit
|
wolfflow/electron,shockone/electron,ianscrivener/electron,oiledCode/electron,christian-bromann/electron,fffej/electron,darwin/electron,digideskio/electron,jannishuebl/electron,darwin/electron,lrlna/electron,faizalpribadi/electron,lzpfmh/electron,rsvip/electron,mubassirhayat/electron,bwiggs/electron,jiaz/electron,gstack/infinium-shell,bobwol/electron,meowlab/electron,egoist/electron,simongregory/electron,vHanda/electron,felixrieseberg/electron,nekuz0r/electron,wolfflow/electron,rreimann/electron,fritx/electron,destan/electron,vaginessa/electron,michaelchiche/electron,stevemao/electron,John-Lin/electron,JussMee15/electron,jlhbaseball15/electron,rhencke/electron,stevemao/electron,brave/electron,fomojola/electron,thompsonemerson/electron,gamedevsam/electron,shiftkey/electron,Neron-X5/electron,fritx/electron,bruce/electron,voidbridge/electron,BionicClick/electron,lzpfmh/electron,howmuchcomputer/electron,RIAEvangelist/electron,Gerhut/electron,howmuchcomputer/electron,tinydew4/electron,maxogden/atom-shell,seanchas116/electron,joaomoreno/atom-shell,destan/electron,mattdesl/electron,iftekeriba/electron,brave/muon,the-ress/electron,bright-sparks/electron,christian-bromann/electron,rprichard/electron,darwin/electron,jonatasfreitasv/electron,adamjgray/electron,ankitaggarwal011/electron,wan-qy/electron,medixdev/electron,rsvip/electron,bbondy/electron,nagyistoce/electron-atom-shell,medixdev/electron,mhkeller/electron,eriser/electron,deed02392/electron,rhencke/electron,seanchas116/electron,dkfiresky/electron,jsutcodes/electron,pombredanne/electron,astoilkov/electron,MaxGraey/electron,deepak1556/atom-shell,simonfork/electron,gbn972/electron,greyhwndz/electron,edulan/electron,roadev/electron,robinvandernoord/electron,kazupon/electron,rajatsingla28/electron,howmuchcomputer/electron,meowlab/electron,tomashanacek/electron,chriskdon/electron,evgenyzinoviev/electron,joneit/electron,gabriel/electron,jiaz/electron,arturts/electron,greyhwndz/electron,edulan/electron,micalan/electron,kazupon
/electron,d-salas/electron,eric-seekas/electron,thompsonemerson/electron,RobertJGabriel/electron,vaginessa/electron,electron/electron,iftekeriba/electron,natgolov/electron,xfstudio/electron,ervinb/electron,xfstudio/electron,xiruibing/electron,arturts/electron,renaesop/electron,roadev/electron,fffej/electron,thomsonreuters/electron,coderhaoxin/electron,twolfson/electron,evgenyzinoviev/electron,RobertJGabriel/electron,fomojola/electron,jsutcodes/electron,nagyistoce/electron-atom-shell,leftstick/electron,noikiy/electron,BionicClick/electron,LadyNaggaga/electron,gstack/infinium-shell,yalexx/electron,webmechanicx/electron,rajatsingla28/electron,greyhwndz/electron,webmechanicx/electron,carsonmcdonald/electron,bwiggs/electron,vaginessa/electron,jtburke/electron,beni55/electron,eriser/electron,JesselJohn/electron,jtburke/electron,jaanus/electron,Andrey-Pavlov/electron,robinvandernoord/electron,kokdemo/electron,tincan24/electron,miniak/electron,rajatsingla28/electron,noikiy/electron,nicobot/electron,setzer777/electron,gabriel/electron,zhakui/electron,jonatasfreitasv/electron,synaptek/electron,subblue/electron,kokdemo/electron,Evercoder/electron,aecca/electron,neutrous/electron,ianscrivener/electron,stevekinney/electron,adcentury/electron,Floato/electron,deepak1556/atom-shell,bbondy/electron,nicobot/electron,bitemyapp/electron,jacksondc/electron,jannishuebl/electron,mhkeller/electron,anko/electron,RIAEvangelist/electron,SufianHassan/electron,eric-seekas/electron,mattotodd/electron,LadyNaggaga/electron,shaundunne/electron,vHanda/electron,pombredanne/electron,chrisswk/electron,DivyaKMenon/electron,stevekinney/electron,nicholasess/electron,Evercoder/electron,bright-sparks/electron,GoooIce/electron,zhakui/electron,trankmichael/electron,yan-foto/electron,jjz/electron,kikong/electron,gabrielPeart/electron,renaesop/electron,leolujuyi/electron,eric-seekas/electron,farmisen/electron,sircharleswatson/electron,tinydew4/electron,natgolov/electron,kikong/electron,pirafrank/electron,carson
mcdonald/electron,jjz/electron,fffej/electron,adamjgray/electron,leethomas/electron,leolujuyi/electron,chriskdon/electron,meowlab/electron,Evercoder/electron,maxogden/atom-shell,yan-foto/electron,jlord/electron,stevekinney/electron,jiaz/electron,cos2004/electron,pandoraui/electron,GoooIce/electron,thingsinjars/electron,shockone/electron,electron/electron,digideskio/electron,sky7sea/electron,farmisen/electron,bpasero/electron,gbn972/electron,nekuz0r/electron,kenmozi/electron,voidbridge/electron,aaron-goshine/electron,aliib/electron,faizalpribadi/electron,beni55/electron,adamjgray/electron,wan-qy/electron,neutrous/electron,the-ress/electron,shockone/electron,shockone/electron,pandoraui/electron,bbondy/electron,miniak/electron,shockone/electron,bitemyapp/electron,systembugtj/electron,neutrous/electron,yalexx/electron,tylergibson/electron,gabriel/electron,felixrieseberg/electron,egoist/electron,shaundunne/electron,Rokt33r/electron,gamedevsam/electron,mhkeller/electron,preco21/electron,soulteary/electron,robinvandernoord/electron,DivyaKMenon/electron,gabrielPeart/electron,bright-sparks/electron,mrwizard82d1/electron,yan-foto/electron,renaesop/electron,the-ress/electron,subblue/electron,tinydew4/electron,bwiggs/electron,chrisswk/electron,jhen0409/electron,lzpfmh/electron,brave/muon,trankmichael/electron,RobertJGabriel/electron,renaesop/electron,roadev/electron,tonyganch/electron,baiwyc119/electron,stevemao/electron,jonatasfreitasv/electron,mubassirhayat/electron,arturts/electron,pandoraui/electron,nekuz0r/electron,vipulroxx/electron,jhen0409/electron,jhen0409/electron,xiruibing/electron,adcentury/electron,fomojola/electron,coderhaoxin/electron,joneit/electron,dahal/electron,nicobot/electron,bwiggs/electron,aaron-goshine/electron,howmuchcomputer/electron,bpasero/electron,cos2004/electron,deed02392/electron,fireball-x/atom-shell,dahal/electron,jtburke/electron,tomashanacek/electron,bobwol/electron,farmisen/electron,evgenyzinoviev/electron,Zagorakiss/electron,greyhwndz/elect
ron,shaundunne/electron,Faiz7412/electron,howmuchcomputer/electron,RIAEvangelist/electron,takashi/electron,davazp/electron,jsutcodes/electron,bobwol/electron,aaron-goshine/electron,aaron-goshine/electron,mirrh/electron,michaelchiche/electron,BionicClick/electron,baiwyc119/electron,arturts/electron,medixdev/electron,matiasinsaurralde/electron,bpasero/electron,MaxWhere/electron,shennushi/electron,aliib/electron,natgolov/electron,kcrt/electron,jiaz/electron,adcentury/electron,xiruibing/electron,chriskdon/electron,Ivshti/electron,mrwizard82d1/electron,jjz/electron,leftstick/electron,twolfson/electron,brave/muon,micalan/electron,jaanus/electron,nekuz0r/electron,micalan/electron,tincan24/electron,shockone/electron,fabien-d/electron,twolfson/electron,kcrt/electron,trankmichael/electron,LadyNaggaga/electron,LadyNaggaga/electron,Zagorakiss/electron,egoist/electron,hokein/atom-shell,JesselJohn/electron,ianscrivener/electron,voidbridge/electron,davazp/electron,saronwei/electron,MaxWhere/electron,rreimann/electron,maxogden/atom-shell,shennushi/electron,tincan24/electron,jsutcodes/electron,takashi/electron,adamjgray/electron,vHanda/electron,sshiting/electron,LadyNaggaga/electron,zhakui/electron,MaxGraey/electron,ianscrivener/electron,GoooIce/electron,jsutcodes/electron,jaanus/electron,chriskdon/electron,Gerhut/electron,wolfflow/electron,MaxWhere/electron,John-Lin/electron,deed02392/electron,mrwizard82d1/electron,abhishekgahlot/electron,etiktin/electron,brave/electron,thingsinjars/electron,mjaniszew/electron,rreimann/electron,John-Lin/electron,kenmozi/electron,arturts/electron,cos2004/electron,d-salas/electron,BionicClick/electron,biblerule/UMCTelnetHub,jonatasfreitasv/electron,Neron-X5/electron,Evercoder/electron,kikong/electron,thompsonemerson/electron,jonatasfreitasv/electron,jannishuebl/electron,shennushi/electron,eric-seekas/electron,bwiggs/electron,dkfiresky/electron,jlord/electron,Neron-X5/electron,icattlecoder/electron,Zagorakiss/electron,tincan24/electron,joneit/electron
,kostia/electron,pombredanne/electron,Rokt33r/electron,matiasinsaurralde/electron,jlhbaseball15/electron,dahal/electron,systembugtj/electron,tinydew4/electron,gstack/infinium-shell,bruce/electron,BionicClick/electron,gamedevsam/electron,bpasero/electron,bitemyapp/electron,micalan/electron,dongjoon-hyun/electron,twolfson/electron,aecca/electron,timruffles/electron,subblue/electron,nekuz0r/electron,minggo/electron,jtburke/electron,rsvip/electron,zhakui/electron,bbondy/electron,preco21/electron,brenca/electron,vaginessa/electron,leolujuyi/electron,cqqccqc/electron,jannishuebl/electron,jlord/electron,jcblw/electron,zhakui/electron,rhencke/electron,meowlab/electron,MaxGraey/electron,trigrass2/electron,thompsonemerson/electron,RIAEvangelist/electron,Andrey-Pavlov/electron,mattdesl/electron,bpasero/electron,soulteary/electron,shiftkey/electron,rreimann/electron,jlhbaseball15/electron,benweissmann/electron,trigrass2/electron,chrisswk/electron,tonyganch/electron,LadyNaggaga/electron,meowlab/electron,roadev/electron,tomashanacek/electron,pombredanne/electron,seanchas116/electron,rprichard/electron,thingsinjars/electron,Rokt33r/electron,MaxGraey/electron,takashi/electron,jtburke/electron,cqqccqc/electron,Jonekee/electron,Jacobichou/electron,sky7sea/electron,wan-qy/electron,voidbridge/electron,maxogden/atom-shell,dkfiresky/electron,DivyaKMenon/electron,simongregory/electron,Jacobichou/electron,lrlna/electron,sshiting/electron,gerhardberger/electron,yalexx/electron,gerhardberger/electron,coderhaoxin/electron,vipulroxx/electron,Ivshti/electron,micalan/electron,trigrass2/electron,evgenyzinoviev/electron,leethomas/electron,astoilkov/electron,cqqccqc/electron,systembugtj/electron,Floato/electron,gerhardberger/electron,tylergibson/electron,cqqccqc/electron,synaptek/electron,tincan24/electron,rreimann/electron,iftekeriba/electron,preco21/electron,gbn972/electron,mjaniszew/electron,joaomoreno/atom-shell,gabriel/electron,vipulroxx/electron,kazupon/electron,Zagorakiss/electron,gamedevsam
/electron,tincan24/electron,mubassirhayat/electron,electron/electron,deed02392/electron,bwiggs/electron,carsonmcdonald/electron,systembugtj/electron,ervinb/electron,posix4e/electron,synaptek/electron,aecca/electron,Jacobichou/electron,preco21/electron,yalexx/electron,bruce/electron,etiktin/electron,Andrey-Pavlov/electron,sshiting/electron,felixrieseberg/electron,rprichard/electron,fabien-d/electron,GoooIce/electron,adamjgray/electron,maxogden/atom-shell,vaginessa/electron,wan-qy/electron,nicholasess/electron,dkfiresky/electron,fabien-d/electron,beni55/electron,takashi/electron,deed02392/electron,stevekinney/electron,gabriel/electron,michaelchiche/electron,saronwei/electron,mjaniszew/electron,voidbridge/electron,Faiz7412/electron,carsonmcdonald/electron,Faiz7412/electron,baiwyc119/electron,posix4e/electron,adamjgray/electron,vipulroxx/electron,the-ress/electron,anko/electron,etiktin/electron,seanchas116/electron,MaxWhere/electron,aichingm/electron,RIAEvangelist/electron,Jacobichou/electron,jaanus/electron,nicobot/electron,xfstudio/electron,hokein/atom-shell,coderhaoxin/electron,neutrous/electron,SufianHassan/electron,Rokt33r/electron,RobertJGabriel/electron,tylergibson/electron,lzpfmh/electron,ankitaggarwal011/electron,joaomoreno/atom-shell,medixdev/electron,Floato/electron,mjaniszew/electron,jonatasfreitasv/electron,gamedevsam/electron,stevekinney/electron,gerhardberger/electron,jcblw/electron,timruffles/electron,shiftkey/electron,noikiy/electron,yalexx/electron,cqqccqc/electron,Gerhut/electron,tonyganch/electron,IonicaBizauKitchen/electron,joaomoreno/atom-shell,ervinb/electron,bitemyapp/electron,noikiy/electron,yan-foto/electron,smczk/electron,ankitaggarwal011/electron,trankmichael/electron,stevekinney/electron,Gerhut/electron,mirrh/electron,John-Lin/electron,mirrh/electron,xfstudio/electron,pandoraui/electron,sircharleswatson/electron,mattotodd/electron,JussMee15/electron,gabrielPeart/electron,natgolov/electron,tomashanacek/electron,cos2004/electron,abhishekgahlot
/electron,hokein/atom-shell,astoilkov/electron,jannishuebl/electron,baiwyc119/electron,webmechanicx/electron,baiwyc119/electron,setzer777/electron,jacksondc/electron,farmisen/electron,posix4e/electron,soulteary/electron,natgolov/electron,anko/electron,the-ress/electron,nagyistoce/electron-atom-shell,joneit/electron,ianscrivener/electron,eric-seekas/electron,ervinb/electron,fabien-d/electron,aichingm/electron,bbondy/electron,michaelchiche/electron,Gerhut/electron,mhkeller/electron,preco21/electron,vaginessa/electron,dkfiresky/electron,simonfork/electron,setzer777/electron,systembugtj/electron,thomsonreuters/electron,d-salas/electron,trigrass2/electron,xiruibing/electron,kostia/electron,felixrieseberg/electron,cos2004/electron,tinydew4/electron,biblerule/UMCTelnetHub,minggo/electron,Zagorakiss/electron,carsonmcdonald/electron,stevemao/electron,JesselJohn/electron,timruffles/electron,synaptek/electron,jlhbaseball15/electron,Jonekee/electron,bruce/electron,fomojola/electron,eric-seekas/electron,fireball-x/atom-shell,greyhwndz/electron,jacksondc/electron,matiasinsaurralde/electron,saronwei/electron,kazupon/electron,rajatsingla28/electron,brenca/electron,dongjoon-hyun/electron,IonicaBizauKitchen/electron,arusakov/electron,nekuz0r/electron,kazupon/electron,thomsonreuters/electron,mattdesl/electron,simonfork/electron,smczk/electron,nicholasess/electron,Neron-X5/electron,eriser/electron,GoooIce/electron,bright-sparks/electron,faizalpribadi/electron,bitemyapp/electron,mjaniszew/electron,eriser/electron,soulteary/electron,synaptek/electron,gabrielPeart/electron,simonfork/electron,mirrh/electron,mhkeller/electron,nicholasess/electron,fritx/electron,howmuchcomputer/electron,tonyganch/electron,posix4e/electron,kenmozi/electron,xfstudio/electron,JussMee15/electron,Jonekee/electron,aecca/electron,sircharleswatson/electron,icattlecoder/electron,jiaz/electron,renaesop/electron,rreimann/electron,Andrey-Pavlov/electron,thomsonreuters/electron,rhencke/electron,simongregory/electron,Robe
rtJGabriel/electron,simonfork/electron,Andrey-Pavlov/electron,nagyistoce/electron-atom-shell,wan-qy/electron,pombredanne/electron,thingsinjars/electron,evgenyzinoviev/electron,lrlna/electron,setzer777/electron,benweissmann/electron,robinvandernoord/electron,aliib/electron,Andrey-Pavlov/electron,darwin/electron,shiftkey/electron,twolfson/electron,benweissmann/electron,shiftkey/electron,DivyaKMenon/electron,smczk/electron,wan-qy/electron,egoist/electron,pirafrank/electron,jcblw/electron,tinydew4/electron,leethomas/electron,evgenyzinoviev/electron,Jonekee/electron,jjz/electron,vipulroxx/electron,chrisswk/electron,minggo/electron,miniak/electron,RobertJGabriel/electron,fabien-d/electron,biblerule/UMCTelnetHub,kikong/electron,neutrous/electron,mirrh/electron,JussMee15/electron,dongjoon-hyun/electron,seanchas116/electron,minggo/electron,wolfflow/electron,aliib/electron,xiruibing/electron,jcblw/electron,adcentury/electron,rsvip/electron,oiledCode/electron,MaxWhere/electron,Neron-X5/electron,rajatsingla28/electron,faizalpribadi/electron,digideskio/electron,xiruibing/electron,DivyaKMenon/electron,Rokt33r/electron,bright-sparks/electron,GoooIce/electron,Neron-X5/electron,electron/electron,brave/electron,deed02392/electron,vHanda/electron,destan/electron,DivyaKMenon/electron,jlhbaseball15/electron,sky7sea/electron,fffej/electron,oiledCode/electron,jiaz/electron,jlord/electron,brave/muon,kostia/electron,bruce/electron,setzer777/electron,mjaniszew/electron,sircharleswatson/electron,saronwei/electron,zhakui/electron,deepak1556/atom-shell,dahal/electron,ervinb/electron,John-Lin/electron,chrisswk/electron,jjz/electron,jacksondc/electron,d-salas/electron,destan/electron,soulteary/electron,Ivshti/electron,nicholasess/electron,baiwyc119/electron,edulan/electron,natgolov/electron,posix4e/electron,leftstick/electron,bright-sparks/electron,farmisen/electron,beni55/electron,jjz/electron,yalexx/electron,Jacobichou/electron,fffej/electron,leethomas/electron,gbn972/electron,matiasinsaurralde
/electron,Ivshti/electron,bobwol/electron,Floato/electron,IonicaBizauKitchen/electron,stevemao/electron,fireball-x/atom-shell,pirafrank/electron,SufianHassan/electron,etiktin/electron,JesselJohn/electron,vHanda/electron,icattlecoder/electron,gamedevsam/electron,stevemao/electron,biblerule/UMCTelnetHub,destan/electron,shaundunne/electron,anko/electron,shennushi/electron,fritx/electron,kostia/electron,faizalpribadi/electron,anko/electron,meowlab/electron,biblerule/UMCTelnetHub,seanchas116/electron,jacksondc/electron,leolujuyi/electron,robinvandernoord/electron,MaxWhere/electron,IonicaBizauKitchen/electron,shennushi/electron,simongregory/electron,Faiz7412/electron,rhencke/electron,miniak/electron,mrwizard82d1/electron,kokdemo/electron,michaelchiche/electron,faizalpribadi/electron,greyhwndz/electron,electron/electron,mattotodd/electron,fomojola/electron,arusakov/electron,arusakov/electron,JesselJohn/electron,nicobot/electron,bpasero/electron,kokdemo/electron,SufianHassan/electron,jlord/electron,roadev/electron,christian-bromann/electron,sky7sea/electron,IonicaBizauKitchen/electron,trigrass2/electron,adcentury/electron,adcentury/electron,minggo/electron,rajatsingla28/electron,bitemyapp/electron,sshiting/electron,thomsonreuters/electron,iftekeriba/electron,mattdesl/electron,gerhardberger/electron,the-ress/electron,ankitaggarwal011/electron,rprichard/electron,SufianHassan/electron,Zagorakiss/electron,leolujuyi/electron,benweissmann/electron,Ivshti/electron,thompsonemerson/electron,wolfflow/electron,gerhardberger/electron,hokein/atom-shell,fffej/electron,BionicClick/electron,coderhaoxin/electron,chriskdon/electron,destan/electron,abhishekgahlot/electron,lrlna/electron,smczk/electron,kenmozi/electron,micalan/electron,kcrt/electron,kcrt/electron,trankmichael/electron,simongregory/electron,pirafrank/electron,jcblw/electron,medixdev/electron,aliib/electron,digideskio/electron,kcrt/electron,oiledCode/electron,pombredanne/electron,jaanus/electron,aaron-goshine/electron,christian-
bromann/electron,jlhbaseball15/electron,icattlecoder/electron,jannishuebl/electron,abhishekgahlot/electron,gabrielPeart/electron,bobwol/electron,davazp/electron,mrwizard82d1/electron,joneit/electron,miniak/electron,brenca/electron,darwin/electron,icattlecoder/electron,brave/electron,sky7sea/electron,smczk/electron,synaptek/electron,eriser/electron,Evercoder/electron,abhishekgahlot/electron,shennushi/electron,davazp/electron,tonyganch/electron,shiftkey/electron,RIAEvangelist/electron,coderhaoxin/electron,fritx/electron,aecca/electron,MaxGraey/electron,kostia/electron,lrlna/electron,tomashanacek/electron,oiledCode/electron,jaanus/electron,lzpfmh/electron,gabrielPeart/electron,yan-foto/electron,dongjoon-hyun/electron,medixdev/electron,arturts/electron,felixrieseberg/electron,nagyistoce/electron-atom-shell,beni55/electron,jacksondc/electron,kenmozi/electron,miniak/electron,tylergibson/electron,kikong/electron,gbn972/electron,fritx/electron,Jonekee/electron,felixrieseberg/electron,simonfork/electron,thingsinjars/electron,smczk/electron,deepak1556/atom-shell,kokdemo/electron,dongjoon-hyun/electron,neutrous/electron,davazp/electron,Jacobichou/electron,d-salas/electron,matiasinsaurralde/electron,systembugtj/electron,JesselJohn/electron,mrwizard82d1/electron,Gerhut/electron,dongjoon-hyun/electron,IonicaBizauKitchen/electron,lrlna/electron,brenca/electron,biblerule/UMCTelnetHub,saronwei/electron,dahal/electron,rsvip/electron,iftekeriba/electron,trankmichael/electron,benweissmann/electron,michaelchiche/electron,bruce/electron,setzer777/electron,SufianHassan/electron,etiktin/electron,the-ress/electron,mattdesl/electron,electron/electron,noikiy/electron,lzpfmh/electron,Faiz7412/electron,brave/electron,Jonekee/electron,gerhardberger/electron,mattotodd/electron,fomojola/electron,cqqccqc/electron,mattdesl/electron,arusakov/electron,deepak1556/atom-shell,xfstudio/electron,kcrt/electron,soulteary/electron,farmisen/electron,brenca/electron,robinvandernoord/electron,fireball-x/atom-she
ll,gstack/infinium-shell,mirrh/electron,ankitaggarwal011/electron,roadev/electron,christian-bromann/electron,bpasero/electron,egoist/electron,egoist/electron,timruffles/electron,tomashanacek/electron,webmechanicx/electron,gbn972/electron,subblue/electron,bbondy/electron,joneit/electron,icattlecoder/electron,jcblw/electron,tylergibson/electron,aaron-goshine/electron,webmechanicx/electron,sshiting/electron,anko/electron,thomsonreuters/electron,gstack/infinium-shell,d-salas/electron,brenca/electron,Floato/electron,aecca/electron,pandoraui/electron,thompsonemerson/electron,iftekeriba/electron,mubassirhayat/electron,subblue/electron,simongregory/electron,tonyganch/electron,leethomas/electron,leftstick/electron,sshiting/electron,takashi/electron,noikiy/electron,joaomoreno/atom-shell,thingsinjars/electron,pirafrank/electron,JussMee15/electron,vHanda/electron,JussMee15/electron,yan-foto/electron,kokdemo/electron,jhen0409/electron,arusakov/electron,christian-bromann/electron,edulan/electron,ianscrivener/electron,mhkeller/electron,tylergibson/electron,pandoraui/electron,arusakov/electron,beni55/electron,shaundunne/electron,dkfiresky/electron,carsonmcdonald/electron,bobwol/electron,renaesop/electron,timruffles/electron,Floato/electron,ankitaggarwal011/electron,wolfflow/electron,oiledCode/electron,aichingm/electron,posix4e/electron,twolfson/electron,kenmozi/electron,mattotodd/electron,etiktin/electron,jtburke/electron,sky7sea/electron,davazp/electron,chriskdon/electron,leolujuyi/electron,eriser/electron,nicobot/electron,brave/muon,Rokt33r/electron,leftstick/electron,brave/electron,digideskio/electron,edulan/electron,mattotodd/electron,gabriel/electron,webmechanicx/electron,kostia/electron,dahal/electron,joaomoreno/atom-shell,edulan/electron,benweissmann/electron,pirafrank/electron,mubassirhayat/electron,aliib/electron,sircharleswatson/electron,vipulroxx/electron,rhencke/electron,aichingm/electron,shaundunne/electron,jhen0409/electron,Evercoder/electron,John-Lin/electron,astoilk
ov/electron,brave/muon,cos2004/electron,abhishekgahlot/electron,preco21/electron,digideskio/electron,matiasinsaurralde/electron,trigrass2/electron,nicholasess/electron,astoilkov/electron,jhen0409/electron,hokein/atom-shell,leethomas/electron,minggo/electron,fireball-x/atom-shell,astoilkov/electron,leftstick/electron,voidbridge/electron,jsutcodes/electron,subblue/electron,kazupon/electron,aichingm/electron,sircharleswatson/electron,ervinb/electron,takashi/electron,saronwei/electron,electron/electron,aichingm/electron
|
import os
import glob
from lib.util import execute, rm_rf, safe_mkdir, s3put, s3_config
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SYMBOLS_DIR = 'dist\\symbols'
PDB_LIST = [
'out\\Release\\atom.exe.pdb',
'vendor\\brightray\\vendor\\download\\libchromiumcontent\\Release\\chromiumcontent.dll.pdb',
]
def main():
os.chdir(SOURCE_ROOT)
rm_rf(SYMBOLS_DIR)
safe_mkdir(SYMBOLS_DIR)
for pdb in PDB_LIST:
run_symstore(pdb, SYMBOLS_DIR, 'AtomShell')
bucket, access_key, secret_key = s3_config()
files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
+ files = [f.lower() for f in files]
upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
def upload_symbols(bucket, access_key, secret_key, files):
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols', files)
if __name__ == '__main__':
import sys
sys.exit(main())
|
Use lowercase for symbol paths
|
## Code Before:
import os
import glob
from lib.util import execute, rm_rf, safe_mkdir, s3put, s3_config
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SYMBOLS_DIR = 'dist\\symbols'
PDB_LIST = [
'out\\Release\\atom.exe.pdb',
'vendor\\brightray\\vendor\\download\\libchromiumcontent\\Release\\chromiumcontent.dll.pdb',
]
def main():
os.chdir(SOURCE_ROOT)
rm_rf(SYMBOLS_DIR)
safe_mkdir(SYMBOLS_DIR)
for pdb in PDB_LIST:
run_symstore(pdb, SYMBOLS_DIR, 'AtomShell')
bucket, access_key, secret_key = s3_config()
files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
def upload_symbols(bucket, access_key, secret_key, files):
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols', files)
if __name__ == '__main__':
import sys
sys.exit(main())
## Instruction:
Use lowercase for symbol paths
## Code After:
import os
import glob
from lib.util import execute, rm_rf, safe_mkdir, s3put, s3_config
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SYMBOLS_DIR = 'dist\\symbols'
PDB_LIST = [
'out\\Release\\atom.exe.pdb',
'vendor\\brightray\\vendor\\download\\libchromiumcontent\\Release\\chromiumcontent.dll.pdb',
]
def main():
os.chdir(SOURCE_ROOT)
rm_rf(SYMBOLS_DIR)
safe_mkdir(SYMBOLS_DIR)
for pdb in PDB_LIST:
run_symstore(pdb, SYMBOLS_DIR, 'AtomShell')
bucket, access_key, secret_key = s3_config()
files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
files = [f.lower() for f in files]
upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
def upload_symbols(bucket, access_key, secret_key, files):
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols', files)
if __name__ == '__main__':
import sys
sys.exit(main())
|
import os
import glob
from lib.util import execute, rm_rf, safe_mkdir, s3put, s3_config
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SYMBOLS_DIR = 'dist\\symbols'
PDB_LIST = [
'out\\Release\\atom.exe.pdb',
'vendor\\brightray\\vendor\\download\\libchromiumcontent\\Release\\chromiumcontent.dll.pdb',
]
def main():
os.chdir(SOURCE_ROOT)
rm_rf(SYMBOLS_DIR)
safe_mkdir(SYMBOLS_DIR)
for pdb in PDB_LIST:
run_symstore(pdb, SYMBOLS_DIR, 'AtomShell')
bucket, access_key, secret_key = s3_config()
files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
+ files = [f.lower() for f in files]
upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
def upload_symbols(bucket, access_key, secret_key, files):
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols', files)
if __name__ == '__main__':
import sys
sys.exit(main())
|
819e34fb8cd60a25b7796508f72a1e9ba00b5faf
|
incuna_test_utils/factories/user.py
|
incuna_test_utils/factories/user.py
|
import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
|
import factory
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except ImportError: # Django 1.4
from django.contrib.auth.models import User
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = User
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
|
Fix UserFactory for django 1.4
|
Fix UserFactory for django 1.4
|
Python
|
bsd-2-clause
|
incuna/incuna-test-utils,incuna/incuna-test-utils
|
import factory
+ try:
- from django.contrib.auth import get_user_model
+ from django.contrib.auth import get_user_model
+ User = get_user_model()
+ except ImportError: # Django 1.4
+ from django.contrib.auth.models import User
class UserFactory(factory.DjangoModelFactory):
- FACTORY_FOR = get_user_model()
+ FACTORY_FOR = User
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
|
Fix UserFactory for django 1.4
|
## Code Before:
import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
## Instruction:
Fix UserFactory for django 1.4
## Code After:
import factory
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except ImportError: # Django 1.4
from django.contrib.auth.models import User
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = User
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
|
import factory
+ try:
- from django.contrib.auth import get_user_model
+ from django.contrib.auth import get_user_model
? ++++
+ User = get_user_model()
+ except ImportError: # Django 1.4
+ from django.contrib.auth.models import User
class UserFactory(factory.DjangoModelFactory):
- FACTORY_FOR = get_user_model()
? ^^^^^ --------
+ FACTORY_FOR = User
? ^
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
|
bd11c37a8669bdae2d4561483f50da0891b82627
|
monsetup/detection/plugins/zookeeper.py
|
monsetup/detection/plugins/zookeeper.py
|
import logging
import os
import yaml
import monsetup.agent_config
import monsetup.detection
log = logging.getLogger(__name__)
class Zookeeper(monsetup.detection.Plugin):
"""Detect Zookeeper daemons and setup configuration to monitor them.
"""
def _detect(self):
"""Run detection, set self.available True if the service is detected.
"""
if monsetup.detection.find_process_cmdline('zookeeper') is not None:
self.available = True
def build_config(self):
"""Build the config as a Plugins object and return.
"""
config = monsetup.agent_config.Plugins()
# First watch the process
log.info("\tWatching the zookeeper process.")
config.merge(monsetup.detection.watch_process(['zookeeper']))
log.info("\tEnabling the zookeeper plugin")
with open(os.path.join(self.template_dir, 'conf.d/zk.yaml.example'), 'r') as zk_template:
zk_config = yaml.load(zk_template.read())
config['zk'] = zk_config
return config
def dependencies_installed(self):
# The current plugin just does a simple socket connection to zookeeper and
# parses the stat command
return True
|
import logging
import os
import yaml
import monsetup.agent_config
import monsetup.detection
log = logging.getLogger(__name__)
class Zookeeper(monsetup.detection.Plugin):
"""Detect Zookeeper daemons and setup configuration to monitor them.
"""
def _detect(self):
"""Run detection, set self.available True if the service is detected.
"""
if monsetup.detection.find_process_cmdline('org.apache.zookeeper') is not None:
self.available = True
def build_config(self):
"""Build the config as a Plugins object and return.
"""
config = monsetup.agent_config.Plugins()
# First watch the process
log.info("\tWatching the zookeeper process.")
config.merge(monsetup.detection.watch_process(['zookeeper']))
log.info("\tEnabling the zookeeper plugin")
with open(os.path.join(self.template_dir, 'conf.d/zk.yaml.example'), 'r') as zk_template:
zk_config = yaml.load(zk_template.read())
config['zk'] = zk_config
return config
def dependencies_installed(self):
# The current plugin just does a simple socket connection to zookeeper and
# parses the stat command
return True
|
Fix detection of Zookeeper in monasca-setup
|
Fix detection of Zookeeper in monasca-setup
The Zookeeper detection plugin was looking for zookeeper in the process
command-line. This was producing false positives in the detection
process because storm uses the zookeeper library and it shows up
the command-line for storm.
Change-Id: I764a3064003beec55f0e589272855dadfa0997e7
|
Python
|
bsd-3-clause
|
sapcc/monasca-agent,sapcc/monasca-agent,sapcc/monasca-agent
|
import logging
import os
import yaml
import monsetup.agent_config
import monsetup.detection
log = logging.getLogger(__name__)
class Zookeeper(monsetup.detection.Plugin):
"""Detect Zookeeper daemons and setup configuration to monitor them.
"""
def _detect(self):
"""Run detection, set self.available True if the service is detected.
"""
- if monsetup.detection.find_process_cmdline('zookeeper') is not None:
+ if monsetup.detection.find_process_cmdline('org.apache.zookeeper') is not None:
self.available = True
def build_config(self):
"""Build the config as a Plugins object and return.
"""
config = monsetup.agent_config.Plugins()
# First watch the process
log.info("\tWatching the zookeeper process.")
config.merge(monsetup.detection.watch_process(['zookeeper']))
log.info("\tEnabling the zookeeper plugin")
with open(os.path.join(self.template_dir, 'conf.d/zk.yaml.example'), 'r') as zk_template:
zk_config = yaml.load(zk_template.read())
config['zk'] = zk_config
return config
def dependencies_installed(self):
# The current plugin just does a simple socket connection to zookeeper and
# parses the stat command
return True
|
Fix detection of Zookeeper in monasca-setup
|
## Code Before:
import logging
import os
import yaml
import monsetup.agent_config
import monsetup.detection
log = logging.getLogger(__name__)
class Zookeeper(monsetup.detection.Plugin):
"""Detect Zookeeper daemons and setup configuration to monitor them.
"""
def _detect(self):
"""Run detection, set self.available True if the service is detected.
"""
if monsetup.detection.find_process_cmdline('zookeeper') is not None:
self.available = True
def build_config(self):
"""Build the config as a Plugins object and return.
"""
config = monsetup.agent_config.Plugins()
# First watch the process
log.info("\tWatching the zookeeper process.")
config.merge(monsetup.detection.watch_process(['zookeeper']))
log.info("\tEnabling the zookeeper plugin")
with open(os.path.join(self.template_dir, 'conf.d/zk.yaml.example'), 'r') as zk_template:
zk_config = yaml.load(zk_template.read())
config['zk'] = zk_config
return config
def dependencies_installed(self):
# The current plugin just does a simple socket connection to zookeeper and
# parses the stat command
return True
## Instruction:
Fix detection of Zookeeper in monasca-setup
## Code After:
import logging
import os
import yaml
import monsetup.agent_config
import monsetup.detection
log = logging.getLogger(__name__)
class Zookeeper(monsetup.detection.Plugin):
"""Detect Zookeeper daemons and setup configuration to monitor them.
"""
def _detect(self):
"""Run detection, set self.available True if the service is detected.
"""
if monsetup.detection.find_process_cmdline('org.apache.zookeeper') is not None:
self.available = True
def build_config(self):
"""Build the config as a Plugins object and return.
"""
config = monsetup.agent_config.Plugins()
# First watch the process
log.info("\tWatching the zookeeper process.")
config.merge(monsetup.detection.watch_process(['zookeeper']))
log.info("\tEnabling the zookeeper plugin")
with open(os.path.join(self.template_dir, 'conf.d/zk.yaml.example'), 'r') as zk_template:
zk_config = yaml.load(zk_template.read())
config['zk'] = zk_config
return config
def dependencies_installed(self):
# The current plugin just does a simple socket connection to zookeeper and
# parses the stat command
return True
|
import logging
import os
import yaml
import monsetup.agent_config
import monsetup.detection
log = logging.getLogger(__name__)
class Zookeeper(monsetup.detection.Plugin):
"""Detect Zookeeper daemons and setup configuration to monitor them.
"""
def _detect(self):
"""Run detection, set self.available True if the service is detected.
"""
- if monsetup.detection.find_process_cmdline('zookeeper') is not None:
+ if monsetup.detection.find_process_cmdline('org.apache.zookeeper') is not None:
? +++++++++++
self.available = True
def build_config(self):
"""Build the config as a Plugins object and return.
"""
config = monsetup.agent_config.Plugins()
# First watch the process
log.info("\tWatching the zookeeper process.")
config.merge(monsetup.detection.watch_process(['zookeeper']))
log.info("\tEnabling the zookeeper plugin")
with open(os.path.join(self.template_dir, 'conf.d/zk.yaml.example'), 'r') as zk_template:
zk_config = yaml.load(zk_template.read())
config['zk'] = zk_config
return config
def dependencies_installed(self):
# The current plugin just does a simple socket connection to zookeeper and
# parses the stat command
return True
|
611c34eee4b5aa263669f1b7321b97fab9a98b5e
|
dask/distributed/tests/test_ipython_utils.py
|
dask/distributed/tests/test_ipython_utils.py
|
from dask.distributed import dask_client_from_ipclient
def test_dask_client_from_ipclient():
from IPython.parallel import Client
c = Client()
dc = dask_client_from_ipclient(c)
assert 2 == dc.get({'a': 1, 'b': (lambda x: x + 1, 'a')}, 'b')
dc.close(close_workers=True, close_scheduler=True)
|
from dask.distributed import dask_client_from_ipclient
import numpy as np
from numpy.testing import assert_array_almost_equal
import dask.array as da
def test_dask_client_from_ipclient():
from IPython.parallel import Client
c = Client()
dask_client = dask_client_from_ipclient(c)
# data
a = np.arange(100).reshape(10, 10)
d = da.from_array(a, ((5, 5), (5, 5)))
# test array.mean
expected = a.mean(axis=0)
d1 = d.mean(axis=0)
result = d1.compute(get=dask_client.get)
assert_array_almost_equal(result, expected)
# test ghosting
d2 = da.ghost.ghost(d, depth=1, boundary='reflect')
d3 = da.ghost.trim_internal(d2, {0: 1, 1: 1})
result1 = d3.compute(get=dask_client.get)
assert_array_almost_equal(result1, a)
# close the workers
dask_client.close(close_workers=True, close_scheduler=True)
|
Remove lambda test. Add dask array tests.
|
Remove lambda test. Add dask array tests.
|
Python
|
bsd-3-clause
|
PhE/dask,clarkfitzg/dask,jayhetee/dask,simudream/dask,mikegraham/dask,vikhyat/dask,PhE/dask,wiso/dask,jcrist/dask,esc/dask,mraspaud/dask,esc/dask,marianotepper/dask,vikhyat/dask,pombredanne/dask,simudream/dask,freeman-lab/dask,cpcloud/dask,blaze/dask,marianotepper/dask,jcrist/dask,hainm/dask,ContinuumIO/dask,blaze/dask,wiso/dask,mraspaud/dask,chrisbarber/dask,cowlicks/dask,gameduell/dask,freeman-lab/dask,jayhetee/dask,jakirkham/dask,jakirkham/dask,mrocklin/dask,mrocklin/dask,dask/dask,pombredanne/dask,hainm/dask,clarkfitzg/dask,ContinuumIO/dask,ssanderson/dask,dask/dask,ssanderson/dask
|
from dask.distributed import dask_client_from_ipclient
+ import numpy as np
+ from numpy.testing import assert_array_almost_equal
+ import dask.array as da
+
def test_dask_client_from_ipclient():
from IPython.parallel import Client
c = Client()
- dc = dask_client_from_ipclient(c)
+ dask_client = dask_client_from_ipclient(c)
- assert 2 == dc.get({'a': 1, 'b': (lambda x: x + 1, 'a')}, 'b')
- dc.close(close_workers=True, close_scheduler=True)
+ # data
+ a = np.arange(100).reshape(10, 10)
+ d = da.from_array(a, ((5, 5), (5, 5)))
+
+ # test array.mean
+ expected = a.mean(axis=0)
+ d1 = d.mean(axis=0)
+ result = d1.compute(get=dask_client.get)
+ assert_array_almost_equal(result, expected)
+
+ # test ghosting
+ d2 = da.ghost.ghost(d, depth=1, boundary='reflect')
+ d3 = da.ghost.trim_internal(d2, {0: 1, 1: 1})
+ result1 = d3.compute(get=dask_client.get)
+ assert_array_almost_equal(result1, a)
+
+ # close the workers
+ dask_client.close(close_workers=True, close_scheduler=True)
+
|
Remove lambda test. Add dask array tests.
|
## Code Before:
from dask.distributed import dask_client_from_ipclient
def test_dask_client_from_ipclient():
from IPython.parallel import Client
c = Client()
dc = dask_client_from_ipclient(c)
assert 2 == dc.get({'a': 1, 'b': (lambda x: x + 1, 'a')}, 'b')
dc.close(close_workers=True, close_scheduler=True)
## Instruction:
Remove lambda test. Add dask array tests.
## Code After:
from dask.distributed import dask_client_from_ipclient
import numpy as np
from numpy.testing import assert_array_almost_equal
import dask.array as da
def test_dask_client_from_ipclient():
from IPython.parallel import Client
c = Client()
dask_client = dask_client_from_ipclient(c)
# data
a = np.arange(100).reshape(10, 10)
d = da.from_array(a, ((5, 5), (5, 5)))
# test array.mean
expected = a.mean(axis=0)
d1 = d.mean(axis=0)
result = d1.compute(get=dask_client.get)
assert_array_almost_equal(result, expected)
# test ghosting
d2 = da.ghost.ghost(d, depth=1, boundary='reflect')
d3 = da.ghost.trim_internal(d2, {0: 1, 1: 1})
result1 = d3.compute(get=dask_client.get)
assert_array_almost_equal(result1, a)
# close the workers
dask_client.close(close_workers=True, close_scheduler=True)
|
from dask.distributed import dask_client_from_ipclient
+ import numpy as np
+ from numpy.testing import assert_array_almost_equal
+ import dask.array as da
+
def test_dask_client_from_ipclient():
from IPython.parallel import Client
c = Client()
- dc = dask_client_from_ipclient(c)
+ dask_client = dask_client_from_ipclient(c)
? ++++ +++++
- assert 2 == dc.get({'a': 1, 'b': (lambda x: x + 1, 'a')}, 'b')
+
+ # data
+ a = np.arange(100).reshape(10, 10)
+ d = da.from_array(a, ((5, 5), (5, 5)))
+
+ # test array.mean
+ expected = a.mean(axis=0)
+ d1 = d.mean(axis=0)
+ result = d1.compute(get=dask_client.get)
+ assert_array_almost_equal(result, expected)
+
+ # test ghosting
+ d2 = da.ghost.ghost(d, depth=1, boundary='reflect')
+ d3 = da.ghost.trim_internal(d2, {0: 1, 1: 1})
+ result1 = d3.compute(get=dask_client.get)
+ assert_array_almost_equal(result1, a)
+
+ # close the workers
- dc.close(close_workers=True, close_scheduler=True)
+ dask_client.close(close_workers=True, close_scheduler=True)
? ++++ +++++
|
dd739126181b29493c9d1d90a7e40eac09c23666
|
app/models.py
|
app/models.py
|
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
rid = db.Column(db.String(16), nullable=False, index=True)
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
|
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
rid = db.Column(db.String(16), nullable=False, index=True, unique=True)
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
|
Add unique constraint to rid
|
Add unique constraint to rid
|
Python
|
mit
|
reubano/hdxscraper-hdro,reubano/hdxscraper-hdro,reubano/hdxscraper-hdro
|
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
- rid = db.Column(db.String(16), nullable=False, index=True)
+ rid = db.Column(db.String(16), nullable=False, index=True, unique=True)
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
|
Add unique constraint to rid
|
## Code Before:
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
rid = db.Column(db.String(16), nullable=False, index=True)
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
## Instruction:
Add unique constraint to rid
## Code After:
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
rid = db.Column(db.String(16), nullable=False, index=True, unique=True)
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
|
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
- rid = db.Column(db.String(16), nullable=False, index=True)
+ rid = db.Column(db.String(16), nullable=False, index=True, unique=True)
? +++++++++++++
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
|
d42314b323aa0f8c764d72a5ebebc0e7d5ac88f3
|
nova/api/openstack/compute/schemas/v3/create_backup.py
|
nova/api/openstack/compute/schemas/v3/create_backup.py
|
from nova.api.validation import parameter_types
create_backup = {
'type': 'object',
'properties': {
'create_backup': {
'type': 'object',
'properties': {
'name': parameter_types.name,
'backup_type': {
'type': 'string',
'enum': ['daily', 'weekly'],
},
'rotation': {
'type': ['integer', 'string'],
'pattern': '^[0-9]+$',
'minimum': 0,
},
'metadata': {
'type': 'object',
}
},
'required': ['name', 'backup_type', 'rotation'],
'additionalProperties': False,
},
},
'required': ['create_backup'],
'additionalProperties': False,
}
|
from nova.api.validation import parameter_types
create_backup = {
'type': 'object',
'properties': {
'create_backup': {
'type': 'object',
'properties': {
'name': parameter_types.name,
'backup_type': {
'type': 'string',
},
'rotation': {
'type': ['integer', 'string'],
'pattern': '^[0-9]+$',
'minimum': 0,
},
'metadata': {
'type': 'object',
}
},
'required': ['name', 'backup_type', 'rotation'],
'additionalProperties': False,
},
},
'required': ['create_backup'],
'additionalProperties': False,
}
|
Remove param check for backup type on v2.1 API
|
Remove param check for backup type on v2.1 API
The backup type is only used by glance, so nova check it make
no sense; currently we have daily and weekly as only valid param
but someone may add 'monthly' as param. nova should allow it
and delegate the error. This patch removes check on v2.1 API.
Change-Id: I59bbc0f589c8c280eb8cd87aa279898fffaeab7a
Closes-Bug: #1361490
|
Python
|
apache-2.0
|
devendermishrajio/nova,affo/nova,projectcalico/calico-nova,whitepages/nova,klmitch/nova,jianghuaw/nova,cernops/nova,Stavitsky/nova,fnordahl/nova,blueboxgroup/nova,CEG-FYP-OpenStack/scheduler,Francis-Liu/animated-broccoli,j-carpentier/nova,joker946/nova,hanlind/nova,rajalokan/nova,zhimin711/nova,silenceli/nova,ruslanloman/nova,isyippee/nova,akash1808/nova_test_latest,BeyondTheClouds/nova,belmiromoreira/nova,yatinkumbhare/openstack-nova,mmnelemane/nova,BeyondTheClouds/nova,JioCloud/nova_test_latest,mmnelemane/nova,mikalstill/nova,double12gzh/nova,sebrandon1/nova,cloudbase/nova-virtualbox,phenoxim/nova,devendermishrajio/nova_test_latest,NeCTAR-RC/nova,JioCloud/nova,apporc/nova,ruslanloman/nova,jianghuaw/nova,rahulunair/nova,whitepages/nova,adelina-t/nova,blueboxgroup/nova,noironetworks/nova,alaski/nova,adelina-t/nova,rahulunair/nova,felixma/nova,Juniper/nova,iuliat/nova,alexandrucoman/vbox-nova-driver,Tehsmash/nova,dims/nova,orbitfp7/nova,tealover/nova,yosshy/nova,mahak/nova,CEG-FYP-OpenStack/scheduler,JianyuWang/nova,cernops/nova,sebrandon1/nova,akash1808/nova_test_latest,Juniper/nova,varunarya10/nova_test_latest,Juniper/nova,double12gzh/nova,devendermishrajio/nova_test_latest,tudorvio/nova,BeyondTheClouds/nova,felixma/nova,alexandrucoman/vbox-nova-driver,cyx1231st/nova,CloudServer/nova,projectcalico/calico-nova,iuliat/nova,openstack/nova,mahak/nova,vmturbo/nova,eonpatapon/nova,jeffrey4l/nova,cloudbase/nova,rajalokan/nova,yosshy/nova,vmturbo/nova,nikesh-mahalka/nova,mandeepdhami/nova,mgagne/nova,mahak/nova,TwinkleChawla/nova,CloudServer/nova,belmiromoreira/nova,thomasem/nova,shail2810/nova,devendermishrajio/nova,JioCloud/nova_test_latest,NeCTAR-RC/nova,jeffrey4l/nova,cloudbase/nova-virtualbox,openstack/nova,zhimin711/nova,gooddata/openstack-nova,zzicewind/nova,Metaswitch/calico-nova,joker946/nova,LoHChina/nova,cyx1231st/nova,jianghuaw/nova,ted-gould/nova,raildo/nova,zaina/nova,ted-gould/nova,zaina/nova,petrutlucian94/nova,jianghuaw/nova,phenoxim/nova,mandeepdhami/nova
,Yusuke1987/openstack_template,rajalokan/nova,cloudbase/nova,vmturbo/nova,affo/nova,akash1808/nova,gooddata/openstack-nova,bgxavier/nova,Stavitsky/nova,rajalokan/nova,MountainWei/nova,bgxavier/nova,fnordahl/nova,scripnichenko/nova,hanlind/nova,yatinkumbhare/openstack-nova,sebrandon1/nova,Francis-Liu/animated-broccoli,watonyweng/nova,barnsnake351/nova,JioCloud/nova,TwinkleChawla/nova,raildo/nova,bigswitch/nova,Tehsmash/nova,varunarya10/nova_test_latest,CCI-MOC/nova,shail2810/nova,gooddata/openstack-nova,dims/nova,zzicewind/nova,vmturbo/nova,tealover/nova,mgagne/nova,alvarolopez/nova,eonpatapon/nova,barnsnake351/nova,klmitch/nova,mikalstill/nova,rahulunair/nova,apporc/nova,edulramirez/nova,openstack/nova,isyippee/nova,tudorvio/nova,Metaswitch/calico-nova,JianyuWang/nova,kimjaejoong/nova,hanlind/nova,kimjaejoong/nova,bigswitch/nova,tangfeixiong/nova,petrutlucian94/nova,klmitch/nova,gooddata/openstack-nova,scripnichenko/nova,MountainWei/nova,cernops/nova,orbitfp7/nova,takeshineshiro/nova,klmitch/nova,edulramirez/nova,akash1808/nova,j-carpentier/nova,tangfeixiong/nova,silenceli/nova,nikesh-mahalka/nova,dawnpower/nova,CCI-MOC/nova,takeshineshiro/nova,LoHChina/nova,Juniper/nova,alaski/nova,noironetworks/nova,alvarolopez/nova,Yusuke1987/openstack_template,watonyweng/nova,thomasem/nova,cloudbase/nova,mikalstill/nova,dawnpower/nova
|
from nova.api.validation import parameter_types
create_backup = {
'type': 'object',
'properties': {
'create_backup': {
'type': 'object',
'properties': {
'name': parameter_types.name,
'backup_type': {
'type': 'string',
- 'enum': ['daily', 'weekly'],
},
'rotation': {
'type': ['integer', 'string'],
'pattern': '^[0-9]+$',
'minimum': 0,
},
'metadata': {
'type': 'object',
}
},
'required': ['name', 'backup_type', 'rotation'],
'additionalProperties': False,
},
},
'required': ['create_backup'],
'additionalProperties': False,
}
|
Remove param check for backup type on v2.1 API
|
## Code Before:
from nova.api.validation import parameter_types
create_backup = {
'type': 'object',
'properties': {
'create_backup': {
'type': 'object',
'properties': {
'name': parameter_types.name,
'backup_type': {
'type': 'string',
'enum': ['daily', 'weekly'],
},
'rotation': {
'type': ['integer', 'string'],
'pattern': '^[0-9]+$',
'minimum': 0,
},
'metadata': {
'type': 'object',
}
},
'required': ['name', 'backup_type', 'rotation'],
'additionalProperties': False,
},
},
'required': ['create_backup'],
'additionalProperties': False,
}
## Instruction:
Remove param check for backup type on v2.1 API
## Code After:
from nova.api.validation import parameter_types
create_backup = {
'type': 'object',
'properties': {
'create_backup': {
'type': 'object',
'properties': {
'name': parameter_types.name,
'backup_type': {
'type': 'string',
},
'rotation': {
'type': ['integer', 'string'],
'pattern': '^[0-9]+$',
'minimum': 0,
},
'metadata': {
'type': 'object',
}
},
'required': ['name', 'backup_type', 'rotation'],
'additionalProperties': False,
},
},
'required': ['create_backup'],
'additionalProperties': False,
}
|
from nova.api.validation import parameter_types
create_backup = {
'type': 'object',
'properties': {
'create_backup': {
'type': 'object',
'properties': {
'name': parameter_types.name,
'backup_type': {
'type': 'string',
- 'enum': ['daily', 'weekly'],
},
'rotation': {
'type': ['integer', 'string'],
'pattern': '^[0-9]+$',
'minimum': 0,
},
'metadata': {
'type': 'object',
}
},
'required': ['name', 'backup_type', 'rotation'],
'additionalProperties': False,
},
},
'required': ['create_backup'],
'additionalProperties': False,
}
|
7a77b42691e051269a146fd218dd619ccefecc54
|
src/ansible/models.py
|
src/ansible/models.py
|
from django.db import models
class Project(models.Model):
project_name = models.CharField(max_length=200)
playbook_path = models.CharField(max_length=200, default="~/")
ansible_config_path = models.CharField(max_length=200, default="~/")
default_inventory = models.CharField(max_length=200, default="hosts")
default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
project = models.OneToOneField(
Project,
on_delete=models.CASCADE,
primary_key=True,
)
name = models.CharField(max_length=200)
def __str__(self):
return "project name: %s" % self.project.project_name
|
from django.db import models
class Project(models.Model):
project_name = models.CharField(max_length=200)
playbook_path = models.CharField(max_length=200, default="~/")
ansible_config_path = models.CharField(max_length=200, default="~/")
default_inventory = models.CharField(max_length=200, default="hosts")
default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
class Meta:
verbose_name_plural = "registries"
project = models.OneToOneField(
Project,
on_delete=models.CASCADE,
primary_key=True,
)
name = models.CharField(max_length=200)
def __str__(self):
return "project name: %s" % self.project.project_name
|
Fix plural form of Registry
|
Fix plural form of Registry
TIL how to use meta class
|
Python
|
bsd-3-clause
|
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
|
from django.db import models
class Project(models.Model):
project_name = models.CharField(max_length=200)
playbook_path = models.CharField(max_length=200, default="~/")
ansible_config_path = models.CharField(max_length=200, default="~/")
default_inventory = models.CharField(max_length=200, default="hosts")
default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
+ class Meta:
+ verbose_name_plural = "registries"
+
+
project = models.OneToOneField(
Project,
on_delete=models.CASCADE,
primary_key=True,
)
name = models.CharField(max_length=200)
def __str__(self):
return "project name: %s" % self.project.project_name
|
Fix plural form of Registry
|
## Code Before:
from django.db import models
class Project(models.Model):
project_name = models.CharField(max_length=200)
playbook_path = models.CharField(max_length=200, default="~/")
ansible_config_path = models.CharField(max_length=200, default="~/")
default_inventory = models.CharField(max_length=200, default="hosts")
default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
project = models.OneToOneField(
Project,
on_delete=models.CASCADE,
primary_key=True,
)
name = models.CharField(max_length=200)
def __str__(self):
return "project name: %s" % self.project.project_name
## Instruction:
Fix plural form of Registry
## Code After:
from django.db import models
class Project(models.Model):
project_name = models.CharField(max_length=200)
playbook_path = models.CharField(max_length=200, default="~/")
ansible_config_path = models.CharField(max_length=200, default="~/")
default_inventory = models.CharField(max_length=200, default="hosts")
default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
class Meta:
verbose_name_plural = "registries"
project = models.OneToOneField(
Project,
on_delete=models.CASCADE,
primary_key=True,
)
name = models.CharField(max_length=200)
def __str__(self):
return "project name: %s" % self.project.project_name
|
from django.db import models
class Project(models.Model):
project_name = models.CharField(max_length=200)
playbook_path = models.CharField(max_length=200, default="~/")
ansible_config_path = models.CharField(max_length=200, default="~/")
default_inventory = models.CharField(max_length=200, default="hosts")
default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
+ class Meta:
+ verbose_name_plural = "registries"
+
+
project = models.OneToOneField(
Project,
on_delete=models.CASCADE,
primary_key=True,
)
name = models.CharField(max_length=200)
def __str__(self):
return "project name: %s" % self.project.project_name
|
45f33dcf98b7b20fbedf3e05ca5c575ce3cbcbb3
|
scripts/generate-invoice.py
|
scripts/generate-invoice.py
|
import argparse
import yaml
import jinja2
import weasyprint
parser = argparse.ArgumentParser()
parser.add_argument('--data', help='path to data directory', required=True)
parser.add_argument('--number', help='Invoice number', type=int, required=True)
args = parser.parse_args()
data_directory = str(args.data)
invoice_number = str(args.number)
supplier_file = open(data_directory + 'data/supplier.yaml')
supplier_data = yaml.safe_load(supplier_file.read())
supplier_file.close()
invoice_file = open(data_directory + 'data/invoices/' + invoice_number + '.yaml')
invoice_data = yaml.safe_load(invoice_file.read())
invoice_file.close()
# TODO: Validation
# TODO: Sum of invoice items equals total
# TODO: Invoice number matches filename
client_file = open(data_directory + 'data/clients/' + invoice_data['client'] + '.yaml')
client_data = yaml.safe_load(client_file.read())
client_file.close()
template_environment = jinja2.Environment(loader = jinja2.FileSystemLoader('../templates/'))
template = template_environment.get_template('invoice.html')
html_data = template.render(supplier = supplier_data, invoice = invoice_data, client = client_data)
weasyprint.HTML(string = html_data).write_pdf(data_directory + 'output/invoices/' + invoice_number + '.pdf')
|
import argparse
import yaml
import jinja2
import weasyprint
parser = argparse.ArgumentParser()
parser.add_argument('--data', help='path to data directory', required=True)
parser.add_argument('--number', help='Invoice number', type=int, required=True)
args = parser.parse_args()
data_directory = str(args.data)
invoice_number = str(args.number)
supplier_file = open(data_directory + 'data/supplier.yaml')
supplier_data = yaml.safe_load(supplier_file.read())
supplier_file.close()
invoice_file = open(data_directory + 'data/invoices/' + invoice_number + '.yaml')
invoice_data = yaml.safe_load(invoice_file.read())
invoice_file.close()
client_file = open(data_directory + 'data/clients/' + invoice_data['client'] + '.yaml')
client_data = yaml.safe_load(client_file.read())
client_file.close()
template_environment = jinja2.Environment(loader = jinja2.FileSystemLoader('../templates/'))
template = template_environment.get_template('invoice.html')
html_data = template.render(supplier = supplier_data, invoice = invoice_data, client = client_data)
weasyprint.HTML(string = html_data).write_pdf(data_directory + 'output/invoices/' + invoice_number + '.pdf')
|
Move TODO items to GitHub issues
|
Move TODO items to GitHub issues
|
Python
|
mit
|
pwaring/125-accounts,pwaring/125-accounts
|
import argparse
import yaml
import jinja2
import weasyprint
parser = argparse.ArgumentParser()
parser.add_argument('--data', help='path to data directory', required=True)
parser.add_argument('--number', help='Invoice number', type=int, required=True)
args = parser.parse_args()
data_directory = str(args.data)
invoice_number = str(args.number)
supplier_file = open(data_directory + 'data/supplier.yaml')
supplier_data = yaml.safe_load(supplier_file.read())
supplier_file.close()
invoice_file = open(data_directory + 'data/invoices/' + invoice_number + '.yaml')
invoice_data = yaml.safe_load(invoice_file.read())
invoice_file.close()
- # TODO: Validation
- # TODO: Sum of invoice items equals total
- # TODO: Invoice number matches filename
-
client_file = open(data_directory + 'data/clients/' + invoice_data['client'] + '.yaml')
client_data = yaml.safe_load(client_file.read())
client_file.close()
template_environment = jinja2.Environment(loader = jinja2.FileSystemLoader('../templates/'))
template = template_environment.get_template('invoice.html')
html_data = template.render(supplier = supplier_data, invoice = invoice_data, client = client_data)
weasyprint.HTML(string = html_data).write_pdf(data_directory + 'output/invoices/' + invoice_number + '.pdf')
|
Move TODO items to GitHub issues
|
## Code Before:
import argparse
import yaml
import jinja2
import weasyprint
parser = argparse.ArgumentParser()
parser.add_argument('--data', help='path to data directory', required=True)
parser.add_argument('--number', help='Invoice number', type=int, required=True)
args = parser.parse_args()
data_directory = str(args.data)
invoice_number = str(args.number)
supplier_file = open(data_directory + 'data/supplier.yaml')
supplier_data = yaml.safe_load(supplier_file.read())
supplier_file.close()
invoice_file = open(data_directory + 'data/invoices/' + invoice_number + '.yaml')
invoice_data = yaml.safe_load(invoice_file.read())
invoice_file.close()
# TODO: Validation
# TODO: Sum of invoice items equals total
# TODO: Invoice number matches filename
client_file = open(data_directory + 'data/clients/' + invoice_data['client'] + '.yaml')
client_data = yaml.safe_load(client_file.read())
client_file.close()
template_environment = jinja2.Environment(loader = jinja2.FileSystemLoader('../templates/'))
template = template_environment.get_template('invoice.html')
html_data = template.render(supplier = supplier_data, invoice = invoice_data, client = client_data)
weasyprint.HTML(string = html_data).write_pdf(data_directory + 'output/invoices/' + invoice_number + '.pdf')
## Instruction:
Move TODO items to GitHub issues
## Code After:
import argparse
import yaml
import jinja2
import weasyprint
parser = argparse.ArgumentParser()
parser.add_argument('--data', help='path to data directory', required=True)
parser.add_argument('--number', help='Invoice number', type=int, required=True)
args = parser.parse_args()
data_directory = str(args.data)
invoice_number = str(args.number)
supplier_file = open(data_directory + 'data/supplier.yaml')
supplier_data = yaml.safe_load(supplier_file.read())
supplier_file.close()
invoice_file = open(data_directory + 'data/invoices/' + invoice_number + '.yaml')
invoice_data = yaml.safe_load(invoice_file.read())
invoice_file.close()
client_file = open(data_directory + 'data/clients/' + invoice_data['client'] + '.yaml')
client_data = yaml.safe_load(client_file.read())
client_file.close()
template_environment = jinja2.Environment(loader = jinja2.FileSystemLoader('../templates/'))
template = template_environment.get_template('invoice.html')
html_data = template.render(supplier = supplier_data, invoice = invoice_data, client = client_data)
weasyprint.HTML(string = html_data).write_pdf(data_directory + 'output/invoices/' + invoice_number + '.pdf')
|
import argparse
import yaml
import jinja2
import weasyprint
parser = argparse.ArgumentParser()
parser.add_argument('--data', help='path to data directory', required=True)
parser.add_argument('--number', help='Invoice number', type=int, required=True)
args = parser.parse_args()
data_directory = str(args.data)
invoice_number = str(args.number)
supplier_file = open(data_directory + 'data/supplier.yaml')
supplier_data = yaml.safe_load(supplier_file.read())
supplier_file.close()
invoice_file = open(data_directory + 'data/invoices/' + invoice_number + '.yaml')
invoice_data = yaml.safe_load(invoice_file.read())
invoice_file.close()
- # TODO: Validation
- # TODO: Sum of invoice items equals total
- # TODO: Invoice number matches filename
-
client_file = open(data_directory + 'data/clients/' + invoice_data['client'] + '.yaml')
client_data = yaml.safe_load(client_file.read())
client_file.close()
template_environment = jinja2.Environment(loader = jinja2.FileSystemLoader('../templates/'))
template = template_environment.get_template('invoice.html')
html_data = template.render(supplier = supplier_data, invoice = invoice_data, client = client_data)
weasyprint.HTML(string = html_data).write_pdf(data_directory + 'output/invoices/' + invoice_number + '.pdf')
|
1657e46cd5c2a81df4cbb73b292b0bf9072d5c51
|
h2o-py/tests/testdir_tree/pyunit_tree_irf.py
|
h2o-py/tests/testdir_tree/pyunit_tree_irf.py
|
import h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class = None):
assert tree is not None
assert len(tree) > 0
assert tree._tree_number == tree_number
assert tree._tree_class == tree_class
assert tree.root_node is not None
assert tree.left_children is not None
assert tree.right_children is not None
assert tree.thresholds is not None
assert tree.nas is not None
assert tree.descriptions is not None
assert tree.node_ids is not None
assert tree.model_id is not None
assert tree.levels is not None
assert tree.root_node.na_direction is not None
assert tree.root_node.id is not None
def irf_tree_Test():
prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/prostate/prostate.csv"))
prostate["RACE"] = prostate["RACE"].asfactor()
iso_model = H2OIsolationForestEstimator()
iso_model.train(training_frame = prostate, x = list(set(prostate.col_names) - set(["ID", "CAPSULE"])))
tree = H2OTree(iso_model, 5)
check_tree(tree, 5, None)
print(tree)
if __name__ == "__main__":
pyunit_utils.standalone_test(irf_tree_Test)
else:
irf_tree_Test()
|
import h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class = None):
assert tree is not None
assert len(tree) > 0
assert tree._tree_number == tree_number
assert tree._tree_class == tree_class
assert tree.root_node is not None
assert tree.left_children is not None
assert tree.right_children is not None
assert tree.thresholds is not None
assert tree.nas is not None
assert tree.descriptions is not None
assert tree.node_ids is not None
assert tree.model_id is not None
assert tree.levels is not None
assert tree.root_node.na_direction is not None
assert tree.root_node.id is not None
def irf_tree_Test():
cat_frame = h2o.create_frame(cols=10, categorical_fraction=1, seed=42)
# check all columns are categorical
assert set(cat_frame.types.values()) == set(['enum'])
iso_model = H2OIsolationForestEstimator(seed=42)
iso_model.train(training_frame=cat_frame)
tree = H2OTree(iso_model, 5)
check_tree(tree, 5, None)
print(tree)
if __name__ == "__main__":
pyunit_utils.standalone_test(irf_tree_Test)
else:
irf_tree_Test()
|
Fix test: make sure that Isolation Forest actually make a categorical split
|
Fix test: make sure that Isolation Forest actually make a categorical split
|
Python
|
apache-2.0
|
h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,h2oai/h2o-dev,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,h2oai/h2o-dev,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,h2oai/h2o-dev,h2oai/h2o-3
|
import h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class = None):
assert tree is not None
assert len(tree) > 0
assert tree._tree_number == tree_number
assert tree._tree_class == tree_class
assert tree.root_node is not None
assert tree.left_children is not None
assert tree.right_children is not None
assert tree.thresholds is not None
assert tree.nas is not None
assert tree.descriptions is not None
assert tree.node_ids is not None
assert tree.model_id is not None
assert tree.levels is not None
assert tree.root_node.na_direction is not None
assert tree.root_node.id is not None
def irf_tree_Test():
+ cat_frame = h2o.create_frame(cols=10, categorical_fraction=1, seed=42)
+ # check all columns are categorical
+ assert set(cat_frame.types.values()) == set(['enum'])
- prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/prostate/prostate.csv"))
- prostate["RACE"] = prostate["RACE"].asfactor()
- iso_model = H2OIsolationForestEstimator()
+ iso_model = H2OIsolationForestEstimator(seed=42)
+ iso_model.train(training_frame=cat_frame)
- iso_model.train(training_frame = prostate, x = list(set(prostate.col_names) - set(["ID", "CAPSULE"])))
-
tree = H2OTree(iso_model, 5)
check_tree(tree, 5, None)
print(tree)
if __name__ == "__main__":
pyunit_utils.standalone_test(irf_tree_Test)
else:
irf_tree_Test()
|
Fix test: make sure that Isolation Forest actually make a categorical split
|
## Code Before:
import h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class = None):
assert tree is not None
assert len(tree) > 0
assert tree._tree_number == tree_number
assert tree._tree_class == tree_class
assert tree.root_node is not None
assert tree.left_children is not None
assert tree.right_children is not None
assert tree.thresholds is not None
assert tree.nas is not None
assert tree.descriptions is not None
assert tree.node_ids is not None
assert tree.model_id is not None
assert tree.levels is not None
assert tree.root_node.na_direction is not None
assert tree.root_node.id is not None
def irf_tree_Test():
prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/prostate/prostate.csv"))
prostate["RACE"] = prostate["RACE"].asfactor()
iso_model = H2OIsolationForestEstimator()
iso_model.train(training_frame = prostate, x = list(set(prostate.col_names) - set(["ID", "CAPSULE"])))
tree = H2OTree(iso_model, 5)
check_tree(tree, 5, None)
print(tree)
if __name__ == "__main__":
pyunit_utils.standalone_test(irf_tree_Test)
else:
irf_tree_Test()
## Instruction:
Fix test: make sure that Isolation Forest actually make a categorical split
## Code After:
import h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class = None):
assert tree is not None
assert len(tree) > 0
assert tree._tree_number == tree_number
assert tree._tree_class == tree_class
assert tree.root_node is not None
assert tree.left_children is not None
assert tree.right_children is not None
assert tree.thresholds is not None
assert tree.nas is not None
assert tree.descriptions is not None
assert tree.node_ids is not None
assert tree.model_id is not None
assert tree.levels is not None
assert tree.root_node.na_direction is not None
assert tree.root_node.id is not None
def irf_tree_Test():
cat_frame = h2o.create_frame(cols=10, categorical_fraction=1, seed=42)
# check all columns are categorical
assert set(cat_frame.types.values()) == set(['enum'])
iso_model = H2OIsolationForestEstimator(seed=42)
iso_model.train(training_frame=cat_frame)
tree = H2OTree(iso_model, 5)
check_tree(tree, 5, None)
print(tree)
if __name__ == "__main__":
pyunit_utils.standalone_test(irf_tree_Test)
else:
irf_tree_Test()
|
import h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class = None):
assert tree is not None
assert len(tree) > 0
assert tree._tree_number == tree_number
assert tree._tree_class == tree_class
assert tree.root_node is not None
assert tree.left_children is not None
assert tree.right_children is not None
assert tree.thresholds is not None
assert tree.nas is not None
assert tree.descriptions is not None
assert tree.node_ids is not None
assert tree.model_id is not None
assert tree.levels is not None
assert tree.root_node.na_direction is not None
assert tree.root_node.id is not None
def irf_tree_Test():
+ cat_frame = h2o.create_frame(cols=10, categorical_fraction=1, seed=42)
+ # check all columns are categorical
+ assert set(cat_frame.types.values()) == set(['enum'])
- prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/prostate/prostate.csv"))
- prostate["RACE"] = prostate["RACE"].asfactor()
- iso_model = H2OIsolationForestEstimator()
+ iso_model = H2OIsolationForestEstimator(seed=42)
? +++++++
+ iso_model.train(training_frame=cat_frame)
- iso_model.train(training_frame = prostate, x = list(set(prostate.col_names) - set(["ID", "CAPSULE"])))
-
tree = H2OTree(iso_model, 5)
check_tree(tree, 5, None)
print(tree)
if __name__ == "__main__":
pyunit_utils.standalone_test(irf_tree_Test)
else:
irf_tree_Test()
|
d12907dd681c1d16c623b9dcceed9ff5e85c2ac6
|
views.py
|
views.py
|
from django.shortcuts import render
def intro(request, template='intro.html'):
response = render(request, template)
response['X-Frame-Options'] = 'SAMEORIGIN'
return response
|
from django.shortcuts import render
from django.views.decorators.clickjacking import xframe_options_sameorigin
@xframe_options_sameorigin
def intro(request, template='intro.html'):
response = render(request, template)
return response
|
Use X-Frame-Options decorator to override middleware.
|
Use X-Frame-Options decorator to override middleware.
|
Python
|
bsd-3-clause
|
m8ttyB/pontoon-intro,mathjazz/pontoon-intro,mathjazz/pontoon-intro,Osmose/pontoon-intro,jotes/pontoon-intro,Osmose/pontoon-intro,jotes/pontoon-intro,m8ttyB/pontoon-intro,jotes/pontoon-intro,mathjazz/pontoon-intro,m8ttyB/pontoon-intro,Osmose/pontoon-intro
|
-
from django.shortcuts import render
+ from django.views.decorators.clickjacking import xframe_options_sameorigin
+ @xframe_options_sameorigin
def intro(request, template='intro.html'):
response = render(request, template)
- response['X-Frame-Options'] = 'SAMEORIGIN'
-
return response
|
Use X-Frame-Options decorator to override middleware.
|
## Code Before:
from django.shortcuts import render
def intro(request, template='intro.html'):
response = render(request, template)
response['X-Frame-Options'] = 'SAMEORIGIN'
return response
## Instruction:
Use X-Frame-Options decorator to override middleware.
## Code After:
from django.shortcuts import render
from django.views.decorators.clickjacking import xframe_options_sameorigin
@xframe_options_sameorigin
def intro(request, template='intro.html'):
response = render(request, template)
return response
|
-
from django.shortcuts import render
+ from django.views.decorators.clickjacking import xframe_options_sameorigin
+ @xframe_options_sameorigin
def intro(request, template='intro.html'):
response = render(request, template)
- response['X-Frame-Options'] = 'SAMEORIGIN'
-
return response
|
5cb497d0741f6dbd29a6e41fa9f1cb3374e8f062
|
jsontosql.py
|
jsontosql.py
|
import os
import os.path
from json import loads
import click
from vendcrawler.scripts.vendcrawlerdb import VendCrawlerDB
class JSONToSQL(object):
def __init__(self, json, user, password, database):
self.data = loads(json.read())
self.db = VendCrawlerDB(user, password, database)
table = 'items'
columns = ['item_id', 'item_name', 'vendor_id', 'shop_name',
'amount', 'price', 'map', 'datetime']
values = []
for items in data:
for item in items:
value = [int(item['id']),
item['name'],
int(item['vendor_id']),
item['shop'],
int(item['amount'].replace(',', '')),
int(item['price'].replace(',', '')),
item['map'],
item['datetime']]
values.append(value)
self.vcdb.insert(table, columns, values)
@click.command()
@click.argument('json', type=click.File('r'))
@click.argument('user')
@click.argument('password')
@click.argument('database')
def cli(json, user, password, database):
JSONToSQL(json, user, password, database)
if __name__ == '__main__':
cli()
|
import os
import os.path
from json import loads
import click
from vendcrawler.scripts.vendcrawlerdb import VendCrawlerDB
class JSONToSQL(object):
def __init__(self, json, user, password, database):
data = loads(json.read())
db = VendCrawlerDB(user, password, database)
table = 'items'
columns = ['item_id', 'item_name', 'vendor_id', 'shop_name',
'amount', 'price', 'map', 'datetime']
values = []
for items in data:
for item in items:
value = [int(item['id']),
item['name'],
int(item['vendor_id']),
item['shop'],
int(item['amount'].replace(',', '')),
int(item['price'].replace(',', '')),
item['map'],
item['datetime']]
values.append(value)
self.db.insert(table, columns, values)
@click.command()
@click.argument('json', type=click.File('r'))
@click.argument('user')
@click.argument('password')
@click.argument('database')
def cli(json, user, password, database):
JSONToSQL(json, user, password, database)
if __name__ == '__main__':
cli()
|
Fix json to sql converter.
|
Fix json to sql converter.
|
Python
|
mit
|
josetaas/vendcrawler,josetaas/vendcrawler,josetaas/vendcrawler
|
import os
import os.path
from json import loads
import click
from vendcrawler.scripts.vendcrawlerdb import VendCrawlerDB
class JSONToSQL(object):
def __init__(self, json, user, password, database):
- self.data = loads(json.read())
+ data = loads(json.read())
- self.db = VendCrawlerDB(user, password, database)
+ db = VendCrawlerDB(user, password, database)
table = 'items'
columns = ['item_id', 'item_name', 'vendor_id', 'shop_name',
'amount', 'price', 'map', 'datetime']
values = []
for items in data:
for item in items:
value = [int(item['id']),
item['name'],
int(item['vendor_id']),
item['shop'],
int(item['amount'].replace(',', '')),
int(item['price'].replace(',', '')),
item['map'],
item['datetime']]
values.append(value)
- self.vcdb.insert(table, columns, values)
+ self.db.insert(table, columns, values)
@click.command()
@click.argument('json', type=click.File('r'))
@click.argument('user')
@click.argument('password')
@click.argument('database')
def cli(json, user, password, database):
JSONToSQL(json, user, password, database)
if __name__ == '__main__':
cli()
|
Fix json to sql converter.
|
## Code Before:
import os
import os.path
from json import loads
import click
from vendcrawler.scripts.vendcrawlerdb import VendCrawlerDB
class JSONToSQL(object):
def __init__(self, json, user, password, database):
self.data = loads(json.read())
self.db = VendCrawlerDB(user, password, database)
table = 'items'
columns = ['item_id', 'item_name', 'vendor_id', 'shop_name',
'amount', 'price', 'map', 'datetime']
values = []
for items in data:
for item in items:
value = [int(item['id']),
item['name'],
int(item['vendor_id']),
item['shop'],
int(item['amount'].replace(',', '')),
int(item['price'].replace(',', '')),
item['map'],
item['datetime']]
values.append(value)
self.vcdb.insert(table, columns, values)
@click.command()
@click.argument('json', type=click.File('r'))
@click.argument('user')
@click.argument('password')
@click.argument('database')
def cli(json, user, password, database):
JSONToSQL(json, user, password, database)
if __name__ == '__main__':
cli()
## Instruction:
Fix json to sql converter.
## Code After:
import os
import os.path
from json import loads
import click
from vendcrawler.scripts.vendcrawlerdb import VendCrawlerDB
class JSONToSQL(object):
def __init__(self, json, user, password, database):
data = loads(json.read())
db = VendCrawlerDB(user, password, database)
table = 'items'
columns = ['item_id', 'item_name', 'vendor_id', 'shop_name',
'amount', 'price', 'map', 'datetime']
values = []
for items in data:
for item in items:
value = [int(item['id']),
item['name'],
int(item['vendor_id']),
item['shop'],
int(item['amount'].replace(',', '')),
int(item['price'].replace(',', '')),
item['map'],
item['datetime']]
values.append(value)
self.db.insert(table, columns, values)
@click.command()
@click.argument('json', type=click.File('r'))
@click.argument('user')
@click.argument('password')
@click.argument('database')
def cli(json, user, password, database):
JSONToSQL(json, user, password, database)
if __name__ == '__main__':
cli()
|
import os
import os.path
from json import loads
import click
from vendcrawler.scripts.vendcrawlerdb import VendCrawlerDB
class JSONToSQL(object):
def __init__(self, json, user, password, database):
- self.data = loads(json.read())
? -----
+ data = loads(json.read())
- self.db = VendCrawlerDB(user, password, database)
? -----
+ db = VendCrawlerDB(user, password, database)
table = 'items'
columns = ['item_id', 'item_name', 'vendor_id', 'shop_name',
'amount', 'price', 'map', 'datetime']
values = []
for items in data:
for item in items:
value = [int(item['id']),
item['name'],
int(item['vendor_id']),
item['shop'],
int(item['amount'].replace(',', '')),
int(item['price'].replace(',', '')),
item['map'],
item['datetime']]
values.append(value)
- self.vcdb.insert(table, columns, values)
? --
+ self.db.insert(table, columns, values)
@click.command()
@click.argument('json', type=click.File('r'))
@click.argument('user')
@click.argument('password')
@click.argument('database')
def cli(json, user, password, database):
JSONToSQL(json, user, password, database)
if __name__ == '__main__':
cli()
|
2875a8e6c123d3d4f6039e7864ff66373c51daea
|
examples/signal_handlers/signal_handlers.py
|
examples/signal_handlers/signal_handlers.py
|
from riot.app import quit_app, run_tag
from riot.tags.style import parse_style
from riot.tags.tags import parse_tag_from_node
from riot.tags.utils import convert_string_to_node
from riot.virtual_dom import define_tag, mount
sig = define_tag('sig', '''
<sig>
<filler valign="top">
<pile>
<edit caption="What is your name?" class="highlight" id="ask" onchange="{ answer }" />
<div />
<text><span if="{ name }">Nick to meet you, </span><span class="highlight">{ name }</span></text>
<div />
<button id="exit" label="Exit" onclick="{ exit }" />
</pile>
</filler>
<script>
import urwid
def init(self, opts):
import urwid
self.name = opts['name']
def answer(self, edit, text):
self.update({'name': text})
def exit(self, button):
import urwid
raise urwid.ExitMainLoop()
</script>
</sig>
''')
style = '''
.highlight {
foreground: default,bold;
background: default;
mono: bold;
}
'''
root = convert_string_to_node('<sig></sig>')
mount(root, 'sig', 'sig', {'name': 'Default'})
app = parse_tag_from_node(root)
run_tag(app, parse_style(style))
|
from riot.app import quit_app, run_tag
from riot.tags.style import parse_style
from riot.tags.tags import parse_tag_from_node
from riot.tags.utils import convert_string_to_node
from riot.virtual_dom import define_tag, mount
sig = define_tag('sig', '''
<sig>
<filler valign="top">
<pile>
<edit caption="What is your name?" class="highlight" id="ask" onchange="{ answer }" />
<div />
<text><span if="{ name }">Nick to meet you, </span><span class="highlight">{ name }</span></text>
<div />
<button id="exit" label="Exit" onclick="{ exit }" />
</pile>
</filler>
<script>
import urwid
def init(self, opts):
import urwid
self.name = opts['name']
def answer(self, edit, text):
self.name = text
</script>
</sig>
''')
style = '''
.highlight {
foreground: default,bold;
background: default;
mono: bold;
}
'''
root = convert_string_to_node('<sig></sig>')
mount(root, 'sig', 'sig', {'name': 'Default'})
app = parse_tag_from_node(root)
run_tag(app, parse_style(style))
|
Remove useless code in signal handler example.
|
Remove useless code in signal handler example.
|
Python
|
mit
|
soasme/riotpy
|
from riot.app import quit_app, run_tag
from riot.tags.style import parse_style
from riot.tags.tags import parse_tag_from_node
from riot.tags.utils import convert_string_to_node
from riot.virtual_dom import define_tag, mount
sig = define_tag('sig', '''
<sig>
<filler valign="top">
<pile>
<edit caption="What is your name?" class="highlight" id="ask" onchange="{ answer }" />
<div />
<text><span if="{ name }">Nick to meet you, </span><span class="highlight">{ name }</span></text>
<div />
<button id="exit" label="Exit" onclick="{ exit }" />
</pile>
</filler>
<script>
import urwid
def init(self, opts):
import urwid
self.name = opts['name']
def answer(self, edit, text):
+ self.name = text
- self.update({'name': text})
-
- def exit(self, button):
- import urwid
- raise urwid.ExitMainLoop()
-
-
-
</script>
</sig>
''')
style = '''
.highlight {
foreground: default,bold;
background: default;
mono: bold;
}
'''
root = convert_string_to_node('<sig></sig>')
mount(root, 'sig', 'sig', {'name': 'Default'})
app = parse_tag_from_node(root)
run_tag(app, parse_style(style))
|
Remove useless code in signal handler example.
|
## Code Before:
from riot.app import quit_app, run_tag
from riot.tags.style import parse_style
from riot.tags.tags import parse_tag_from_node
from riot.tags.utils import convert_string_to_node
from riot.virtual_dom import define_tag, mount
sig = define_tag('sig', '''
<sig>
<filler valign="top">
<pile>
<edit caption="What is your name?" class="highlight" id="ask" onchange="{ answer }" />
<div />
<text><span if="{ name }">Nick to meet you, </span><span class="highlight">{ name }</span></text>
<div />
<button id="exit" label="Exit" onclick="{ exit }" />
</pile>
</filler>
<script>
import urwid
def init(self, opts):
import urwid
self.name = opts['name']
def answer(self, edit, text):
self.update({'name': text})
def exit(self, button):
import urwid
raise urwid.ExitMainLoop()
</script>
</sig>
''')
style = '''
.highlight {
foreground: default,bold;
background: default;
mono: bold;
}
'''
root = convert_string_to_node('<sig></sig>')
mount(root, 'sig', 'sig', {'name': 'Default'})
app = parse_tag_from_node(root)
run_tag(app, parse_style(style))
## Instruction:
Remove useless code in signal handler example.
## Code After:
from riot.app import quit_app, run_tag
from riot.tags.style import parse_style
from riot.tags.tags import parse_tag_from_node
from riot.tags.utils import convert_string_to_node
from riot.virtual_dom import define_tag, mount
sig = define_tag('sig', '''
<sig>
<filler valign="top">
<pile>
<edit caption="What is your name?" class="highlight" id="ask" onchange="{ answer }" />
<div />
<text><span if="{ name }">Nick to meet you, </span><span class="highlight">{ name }</span></text>
<div />
<button id="exit" label="Exit" onclick="{ exit }" />
</pile>
</filler>
<script>
import urwid
def init(self, opts):
import urwid
self.name = opts['name']
def answer(self, edit, text):
self.name = text
</script>
</sig>
''')
style = '''
.highlight {
foreground: default,bold;
background: default;
mono: bold;
}
'''
root = convert_string_to_node('<sig></sig>')
mount(root, 'sig', 'sig', {'name': 'Default'})
app = parse_tag_from_node(root)
run_tag(app, parse_style(style))
|
from riot.app import quit_app, run_tag
from riot.tags.style import parse_style
from riot.tags.tags import parse_tag_from_node
from riot.tags.utils import convert_string_to_node
from riot.virtual_dom import define_tag, mount
sig = define_tag('sig', '''
<sig>
<filler valign="top">
<pile>
<edit caption="What is your name?" class="highlight" id="ask" onchange="{ answer }" />
<div />
<text><span if="{ name }">Nick to meet you, </span><span class="highlight">{ name }</span></text>
<div />
<button id="exit" label="Exit" onclick="{ exit }" />
</pile>
</filler>
<script>
import urwid
def init(self, opts):
import urwid
self.name = opts['name']
def answer(self, edit, text):
+ self.name = text
- self.update({'name': text})
-
- def exit(self, button):
- import urwid
- raise urwid.ExitMainLoop()
-
-
-
</script>
</sig>
''')
style = '''
.highlight {
foreground: default,bold;
background: default;
mono: bold;
}
'''
root = convert_string_to_node('<sig></sig>')
mount(root, 'sig', 'sig', {'name': 'Default'})
app = parse_tag_from_node(root)
run_tag(app, parse_style(style))
|
e284c0e512edd18ed0ef1259fd4606d630699f3a
|
wtl/wtgithub/models.py
|
wtl/wtgithub/models.py
|
from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
|
from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
def __str__(self):
return '{0}/{1}'.format(self.owner, self.name)
|
Add missing `__str__` to `Repository` model
|
Add missing `__str__` to `Repository` model
Can't use `@python_2_unicode_compatible` without defining `__str__`.
|
Python
|
mit
|
elegion/djangodash2013,elegion/djangodash2013
|
from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
+ def __str__(self):
+ return '{0}/{1}'.format(self.owner, self.name)
+
|
Add missing `__str__` to `Repository` model
|
## Code Before:
from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
## Instruction:
Add missing `__str__` to `Repository` model
## Code After:
from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
def __str__(self):
return '{0}/{1}'.format(self.owner, self.name)
|
from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
+
+ def __str__(self):
+ return '{0}/{1}'.format(self.owner, self.name)
|
e881465050ef9edbf2b47071b1fa2fc27ac26c1a
|
tests/Settings/TestExtruderStack.py
|
tests/Settings/TestExtruderStack.py
|
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
return cura.Settings.ExtruderStack.ExtruderStack
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock())
|
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
return cura.Settings.ExtruderStack.ExtruderStack
#############################START OF TEST CASES################################
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock())
|
Add delimiter between global stuff and test cases
|
Add delimiter between global stuff and test cases
Helps provide some oversight since this module is about to explode in size.
Contributes to issue CURA-3497.
|
Python
|
agpl-3.0
|
hmflash/Cura,ynotstartups/Wanhao,Curahelper/Cura,ynotstartups/Wanhao,hmflash/Cura,fieldOfView/Cura,fieldOfView/Cura,Curahelper/Cura
|
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
return cura.Settings.ExtruderStack.ExtruderStack
+
+ #############################START OF TEST CASES################################
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock())
|
Add delimiter between global stuff and test cases
|
## Code Before:
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
return cura.Settings.ExtruderStack.ExtruderStack
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock())
## Instruction:
Add delimiter between global stuff and test cases
## Code After:
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
return cura.Settings.ExtruderStack.ExtruderStack
#############################START OF TEST CASES################################
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock())
|
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
return cura.Settings.ExtruderStack.ExtruderStack
+
+ #############################START OF TEST CASES################################
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock())
|
ea3f4934ffa8b88d8716f6550134c37e300c4003
|
sqlitebiter/_const.py
|
sqlitebiter/_const.py
|
from __future__ import absolute_import, unicode_literals
PROGRAM_NAME = "sqlitebiter"
MAX_VERBOSITY_LEVEL = 2
IPYNB_FORMAT_NAME_LIST = ["ipynb"]
TABLE_NOT_FOUND_MSG_FORMAT = "table not found in {}"
|
from __future__ import absolute_import, unicode_literals
PROGRAM_NAME = "sqlitebiter"
MAX_VERBOSITY_LEVEL = 2
IPYNB_FORMAT_NAME_LIST = ["ipynb"]
TABLE_NOT_FOUND_MSG_FORMAT = "convertible table not found in {}"
|
Modify a log message template
|
Modify a log message template
|
Python
|
mit
|
thombashi/sqlitebiter,thombashi/sqlitebiter
|
from __future__ import absolute_import, unicode_literals
PROGRAM_NAME = "sqlitebiter"
MAX_VERBOSITY_LEVEL = 2
IPYNB_FORMAT_NAME_LIST = ["ipynb"]
- TABLE_NOT_FOUND_MSG_FORMAT = "table not found in {}"
+ TABLE_NOT_FOUND_MSG_FORMAT = "convertible table not found in {}"
|
Modify a log message template
|
## Code Before:
from __future__ import absolute_import, unicode_literals
PROGRAM_NAME = "sqlitebiter"
MAX_VERBOSITY_LEVEL = 2
IPYNB_FORMAT_NAME_LIST = ["ipynb"]
TABLE_NOT_FOUND_MSG_FORMAT = "table not found in {}"
## Instruction:
Modify a log message template
## Code After:
from __future__ import absolute_import, unicode_literals
PROGRAM_NAME = "sqlitebiter"
MAX_VERBOSITY_LEVEL = 2
IPYNB_FORMAT_NAME_LIST = ["ipynb"]
TABLE_NOT_FOUND_MSG_FORMAT = "convertible table not found in {}"
|
from __future__ import absolute_import, unicode_literals
PROGRAM_NAME = "sqlitebiter"
MAX_VERBOSITY_LEVEL = 2
IPYNB_FORMAT_NAME_LIST = ["ipynb"]
- TABLE_NOT_FOUND_MSG_FORMAT = "table not found in {}"
+ TABLE_NOT_FOUND_MSG_FORMAT = "convertible table not found in {}"
? ++++++++++++
|
13cad8b6fb7c484a492333e86a6e774ce4742a40
|
src/webassets/filter/uglifyjs.py
|
src/webassets/filter/uglifyjs.py
|
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
|
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
"""
Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting.
Additional options may be passed to ``uglifyjs`` using the setting
``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
|
Make UglifyJSFilter docstring more consistent with other filters
|
Make UglifyJSFilter docstring more consistent with other filters
|
Python
|
bsd-2-clause
|
john2x/webassets,scorphus/webassets,aconrad/webassets,0x1997/webassets,heynemann/webassets,0x1997/webassets,glorpen/webassets,JDeuce/webassets,aconrad/webassets,john2x/webassets,JDeuce/webassets,glorpen/webassets,florianjacob/webassets,heynemann/webassets,aconrad/webassets,wijerasa/webassets,florianjacob/webassets,glorpen/webassets,wijerasa/webassets,heynemann/webassets,scorphus/webassets
|
-
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
+ """
+ Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
+
+ UglifyJS is an external tool written for NodeJS; this filter assumes that
+ the ``uglifyjs`` executable is in the path. Otherwise, you may define
+ a ``UGLIFYJS_BIN`` setting.
+
+ Additional options may be passed to ``uglifyjs`` using the setting
+ ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
+ """
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
|
Make UglifyJSFilter docstring more consistent with other filters
|
## Code Before:
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
## Instruction:
Make UglifyJSFilter docstring more consistent with other filters
## Code After:
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
"""
Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting.
Additional options may be passed to ``uglifyjs`` using the setting
``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
|
-
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
+ """
+ Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
+
+ UglifyJS is an external tool written for NodeJS; this filter assumes that
+ the ``uglifyjs`` executable is in the path. Otherwise, you may define
+ a ``UGLIFYJS_BIN`` setting.
+
+ Additional options may be passed to ``uglifyjs`` using the setting
+ ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
+ """
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
|
1abb838a1fa56af25b9c6369dff93c65e17fbc3a
|
manage.py
|
manage.py
|
import os
COV = None
if os.environ.get('FLASK_COVERAGE'):
import coverage
COV = coverage.coverage(branch=True, include='app/*')
COV.start()
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from app import app, db
from config import BASE_DIR
app.config.from_object(os.getenv('BG_CONFIG') or 'config.DevelopmentConfig')
manager = Manager(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
@manager.command
def test(coverage=False):
"""Run the unit tests."""
if coverage and not os.environ.get('FLASK_COVERAGE'):
import sys
os.environ['FLASK_COVERAGE'] = '1'
os.execvp(sys.executable, [sys.executable] + sys.argv)
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if COV:
COV.stop()
COV.save()
print('Coverage Summary:')
COV.report()
covdir = os.path.join(BASE_DIR, 'htmlcov')
COV.html_report(directory=covdir)
print('HTML version: file://%s/index.html' % covdir)
COV.erase()
if __name__ == '__main__':
manager.run()
|
import os
COV = None
if os.environ.get('FLASK_COVERAGE'):
import coverage
COV = coverage.coverage(branch=True, include='app/*')
COV.start()
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from app import app, db
from config import BASE_DIR
app.config.from_object(os.getenv('BG_CONFIG') or 'config.DevelopmentConfig')
manager = Manager(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
@manager.command
def test(coverage=False):
"""Run the unit tests."""
if coverage and not os.environ.get('FLASK_COVERAGE'):
import sys
os.environ['FLASK_COVERAGE'] = '1'
os.execvp(sys.executable, [sys.executable] + sys.argv)
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if COV:
COV.stop()
COV.save()
print('Coverage Summary:')
COV.report()
covdir = os.path.join(BASE_DIR, 'htmlcov')
COV.html_report(directory=covdir)
print('HTML version: file://%s/index.html' % covdir)
if __name__ == '__main__':
manager.run()
|
Remove erase of coverage file as it's needed by coveralls
|
Remove erase of coverage file as it's needed by coveralls
|
Python
|
apache-2.0
|
atindale/business-glossary,atindale/business-glossary
|
import os
COV = None
if os.environ.get('FLASK_COVERAGE'):
import coverage
COV = coverage.coverage(branch=True, include='app/*')
COV.start()
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from app import app, db
from config import BASE_DIR
app.config.from_object(os.getenv('BG_CONFIG') or 'config.DevelopmentConfig')
manager = Manager(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
@manager.command
def test(coverage=False):
"""Run the unit tests."""
if coverage and not os.environ.get('FLASK_COVERAGE'):
import sys
os.environ['FLASK_COVERAGE'] = '1'
os.execvp(sys.executable, [sys.executable] + sys.argv)
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if COV:
COV.stop()
COV.save()
print('Coverage Summary:')
COV.report()
covdir = os.path.join(BASE_DIR, 'htmlcov')
COV.html_report(directory=covdir)
print('HTML version: file://%s/index.html' % covdir)
- COV.erase()
if __name__ == '__main__':
manager.run()
|
Remove erase of coverage file as it's needed by coveralls
|
## Code Before:
import os
COV = None
if os.environ.get('FLASK_COVERAGE'):
import coverage
COV = coverage.coverage(branch=True, include='app/*')
COV.start()
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from app import app, db
from config import BASE_DIR
app.config.from_object(os.getenv('BG_CONFIG') or 'config.DevelopmentConfig')
manager = Manager(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
@manager.command
def test(coverage=False):
"""Run the unit tests."""
if coverage and not os.environ.get('FLASK_COVERAGE'):
import sys
os.environ['FLASK_COVERAGE'] = '1'
os.execvp(sys.executable, [sys.executable] + sys.argv)
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if COV:
COV.stop()
COV.save()
print('Coverage Summary:')
COV.report()
covdir = os.path.join(BASE_DIR, 'htmlcov')
COV.html_report(directory=covdir)
print('HTML version: file://%s/index.html' % covdir)
COV.erase()
if __name__ == '__main__':
manager.run()
## Instruction:
Remove erase of coverage file as it's needed by coveralls
## Code After:
import os
COV = None
if os.environ.get('FLASK_COVERAGE'):
import coverage
COV = coverage.coverage(branch=True, include='app/*')
COV.start()
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from app import app, db
from config import BASE_DIR
app.config.from_object(os.getenv('BG_CONFIG') or 'config.DevelopmentConfig')
manager = Manager(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
@manager.command
def test(coverage=False):
"""Run the unit tests."""
if coverage and not os.environ.get('FLASK_COVERAGE'):
import sys
os.environ['FLASK_COVERAGE'] = '1'
os.execvp(sys.executable, [sys.executable] + sys.argv)
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if COV:
COV.stop()
COV.save()
print('Coverage Summary:')
COV.report()
covdir = os.path.join(BASE_DIR, 'htmlcov')
COV.html_report(directory=covdir)
print('HTML version: file://%s/index.html' % covdir)
if __name__ == '__main__':
manager.run()
|
import os
COV = None
if os.environ.get('FLASK_COVERAGE'):
import coverage
COV = coverage.coverage(branch=True, include='app/*')
COV.start()
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from app import app, db
from config import BASE_DIR
app.config.from_object(os.getenv('BG_CONFIG') or 'config.DevelopmentConfig')
manager = Manager(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
@manager.command
def test(coverage=False):
"""Run the unit tests."""
if coverage and not os.environ.get('FLASK_COVERAGE'):
import sys
os.environ['FLASK_COVERAGE'] = '1'
os.execvp(sys.executable, [sys.executable] + sys.argv)
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if COV:
COV.stop()
COV.save()
print('Coverage Summary:')
COV.report()
covdir = os.path.join(BASE_DIR, 'htmlcov')
COV.html_report(directory=covdir)
print('HTML version: file://%s/index.html' % covdir)
- COV.erase()
if __name__ == '__main__':
manager.run()
|
88a31ebcd7b65f9282bb0d0a19ad299c1ad431ec
|
spectral_cube/__init__.py
|
spectral_cube/__init__.py
|
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import *
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_:
from .spectral_cube import SpectralCube, VaryingResolutionSpectralCube
from .stokes_spectral_cube import StokesSpectralCube
from .masks import *
|
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import *
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_:
from .spectral_cube import SpectralCube, VaryingResolutionSpectralCube
from .stokes_spectral_cube import StokesSpectralCube
from .masks import *
from .lower_dimensional_structures import Projection
|
Make Projection importable from the top level of the package
|
Make Projection importable from the top level of the package
|
Python
|
bsd-3-clause
|
e-koch/spectral-cube,jzuhone/spectral-cube,radio-astro-tools/spectral-cube,keflavich/spectral-cube,low-sky/spectral-cube
|
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import *
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_:
from .spectral_cube import SpectralCube, VaryingResolutionSpectralCube
from .stokes_spectral_cube import StokesSpectralCube
from .masks import *
+ from .lower_dimensional_structures import Projection
|
Make Projection importable from the top level of the package
|
## Code Before:
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import *
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_:
from .spectral_cube import SpectralCube, VaryingResolutionSpectralCube
from .stokes_spectral_cube import StokesSpectralCube
from .masks import *
## Instruction:
Make Projection importable from the top level of the package
## Code After:
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import *
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_:
from .spectral_cube import SpectralCube, VaryingResolutionSpectralCube
from .stokes_spectral_cube import StokesSpectralCube
from .masks import *
from .lower_dimensional_structures import Projection
|
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import *
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_:
from .spectral_cube import SpectralCube, VaryingResolutionSpectralCube
from .stokes_spectral_cube import StokesSpectralCube
from .masks import *
+ from .lower_dimensional_structures import Projection
|
00fc915c09e0052289fa28d7da174e44f838c15b
|
quickstart/python/voice/example-1-make-call/outgoing_call.6.x.py
|
quickstart/python/voice/example-1-make-call/outgoing_call.6.x.py
|
from twilio.rest import Client
# Your Account Sid and Auth Token can be found at https://www.twilio.com/console
account_sid = "AC6062f793ce5918fef56b1681e6446e87"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
call = client.calls.create(
to="+15558675310",
from_="+15017122661",
url="http://demo.twilio.com/docs/voice.xml"
)
print(call.sid)
|
from twilio.rest import Client
# Your Account Sid and Auth Token can be found at https://www.twilio.com/console
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
call = client.calls.create(
to="+15558675310",
from_="+15017122661",
url="http://demo.twilio.com/docs/voice.xml"
)
print(call.sid)
|
Use placeholder for account sid :facepalm:
|
Use placeholder for account sid :facepalm:
|
Python
|
mit
|
TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets
|
from twilio.rest import Client
# Your Account Sid and Auth Token can be found at https://www.twilio.com/console
- account_sid = "AC6062f793ce5918fef56b1681e6446e87"
+ account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
call = client.calls.create(
to="+15558675310",
from_="+15017122661",
url="http://demo.twilio.com/docs/voice.xml"
)
print(call.sid)
|
Use placeholder for account sid :facepalm:
|
## Code Before:
from twilio.rest import Client
# Your Account Sid and Auth Token can be found at https://www.twilio.com/console
account_sid = "AC6062f793ce5918fef56b1681e6446e87"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
call = client.calls.create(
to="+15558675310",
from_="+15017122661",
url="http://demo.twilio.com/docs/voice.xml"
)
print(call.sid)
## Instruction:
Use placeholder for account sid :facepalm:
## Code After:
from twilio.rest import Client
# Your Account Sid and Auth Token can be found at https://www.twilio.com/console
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
call = client.calls.create(
to="+15558675310",
from_="+15017122661",
url="http://demo.twilio.com/docs/voice.xml"
)
print(call.sid)
|
from twilio.rest import Client
# Your Account Sid and Auth Token can be found at https://www.twilio.com/console
- account_sid = "AC6062f793ce5918fef56b1681e6446e87"
+ account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
call = client.calls.create(
to="+15558675310",
from_="+15017122661",
url="http://demo.twilio.com/docs/voice.xml"
)
print(call.sid)
|
ed8b6b615bc8d006e3e31843fa31f0bda09109ed
|
spec/puzzle/examples/public_domain/zebra_puzzle_spec.py
|
spec/puzzle/examples/public_domain/zebra_puzzle_spec.py
|
import astor
from data import warehouse
from puzzle.examples.public_domain import zebra_puzzle
from puzzle.problems import logic_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with _description('zebra_puzzle'):
with description('solution'):
with before.all:
warehouse.save()
prod_config.init()
self.subject = zebra_puzzle.get()
with after.all:
prod_config.reset()
warehouse.restore()
with it('identifies puzzle type'):
problems = self.subject.problems()
expect(problems).to(have_len(1))
problem = problems[0]
expect(problem).to(be_a(logic_problem.LogicProblem))
with it('parses expressions'):
problem = self.subject.problems()[0]
expect(astor.to_source(problem._parse())).to(
look_like(zebra_puzzle.PARSED))
with it('exports a model'):
problem = self.subject.problems()[0]
expect(problem.solution).to(look_like(zebra_puzzle.SOLUTION))
|
import astor
from data import warehouse
from puzzle.examples.public_domain import zebra_puzzle
from puzzle.problems import logic_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with _description('zebra_puzzle'):
with description('solution'):
with before.all:
warehouse.save()
prod_config.init()
self.subject = zebra_puzzle.get()
with after.all:
prod_config.reset()
warehouse.restore()
with it('identifies puzzle type'):
problems = self.subject.problems()
expect(problems).to(have_len(1))
problem = problems[0]
expect(problem).to(be_a(logic_problem.LogicProblem))
with it('parses expressions'):
parsed = logic_problem._parse(zebra_puzzle.SOURCE.split('\n'))
expect(astor.to_source(parsed)).to(look_like(zebra_puzzle.PARSED))
with it('models puzzle'):
model = logic_problem._model(zebra_puzzle.SOURCE.split('\n'))
print(str(model))
with it('exports a solution'):
problem = self.subject.problems()[0]
expect(problem.solution).to(look_like(zebra_puzzle.SOLUTION))
|
Update zebra puzzle to reflect LogicProblem changes.
|
Update zebra puzzle to reflect LogicProblem changes.
|
Python
|
mit
|
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
|
import astor
from data import warehouse
from puzzle.examples.public_domain import zebra_puzzle
from puzzle.problems import logic_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with _description('zebra_puzzle'):
with description('solution'):
with before.all:
warehouse.save()
prod_config.init()
self.subject = zebra_puzzle.get()
with after.all:
prod_config.reset()
warehouse.restore()
with it('identifies puzzle type'):
problems = self.subject.problems()
expect(problems).to(have_len(1))
problem = problems[0]
expect(problem).to(be_a(logic_problem.LogicProblem))
with it('parses expressions'):
+ parsed = logic_problem._parse(zebra_puzzle.SOURCE.split('\n'))
+ expect(astor.to_source(parsed)).to(look_like(zebra_puzzle.PARSED))
- problem = self.subject.problems()[0]
- expect(astor.to_source(problem._parse())).to(
- look_like(zebra_puzzle.PARSED))
+ with it('models puzzle'):
+ model = logic_problem._model(zebra_puzzle.SOURCE.split('\n'))
+ print(str(model))
+
- with it('exports a model'):
+ with it('exports a solution'):
problem = self.subject.problems()[0]
expect(problem.solution).to(look_like(zebra_puzzle.SOLUTION))
|
Update zebra puzzle to reflect LogicProblem changes.
|
## Code Before:
import astor
from data import warehouse
from puzzle.examples.public_domain import zebra_puzzle
from puzzle.problems import logic_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with _description('zebra_puzzle'):
with description('solution'):
with before.all:
warehouse.save()
prod_config.init()
self.subject = zebra_puzzle.get()
with after.all:
prod_config.reset()
warehouse.restore()
with it('identifies puzzle type'):
problems = self.subject.problems()
expect(problems).to(have_len(1))
problem = problems[0]
expect(problem).to(be_a(logic_problem.LogicProblem))
with it('parses expressions'):
problem = self.subject.problems()[0]
expect(astor.to_source(problem._parse())).to(
look_like(zebra_puzzle.PARSED))
with it('exports a model'):
problem = self.subject.problems()[0]
expect(problem.solution).to(look_like(zebra_puzzle.SOLUTION))
## Instruction:
Update zebra puzzle to reflect LogicProblem changes.
## Code After:
import astor
from data import warehouse
from puzzle.examples.public_domain import zebra_puzzle
from puzzle.problems import logic_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with _description('zebra_puzzle'):
with description('solution'):
with before.all:
warehouse.save()
prod_config.init()
self.subject = zebra_puzzle.get()
with after.all:
prod_config.reset()
warehouse.restore()
with it('identifies puzzle type'):
problems = self.subject.problems()
expect(problems).to(have_len(1))
problem = problems[0]
expect(problem).to(be_a(logic_problem.LogicProblem))
with it('parses expressions'):
parsed = logic_problem._parse(zebra_puzzle.SOURCE.split('\n'))
expect(astor.to_source(parsed)).to(look_like(zebra_puzzle.PARSED))
with it('models puzzle'):
model = logic_problem._model(zebra_puzzle.SOURCE.split('\n'))
print(str(model))
with it('exports a solution'):
problem = self.subject.problems()[0]
expect(problem.solution).to(look_like(zebra_puzzle.SOLUTION))
|
import astor
from data import warehouse
from puzzle.examples.public_domain import zebra_puzzle
from puzzle.problems import logic_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with _description('zebra_puzzle'):
with description('solution'):
with before.all:
warehouse.save()
prod_config.init()
self.subject = zebra_puzzle.get()
with after.all:
prod_config.reset()
warehouse.restore()
with it('identifies puzzle type'):
problems = self.subject.problems()
expect(problems).to(have_len(1))
problem = problems[0]
expect(problem).to(be_a(logic_problem.LogicProblem))
with it('parses expressions'):
+ parsed = logic_problem._parse(zebra_puzzle.SOURCE.split('\n'))
+ expect(astor.to_source(parsed)).to(look_like(zebra_puzzle.PARSED))
- problem = self.subject.problems()[0]
- expect(astor.to_source(problem._parse())).to(
- look_like(zebra_puzzle.PARSED))
+ with it('models puzzle'):
+ model = logic_problem._model(zebra_puzzle.SOURCE.split('\n'))
+ print(str(model))
+
- with it('exports a model'):
? ^ --
+ with it('exports a solution'):
? ^ +++++
problem = self.subject.problems()[0]
expect(problem.solution).to(look_like(zebra_puzzle.SOLUTION))
|
5c43036e44e94d55c86567d4e98689acde0510e5
|
app/py/cuda_sort/sort_sep.py
|
app/py/cuda_sort/sort_sep.py
|
from cudatext import *
def _sort(s, sep_k, sep_v):
if not sep_k in s:
return s
key, val = s.split(sep_k, 1)
vals = sorted(val.split(sep_v))
return key+sep_k+sep_v.join(vals)
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
'Separator of prefix, to skip prefix:', '=',
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
if len(sep_k)!=1 or len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
|
from cudatext import *
def _sort(s, sep_k, sep_v):
if sep_k:
if not sep_k in s:
return s
key, val = s.split(sep_k, 1)
vals = sorted(val.split(sep_v))
return key+sep_k+sep_v.join(vals)
else:
vals = sorted(s.split(sep_v))
return sep_v.join(vals)
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
'Separator of prefix, to skip prefix (optional):', '=',
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
if len(sep_k)>1:
msg_status('Separators must have length=1')
continue
if len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
|
Sort new cmd: allow empty separator
|
Sort new cmd: allow empty separator
|
Python
|
mpl-2.0
|
Alexey-T/CudaText,Alexey-T/CudaText,Alexey-T/CudaText,Alexey-T/CudaText,Alexey-T/CudaText,Alexey-T/CudaText,Alexey-T/CudaText,Alexey-T/CudaText
|
from cudatext import *
def _sort(s, sep_k, sep_v):
+ if sep_k:
- if not sep_k in s:
+ if not sep_k in s:
- return s
+ return s
- key, val = s.split(sep_k, 1)
+ key, val = s.split(sep_k, 1)
- vals = sorted(val.split(sep_v))
+ vals = sorted(val.split(sep_v))
- return key+sep_k+sep_v.join(vals)
+ return key+sep_k+sep_v.join(vals)
+ else:
+ vals = sorted(s.split(sep_v))
+ return sep_v.join(vals)
+
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
- 'Separator of prefix, to skip prefix:', '=',
+ 'Separator of prefix, to skip prefix (optional):', '=',
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
- if len(sep_k)!=1 or len(sep_v)!=1:
+ if len(sep_k)>1:
+ msg_status('Separators must have length=1')
+ continue
+
+ if len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
|
Sort new cmd: allow empty separator
|
## Code Before:
from cudatext import *
def _sort(s, sep_k, sep_v):
if not sep_k in s:
return s
key, val = s.split(sep_k, 1)
vals = sorted(val.split(sep_v))
return key+sep_k+sep_v.join(vals)
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
'Separator of prefix, to skip prefix:', '=',
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
if len(sep_k)!=1 or len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
## Instruction:
Sort new cmd: allow empty separator
## Code After:
from cudatext import *
def _sort(s, sep_k, sep_v):
if sep_k:
if not sep_k in s:
return s
key, val = s.split(sep_k, 1)
vals = sorted(val.split(sep_v))
return key+sep_k+sep_v.join(vals)
else:
vals = sorted(s.split(sep_v))
return sep_v.join(vals)
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
'Separator of prefix, to skip prefix (optional):', '=',
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
if len(sep_k)>1:
msg_status('Separators must have length=1')
continue
if len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
|
from cudatext import *
def _sort(s, sep_k, sep_v):
+ if sep_k:
- if not sep_k in s:
+ if not sep_k in s:
? ++++
- return s
+ return s
? ++++
- key, val = s.split(sep_k, 1)
+ key, val = s.split(sep_k, 1)
? ++++
- vals = sorted(val.split(sep_v))
+ vals = sorted(val.split(sep_v))
? ++++
- return key+sep_k+sep_v.join(vals)
+ return key+sep_k+sep_v.join(vals)
? ++++
+ else:
+ vals = sorted(s.split(sep_v))
+ return sep_v.join(vals)
+
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
- 'Separator of prefix, to skip prefix:', '=',
+ 'Separator of prefix, to skip prefix (optional):', '=',
? +++++++++++
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
- if len(sep_k)!=1 or len(sep_v)!=1:
+ if len(sep_k)>1:
+ msg_status('Separators must have length=1')
+ continue
+
+ if len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
|
d927ada17522edfb91489e8558bbc88ff741a3c5
|
bokeh/models/widgets/markups.py
|
bokeh/models/widgets/markups.py
|
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
class Paragraph(Widget):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
|
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
@abstract
class Markup(Widget):
""" Base class for HTML markup widget models. """
class Paragraph(Markup):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
|
Introduce Markup abstract base class
|
Introduce Markup abstract base class
|
Python
|
bsd-3-clause
|
aiguofer/bokeh,jakirkham/bokeh,ChinaQuants/bokeh,percyfal/bokeh,bokeh/bokeh,philippjfr/bokeh,philippjfr/bokeh,stonebig/bokeh,percyfal/bokeh,muku42/bokeh,DuCorey/bokeh,muku42/bokeh,ericmjl/bokeh,azjps/bokeh,muku42/bokeh,percyfal/bokeh,philippjfr/bokeh,msarahan/bokeh,deeplook/bokeh,Karel-van-de-Plassche/bokeh,htygithub/bokeh,philippjfr/bokeh,justacec/bokeh,schoolie/bokeh,jplourenco/bokeh,evidation-health/bokeh,srinathv/bokeh,tacaswell/bokeh,ptitjano/bokeh,dennisobrien/bokeh,dennisobrien/bokeh,quasiben/bokeh,ChinaQuants/bokeh,clairetang6/bokeh,khkaminska/bokeh,evidation-health/bokeh,quasiben/bokeh,Karel-van-de-Plassche/bokeh,DuCorey/bokeh,maxalbert/bokeh,rs2/bokeh,schoolie/bokeh,timsnyder/bokeh,msarahan/bokeh,deeplook/bokeh,jplourenco/bokeh,azjps/bokeh,stonebig/bokeh,khkaminska/bokeh,ericmjl/bokeh,aiguofer/bokeh,msarahan/bokeh,mindriot101/bokeh,dennisobrien/bokeh,gpfreitas/bokeh,htygithub/bokeh,maxalbert/bokeh,percyfal/bokeh,timsnyder/bokeh,draperjames/bokeh,aavanian/bokeh,evidation-health/bokeh,mindriot101/bokeh,Karel-van-de-Plassche/bokeh,aavanian/bokeh,clairetang6/bokeh,phobson/bokeh,ericmjl/bokeh,evidation-health/bokeh,rs2/bokeh,draperjames/bokeh,jplourenco/bokeh,ptitjano/bokeh,schoolie/bokeh,ptitjano/bokeh,maxalbert/bokeh,justacec/bokeh,ChinaQuants/bokeh,ptitjano/bokeh,srinathv/bokeh,bokeh/bokeh,phobson/bokeh,azjps/bokeh,bokeh/bokeh,percyfal/bokeh,deeplook/bokeh,phobson/bokeh,mindriot101/bokeh,maxalbert/bokeh,tacaswell/bokeh,muku42/bokeh,jakirkham/bokeh,mindriot101/bokeh,justacec/bokeh,ericmjl/bokeh,deeplook/bokeh,jakirkham/bokeh,htygithub/bokeh,tacaswell/bokeh,gpfreitas/bokeh,srinathv/bokeh,draperjames/bokeh,aavanian/bokeh,khkaminska/bokeh,bokeh/bokeh,KasperPRasmussen/bokeh,azjps/bokeh,azjps/bokeh,timsnyder/bokeh,schoolie/bokeh,justacec/bokeh,philippjfr/bokeh,KasperPRasmussen/bokeh,ChinaQuants/bokeh,DuCorey/bokeh,stonebig/bokeh,tacaswell/bokeh,schoolie/bokeh,rs2/bokeh,phobson/bokeh,phobson/bokeh,aiguofer/bokeh,khkaminska/bokeh,clairetang6/bokeh,Karel-van-de-Plass
che/bokeh,htygithub/bokeh,dennisobrien/bokeh,srinathv/bokeh,dennisobrien/bokeh,draperjames/bokeh,aavanian/bokeh,gpfreitas/bokeh,KasperPRasmussen/bokeh,clairetang6/bokeh,ptitjano/bokeh,Karel-van-de-Plassche/bokeh,draperjames/bokeh,KasperPRasmussen/bokeh,rs2/bokeh,jakirkham/bokeh,DuCorey/bokeh,DuCorey/bokeh,aavanian/bokeh,aiguofer/bokeh,ericmjl/bokeh,msarahan/bokeh,rs2/bokeh,timsnyder/bokeh,quasiben/bokeh,jakirkham/bokeh,gpfreitas/bokeh,aiguofer/bokeh,KasperPRasmussen/bokeh,stonebig/bokeh,bokeh/bokeh,timsnyder/bokeh,jplourenco/bokeh
|
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
+ @abstract
- class Paragraph(Widget):
+ class Markup(Widget):
+ """ Base class for HTML markup widget models. """
+
+ class Paragraph(Markup):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
|
Introduce Markup abstract base class
|
## Code Before:
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
class Paragraph(Widget):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
## Instruction:
Introduce Markup abstract base class
## Code After:
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
@abstract
class Markup(Widget):
""" Base class for HTML markup widget models. """
class Paragraph(Markup):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
|
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
+ @abstract
- class Paragraph(Widget):
? ^ ^^^^ -
+ class Markup(Widget):
? ^ ^^
+ """ Base class for HTML markup widget models. """
+
+ class Paragraph(Markup):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
|
429d701cce7ad2cf8fb77f169c4af6f2f27562fd
|
db_mutex/models.py
|
db_mutex/models.py
|
from django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=255, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
|
from django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=255, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = "db_mutex"
|
Declare app_label in model Meta class to work with Django 1.9
|
Declare app_label in model Meta class to work with Django 1.9
Fixes RemovedInDjango19Warning:
Model class db_mutex.models.DBMutex doesn't declare an explicit
app_label and either isn't in an application in INSTALLED_APPS or else
was imported before its application was loaded. This will no longer be
supported in Django 1.9.
|
Python
|
mit
|
minervaproject/django-db-mutex
|
from django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=255, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
+ class Meta:
+ app_label = "db_mutex"
+
|
Declare app_label in model Meta class to work with Django 1.9
|
## Code Before:
from django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=255, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
## Instruction:
Declare app_label in model Meta class to work with Django 1.9
## Code After:
from django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=255, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = "db_mutex"
|
from django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=255, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
+
+ class Meta:
+ app_label = "db_mutex"
|
6028b113ed37489d51a68dc5f1ae6ec4c9a14540
|
jsk_apc2016_common/node_scripts/visualize_pick_json.py
|
jsk_apc2016_common/node_scripts/visualize_pick_json.py
|
import argparse
import cv_bridge
import rospy
from sensor_msgs.msg import Image
import jsk_apc2016_common
def publish_cb(event):
imgmsg.header.stamp = rospy.Time.now()
pub.publish(imgmsg)
if __name__ == '__main__':
rospy.init_node('visualize_pick_json')
pub = rospy.Publisher('~output', Image, queue_size=10)
parser = argparse.ArgumentParser()
parser.add_argument('json',
help='JSON file with bin_contents and work_order')
args = parser.parse_args(rospy.myargv()[1:])
json = args.json
img = jsk_apc2016_common.visualize_pick_json(json)
br = cv_bridge.CvBridge()
imgmsg = br.cv2_to_imgmsg(img, encoding='bgr8')
timer = rospy.Timer(rospy.Duration(0.1), publish_cb)
rospy.spin()
|
import argparse
import matplotlib.pyplot as plt
import cv_bridge
import rospy
from sensor_msgs.msg import Image
import jsk_apc2016_common
def visualize_cb(event):
if pub.get_num_connections() > 0:
imgmsg.header.stamp = rospy.Time.now()
pub.publish(imgmsg)
if display:
global displayed_img
img_rgb = img[:, :, ::-1]
plt.axis('off')
plt.tight_layout()
if displayed_img and displayed_img._imcache is None:
plt.close()
if displayed_img is None:
displayed_img = plt.imshow(img_rgb)
else:
displayed_img.set_data(img_rgb)
plt.pause(0.01)
if __name__ == '__main__':
rospy.init_node('visualize_pick_json')
pub = rospy.Publisher('~output', Image, queue_size=10)
parser = argparse.ArgumentParser()
parser.add_argument('json',
help='JSON file with bin_contents and work_order')
parser.add_argument('-d', '--display', action='store_true',
help='Display with a window')
args = parser.parse_args(rospy.myargv()[1:])
json = args.json
display = args.display
displayed_img = None
img = jsk_apc2016_common.visualize_pick_json(json)
br = cv_bridge.CvBridge()
imgmsg = br.cv2_to_imgmsg(img, encoding='bgr8')
timer = rospy.Timer(rospy.Duration(0.1), visualize_cb)
rospy.spin()
|
Add mode to display json with --display
|
Add mode to display json with --display
|
Python
|
bsd-3-clause
|
pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc
|
import argparse
+
+ import matplotlib.pyplot as plt
import cv_bridge
import rospy
from sensor_msgs.msg import Image
import jsk_apc2016_common
- def publish_cb(event):
+ def visualize_cb(event):
+ if pub.get_num_connections() > 0:
- imgmsg.header.stamp = rospy.Time.now()
+ imgmsg.header.stamp = rospy.Time.now()
- pub.publish(imgmsg)
+ pub.publish(imgmsg)
+ if display:
+ global displayed_img
+ img_rgb = img[:, :, ::-1]
+ plt.axis('off')
+ plt.tight_layout()
+ if displayed_img and displayed_img._imcache is None:
+ plt.close()
+ if displayed_img is None:
+ displayed_img = plt.imshow(img_rgb)
+ else:
+ displayed_img.set_data(img_rgb)
+ plt.pause(0.01)
if __name__ == '__main__':
rospy.init_node('visualize_pick_json')
pub = rospy.Publisher('~output', Image, queue_size=10)
parser = argparse.ArgumentParser()
parser.add_argument('json',
help='JSON file with bin_contents and work_order')
+ parser.add_argument('-d', '--display', action='store_true',
+ help='Display with a window')
args = parser.parse_args(rospy.myargv()[1:])
json = args.json
+ display = args.display
+ displayed_img = None
img = jsk_apc2016_common.visualize_pick_json(json)
br = cv_bridge.CvBridge()
imgmsg = br.cv2_to_imgmsg(img, encoding='bgr8')
- timer = rospy.Timer(rospy.Duration(0.1), publish_cb)
+ timer = rospy.Timer(rospy.Duration(0.1), visualize_cb)
rospy.spin()
|
Add mode to display json with --display
|
## Code Before:
import argparse
import cv_bridge
import rospy
from sensor_msgs.msg import Image
import jsk_apc2016_common
def publish_cb(event):
imgmsg.header.stamp = rospy.Time.now()
pub.publish(imgmsg)
if __name__ == '__main__':
rospy.init_node('visualize_pick_json')
pub = rospy.Publisher('~output', Image, queue_size=10)
parser = argparse.ArgumentParser()
parser.add_argument('json',
help='JSON file with bin_contents and work_order')
args = parser.parse_args(rospy.myargv()[1:])
json = args.json
img = jsk_apc2016_common.visualize_pick_json(json)
br = cv_bridge.CvBridge()
imgmsg = br.cv2_to_imgmsg(img, encoding='bgr8')
timer = rospy.Timer(rospy.Duration(0.1), publish_cb)
rospy.spin()
## Instruction:
Add mode to display json with --display
## Code After:
import argparse
import matplotlib.pyplot as plt
import cv_bridge
import rospy
from sensor_msgs.msg import Image
import jsk_apc2016_common
def visualize_cb(event):
if pub.get_num_connections() > 0:
imgmsg.header.stamp = rospy.Time.now()
pub.publish(imgmsg)
if display:
global displayed_img
img_rgb = img[:, :, ::-1]
plt.axis('off')
plt.tight_layout()
if displayed_img and displayed_img._imcache is None:
plt.close()
if displayed_img is None:
displayed_img = plt.imshow(img_rgb)
else:
displayed_img.set_data(img_rgb)
plt.pause(0.01)
if __name__ == '__main__':
rospy.init_node('visualize_pick_json')
pub = rospy.Publisher('~output', Image, queue_size=10)
parser = argparse.ArgumentParser()
parser.add_argument('json',
help='JSON file with bin_contents and work_order')
parser.add_argument('-d', '--display', action='store_true',
help='Display with a window')
args = parser.parse_args(rospy.myargv()[1:])
json = args.json
display = args.display
displayed_img = None
img = jsk_apc2016_common.visualize_pick_json(json)
br = cv_bridge.CvBridge()
imgmsg = br.cv2_to_imgmsg(img, encoding='bgr8')
timer = rospy.Timer(rospy.Duration(0.1), visualize_cb)
rospy.spin()
|
import argparse
+
+ import matplotlib.pyplot as plt
import cv_bridge
import rospy
from sensor_msgs.msg import Image
import jsk_apc2016_common
- def publish_cb(event):
? ^ ^ ^^
+ def visualize_cb(event):
? ^^^ ^ ^^
+ if pub.get_num_connections() > 0:
- imgmsg.header.stamp = rospy.Time.now()
+ imgmsg.header.stamp = rospy.Time.now()
? ++++
- pub.publish(imgmsg)
+ pub.publish(imgmsg)
? ++++
+ if display:
+ global displayed_img
+ img_rgb = img[:, :, ::-1]
+ plt.axis('off')
+ plt.tight_layout()
+ if displayed_img and displayed_img._imcache is None:
+ plt.close()
+ if displayed_img is None:
+ displayed_img = plt.imshow(img_rgb)
+ else:
+ displayed_img.set_data(img_rgb)
+ plt.pause(0.01)
if __name__ == '__main__':
rospy.init_node('visualize_pick_json')
pub = rospy.Publisher('~output', Image, queue_size=10)
parser = argparse.ArgumentParser()
parser.add_argument('json',
help='JSON file with bin_contents and work_order')
+ parser.add_argument('-d', '--display', action='store_true',
+ help='Display with a window')
args = parser.parse_args(rospy.myargv()[1:])
json = args.json
+ display = args.display
+ displayed_img = None
img = jsk_apc2016_common.visualize_pick_json(json)
br = cv_bridge.CvBridge()
imgmsg = br.cv2_to_imgmsg(img, encoding='bgr8')
- timer = rospy.Timer(rospy.Duration(0.1), publish_cb)
? ^ ^ ^^
+ timer = rospy.Timer(rospy.Duration(0.1), visualize_cb)
? ^^^ ^ ^^
rospy.spin()
|
945bb3897abb55e1b0f4f9fc97644bc22dd54208
|
simuvex/concretization_strategies/__init__.py
|
simuvex/concretization_strategies/__init__.py
|
from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
|
from angr.concretization_strategies import *
from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
|
Fix compat bug in concretization_strategies
|
Fix compat bug in concretization_strategies
|
Python
|
bsd-2-clause
|
angr/simuvex
|
+ from angr.concretization_strategies import *
from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
|
Fix compat bug in concretization_strategies
|
## Code Before:
from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
## Instruction:
Fix compat bug in concretization_strategies
## Code After:
from angr.concretization_strategies import *
from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
|
+ from angr.concretization_strategies import *
from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
|
0a035a93666eb98b460d083e5dd822d7adb0614f
|
us_ignite/blog/admin.py
|
us_ignite/blog/admin.py
|
from django import forms
from django.contrib import admin
from django.contrib.auth.models import User
from us_ignite.common import sanitizer
from us_ignite.blog.models import BlogLink, Post, PostAttachment
from tinymce.widgets import TinyMCE
class PostAdminForm(forms.ModelForm):
author = forms.ModelChoiceField(
queryset=User.objects.filter(is_superuser=True))
def clean_content(self):
if 'content' in self.cleaned_data:
return sanitizer.sanitize(self.cleaned_data['content'])
class Meta:
model = Post
widgets = {
'content': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
class PostAttachmentInline(admin.StackedInline):
model = PostAttachment
class PostAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'is_published', 'is_featured', 'status')
list_filter = ('status', 'publication_date')
search_fields = ('slug', 'title', 'body', 'summary')
date_hierarchy = 'publication_date'
prepopulated_fields = {'slug': ('title',)}
form = PostAdminForm
inlines = [PostAttachmentInline]
class BlogLinkAdmin(admin.ModelAdmin):
list_display = ('name', 'url')
search_fields = ('name', 'url')
list_filter = ('created', )
date_hierarchy = 'created'
admin.site.register(Post, PostAdmin)
admin.site.register(BlogLink, BlogLinkAdmin)
|
from django import forms
from django.contrib import admin
from django.contrib.auth.models import User
from us_ignite.common import sanitizer
from us_ignite.blog.models import BlogLink, Post, PostAttachment
from tinymce.widgets import TinyMCE
class PostAdminForm(forms.ModelForm):
author = forms.ModelChoiceField(
queryset=User.objects.filter(is_superuser=True), required=False)
def clean_content(self):
if 'content' in self.cleaned_data:
return sanitizer.sanitize(self.cleaned_data['content'])
class Meta:
model = Post
widgets = {
'content': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
class PostAttachmentInline(admin.StackedInline):
model = PostAttachment
class PostAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'is_published', 'is_featured', 'status')
list_filter = ('status', 'publication_date')
search_fields = ('slug', 'title', 'body', 'summary')
date_hierarchy = 'publication_date'
prepopulated_fields = {'slug': ('title',)}
form = PostAdminForm
inlines = [PostAttachmentInline]
class BlogLinkAdmin(admin.ModelAdmin):
list_display = ('name', 'url')
search_fields = ('name', 'url')
list_filter = ('created', )
date_hierarchy = 'created'
admin.site.register(Post, PostAdmin)
admin.site.register(BlogLink, BlogLinkAdmin)
|
Make the blog author in the form a non required field.
|
Make the blog author in the form a non required field.
|
Python
|
bsd-3-clause
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
from django import forms
from django.contrib import admin
from django.contrib.auth.models import User
from us_ignite.common import sanitizer
from us_ignite.blog.models import BlogLink, Post, PostAttachment
from tinymce.widgets import TinyMCE
class PostAdminForm(forms.ModelForm):
author = forms.ModelChoiceField(
- queryset=User.objects.filter(is_superuser=True))
+ queryset=User.objects.filter(is_superuser=True), required=False)
def clean_content(self):
if 'content' in self.cleaned_data:
return sanitizer.sanitize(self.cleaned_data['content'])
class Meta:
model = Post
widgets = {
'content': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
class PostAttachmentInline(admin.StackedInline):
model = PostAttachment
class PostAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'is_published', 'is_featured', 'status')
list_filter = ('status', 'publication_date')
search_fields = ('slug', 'title', 'body', 'summary')
date_hierarchy = 'publication_date'
prepopulated_fields = {'slug': ('title',)}
form = PostAdminForm
inlines = [PostAttachmentInline]
class BlogLinkAdmin(admin.ModelAdmin):
list_display = ('name', 'url')
search_fields = ('name', 'url')
list_filter = ('created', )
date_hierarchy = 'created'
admin.site.register(Post, PostAdmin)
admin.site.register(BlogLink, BlogLinkAdmin)
|
Make the blog author in the form a non required field.
|
## Code Before:
from django import forms
from django.contrib import admin
from django.contrib.auth.models import User
from us_ignite.common import sanitizer
from us_ignite.blog.models import BlogLink, Post, PostAttachment
from tinymce.widgets import TinyMCE
class PostAdminForm(forms.ModelForm):
author = forms.ModelChoiceField(
queryset=User.objects.filter(is_superuser=True))
def clean_content(self):
if 'content' in self.cleaned_data:
return sanitizer.sanitize(self.cleaned_data['content'])
class Meta:
model = Post
widgets = {
'content': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
class PostAttachmentInline(admin.StackedInline):
model = PostAttachment
class PostAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'is_published', 'is_featured', 'status')
list_filter = ('status', 'publication_date')
search_fields = ('slug', 'title', 'body', 'summary')
date_hierarchy = 'publication_date'
prepopulated_fields = {'slug': ('title',)}
form = PostAdminForm
inlines = [PostAttachmentInline]
class BlogLinkAdmin(admin.ModelAdmin):
list_display = ('name', 'url')
search_fields = ('name', 'url')
list_filter = ('created', )
date_hierarchy = 'created'
admin.site.register(Post, PostAdmin)
admin.site.register(BlogLink, BlogLinkAdmin)
## Instruction:
Make the blog author in the form a non required field.
## Code After:
from django import forms
from django.contrib import admin
from django.contrib.auth.models import User
from us_ignite.common import sanitizer
from us_ignite.blog.models import BlogLink, Post, PostAttachment
from tinymce.widgets import TinyMCE
class PostAdminForm(forms.ModelForm):
author = forms.ModelChoiceField(
queryset=User.objects.filter(is_superuser=True), required=False)
def clean_content(self):
if 'content' in self.cleaned_data:
return sanitizer.sanitize(self.cleaned_data['content'])
class Meta:
model = Post
widgets = {
'content': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
class PostAttachmentInline(admin.StackedInline):
model = PostAttachment
class PostAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'is_published', 'is_featured', 'status')
list_filter = ('status', 'publication_date')
search_fields = ('slug', 'title', 'body', 'summary')
date_hierarchy = 'publication_date'
prepopulated_fields = {'slug': ('title',)}
form = PostAdminForm
inlines = [PostAttachmentInline]
class BlogLinkAdmin(admin.ModelAdmin):
list_display = ('name', 'url')
search_fields = ('name', 'url')
list_filter = ('created', )
date_hierarchy = 'created'
admin.site.register(Post, PostAdmin)
admin.site.register(BlogLink, BlogLinkAdmin)
|
from django import forms
from django.contrib import admin
from django.contrib.auth.models import User
from us_ignite.common import sanitizer
from us_ignite.blog.models import BlogLink, Post, PostAttachment
from tinymce.widgets import TinyMCE
class PostAdminForm(forms.ModelForm):
author = forms.ModelChoiceField(
- queryset=User.objects.filter(is_superuser=True))
+ queryset=User.objects.filter(is_superuser=True), required=False)
? ++++++++++++++++
def clean_content(self):
if 'content' in self.cleaned_data:
return sanitizer.sanitize(self.cleaned_data['content'])
class Meta:
model = Post
widgets = {
'content': TinyMCE(attrs={'cols': 80, 'rows': 30}),
}
class PostAttachmentInline(admin.StackedInline):
model = PostAttachment
class PostAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'is_published', 'is_featured', 'status')
list_filter = ('status', 'publication_date')
search_fields = ('slug', 'title', 'body', 'summary')
date_hierarchy = 'publication_date'
prepopulated_fields = {'slug': ('title',)}
form = PostAdminForm
inlines = [PostAttachmentInline]
class BlogLinkAdmin(admin.ModelAdmin):
list_display = ('name', 'url')
search_fields = ('name', 'url')
list_filter = ('created', )
date_hierarchy = 'created'
admin.site.register(Post, PostAdmin)
admin.site.register(BlogLink, BlogLinkAdmin)
|
175bbd2f181d067712d38beeca9df4063654103a
|
nlppln/frog_to_saf.py
|
nlppln/frog_to_saf.py
|
import click
import os
import codecs
import json
from xtas.tasks._frog import parse_frog, frog_to_saf
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_dir', nargs=1, type=click.Path())
def frog2saf(input_files, output_dir):
if not os.path.exists(output_dir):
os.makedirs(output_dir)
for fi in input_files:
with codecs.open(fi) as f:
lines = f.readlines()
lines = [line.strip() for line in lines]
saf_data = frog_to_saf(parse_frog(lines))
head, tail = os.path.split(fi)
out_file = os.path.join(output_dir, '{}.json'.format(tail))
with codecs.open(out_file, 'wb', encoding='utf-8') as f:
json.dump(saf_data, f, indent=4)
if __name__ == '__main__':
frog2saf()
|
import click
import os
import codecs
import json
from xtas.tasks._frog import parse_frog, frog_to_saf
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_dir', nargs=1, type=click.Path())
def frog2saf(input_files, output_dir):
if not os.path.exists(output_dir):
os.makedirs(output_dir)
for fi in input_files:
with codecs.open(fi) as f:
lines = f.readlines()
lines = [line.strip() for line in lines]
saf_data = frog_to_saf(parse_frog(lines))
head, tail = os.path.split(fi)
fname = tail.replace(os.path.splitext(tail)[1], '')
out_file = os.path.join(output_dir, '{}.json'.format(fname))
with codecs.open(out_file, 'wb', encoding='utf-8') as f:
json.dump(saf_data, f, indent=4)
if __name__ == '__main__':
frog2saf()
|
Update script to remove extension from filename
|
Update script to remove extension from filename
Before, the script added the extension .json to whatever the file
name was. Now, it removes the last extension and then appends .json.
|
Python
|
apache-2.0
|
WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln
|
import click
import os
import codecs
import json
from xtas.tasks._frog import parse_frog, frog_to_saf
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_dir', nargs=1, type=click.Path())
def frog2saf(input_files, output_dir):
if not os.path.exists(output_dir):
os.makedirs(output_dir)
for fi in input_files:
with codecs.open(fi) as f:
lines = f.readlines()
lines = [line.strip() for line in lines]
saf_data = frog_to_saf(parse_frog(lines))
head, tail = os.path.split(fi)
+ fname = tail.replace(os.path.splitext(tail)[1], '')
- out_file = os.path.join(output_dir, '{}.json'.format(tail))
+ out_file = os.path.join(output_dir, '{}.json'.format(fname))
with codecs.open(out_file, 'wb', encoding='utf-8') as f:
json.dump(saf_data, f, indent=4)
if __name__ == '__main__':
frog2saf()
|
Update script to remove extension from filename
|
## Code Before:
import click
import os
import codecs
import json
from xtas.tasks._frog import parse_frog, frog_to_saf
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_dir', nargs=1, type=click.Path())
def frog2saf(input_files, output_dir):
if not os.path.exists(output_dir):
os.makedirs(output_dir)
for fi in input_files:
with codecs.open(fi) as f:
lines = f.readlines()
lines = [line.strip() for line in lines]
saf_data = frog_to_saf(parse_frog(lines))
head, tail = os.path.split(fi)
out_file = os.path.join(output_dir, '{}.json'.format(tail))
with codecs.open(out_file, 'wb', encoding='utf-8') as f:
json.dump(saf_data, f, indent=4)
if __name__ == '__main__':
frog2saf()
## Instruction:
Update script to remove extension from filename
## Code After:
import click
import os
import codecs
import json
from xtas.tasks._frog import parse_frog, frog_to_saf
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_dir', nargs=1, type=click.Path())
def frog2saf(input_files, output_dir):
if not os.path.exists(output_dir):
os.makedirs(output_dir)
for fi in input_files:
with codecs.open(fi) as f:
lines = f.readlines()
lines = [line.strip() for line in lines]
saf_data = frog_to_saf(parse_frog(lines))
head, tail = os.path.split(fi)
fname = tail.replace(os.path.splitext(tail)[1], '')
out_file = os.path.join(output_dir, '{}.json'.format(fname))
with codecs.open(out_file, 'wb', encoding='utf-8') as f:
json.dump(saf_data, f, indent=4)
if __name__ == '__main__':
frog2saf()
|
import click
import os
import codecs
import json
from xtas.tasks._frog import parse_frog, frog_to_saf
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_dir', nargs=1, type=click.Path())
def frog2saf(input_files, output_dir):
if not os.path.exists(output_dir):
os.makedirs(output_dir)
for fi in input_files:
with codecs.open(fi) as f:
lines = f.readlines()
lines = [line.strip() for line in lines]
saf_data = frog_to_saf(parse_frog(lines))
head, tail = os.path.split(fi)
+ fname = tail.replace(os.path.splitext(tail)[1], '')
- out_file = os.path.join(output_dir, '{}.json'.format(tail))
? ^ ^^
+ out_file = os.path.join(output_dir, '{}.json'.format(fname))
? ^^ ^^
with codecs.open(out_file, 'wb', encoding='utf-8') as f:
json.dump(saf_data, f, indent=4)
if __name__ == '__main__':
frog2saf()
|
4c5cf98be65ee2564062cce2a43b7833eef1a6c9
|
AFQ/utils/volume.py
|
AFQ/utils/volume.py
|
import scipy.ndimage as ndim
from skimage.filters import gaussian
def patch_up_roi(roi, sigma=0.5, truncate=2):
"""
After being non-linearly transformed, ROIs tend to have holes in them.
We perform a couple of computational geometry operations on the ROI to
fix that up.
Parameters
----------
roi : 3D binary array
The ROI after it has been transformed.
sigma : float
The sigma for initial Gaussian smoothing.
truncate : float
The truncation for the Gaussian
Returns
-------
ROI after dilation and hole-filling
"""
return (ndim.binary_fill_holes(
gaussian(roi, sigma=sigma, truncate=truncate)).astype(float) > 0.1)
|
import scipy.ndimage as ndim
from skimage.filters import gaussian
from skimage.morphology import convex_hull_image
def patch_up_roi(roi, sigma=0.5, truncate=2):
"""
After being non-linearly transformed, ROIs tend to have holes in them.
We perform a couple of computational geometry operations on the ROI to
fix that up.
Parameters
----------
roi : 3D binary array
The ROI after it has been transformed.
sigma : float
The sigma for initial Gaussian smoothing.
truncate : float
The truncation for the Gaussian
Returns
-------
ROI after dilation and hole-filling
"""
return convex_hull_image(gaussian(ndim.binary_fill_holes(roi),
sigma=sigma, truncate=truncate) > 0.1)
|
Add a convex hull operation to really close this up.
|
Add a convex hull operation to really close this up.
|
Python
|
bsd-2-clause
|
yeatmanlab/pyAFQ,arokem/pyAFQ,arokem/pyAFQ,yeatmanlab/pyAFQ
|
import scipy.ndimage as ndim
from skimage.filters import gaussian
-
+ from skimage.morphology import convex_hull_image
def patch_up_roi(roi, sigma=0.5, truncate=2):
"""
After being non-linearly transformed, ROIs tend to have holes in them.
We perform a couple of computational geometry operations on the ROI to
fix that up.
Parameters
----------
roi : 3D binary array
The ROI after it has been transformed.
sigma : float
The sigma for initial Gaussian smoothing.
truncate : float
The truncation for the Gaussian
Returns
-------
ROI after dilation and hole-filling
"""
- return (ndim.binary_fill_holes(
- gaussian(roi, sigma=sigma, truncate=truncate)).astype(float) > 0.1)
+ return convex_hull_image(gaussian(ndim.binary_fill_holes(roi),
+ sigma=sigma, truncate=truncate) > 0.1)
|
Add a convex hull operation to really close this up.
|
## Code Before:
import scipy.ndimage as ndim
from skimage.filters import gaussian
def patch_up_roi(roi, sigma=0.5, truncate=2):
"""
After being non-linearly transformed, ROIs tend to have holes in them.
We perform a couple of computational geometry operations on the ROI to
fix that up.
Parameters
----------
roi : 3D binary array
The ROI after it has been transformed.
sigma : float
The sigma for initial Gaussian smoothing.
truncate : float
The truncation for the Gaussian
Returns
-------
ROI after dilation and hole-filling
"""
return (ndim.binary_fill_holes(
gaussian(roi, sigma=sigma, truncate=truncate)).astype(float) > 0.1)
## Instruction:
Add a convex hull operation to really close this up.
## Code After:
import scipy.ndimage as ndim
from skimage.filters import gaussian
from skimage.morphology import convex_hull_image
def patch_up_roi(roi, sigma=0.5, truncate=2):
"""
After being non-linearly transformed, ROIs tend to have holes in them.
We perform a couple of computational geometry operations on the ROI to
fix that up.
Parameters
----------
roi : 3D binary array
The ROI after it has been transformed.
sigma : float
The sigma for initial Gaussian smoothing.
truncate : float
The truncation for the Gaussian
Returns
-------
ROI after dilation and hole-filling
"""
return convex_hull_image(gaussian(ndim.binary_fill_holes(roi),
sigma=sigma, truncate=truncate) > 0.1)
|
import scipy.ndimage as ndim
from skimage.filters import gaussian
-
+ from skimage.morphology import convex_hull_image
def patch_up_roi(roi, sigma=0.5, truncate=2):
"""
After being non-linearly transformed, ROIs tend to have holes in them.
We perform a couple of computational geometry operations on the ROI to
fix that up.
Parameters
----------
roi : 3D binary array
The ROI after it has been transformed.
sigma : float
The sigma for initial Gaussian smoothing.
truncate : float
The truncation for the Gaussian
Returns
-------
ROI after dilation and hole-filling
"""
- return (ndim.binary_fill_holes(
- gaussian(roi, sigma=sigma, truncate=truncate)).astype(float) > 0.1)
+ return convex_hull_image(gaussian(ndim.binary_fill_holes(roi),
+ sigma=sigma, truncate=truncate) > 0.1)
|
d659c685f40de7eb7b2ccd007888177fb158e139
|
tests/integration/players.py
|
tests/integration/players.py
|
import urllib.parse
import urllib.request
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
data = urllib.parse.urlencode(values)
data = data.encode('utf-8') # data should be bytes
req = urllib.request.Request(url, data)
response = urllib.request.urlopen(req)
the_page = response.read()
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
if __name__ == '__main__':
create_player("chapmang", "password", "[email protected]")
create_player("idlee", "deadparrot", "[email protected]")
create_player("gilliamt", "lumberjack", "[email protected]")
create_player("jonest", "trojanrabbit", "[email protected]")
create_player("cleesej", "generaldirection", "[email protected]")
create_player("palinm", "fleshwound", "[email protected]")
|
import requests
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
r = requests.post(url, params=values, verify=False)
r.raise_for_status()
if (r.status_code == 201):
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username,
password,
email))
if __name__ == '__main__':
create_player("chapmang", "password", "[email protected]")
create_player("idlee", "deadparrot", "[email protected]")
create_player("gilliamt", "lumberjack", "[email protected]")
create_player("jonest", "trojanrabbit", "[email protected]")
create_player("cleesej", "generaldirection", "[email protected]")
create_player("palinm", "fleshwound", "[email protected]")
|
Switch to requests library instead of urllib
|
Switch to requests library instead of urllib
|
Python
|
mit
|
dropshot/dropshot-server
|
- import urllib.parse
+
- import urllib.request
+ import requests
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
- data = urllib.parse.urlencode(values)
- data = data.encode('utf-8') # data should be bytes
- req = urllib.request.Request(url, data)
- response = urllib.request.urlopen(req)
- the_page = response.read()
+ r = requests.post(url, params=values, verify=False)
+
+ r.raise_for_status()
+
+ if (r.status_code == 201):
- print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
+ print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username,
+ password,
+ email))
+
if __name__ == '__main__':
create_player("chapmang", "password", "[email protected]")
create_player("idlee", "deadparrot", "[email protected]")
create_player("gilliamt", "lumberjack", "[email protected]")
create_player("jonest", "trojanrabbit", "[email protected]")
create_player("cleesej", "generaldirection", "[email protected]")
create_player("palinm", "fleshwound", "[email protected]")
|
Switch to requests library instead of urllib
|
## Code Before:
import urllib.parse
import urllib.request
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
data = urllib.parse.urlencode(values)
data = data.encode('utf-8') # data should be bytes
req = urllib.request.Request(url, data)
response = urllib.request.urlopen(req)
the_page = response.read()
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
if __name__ == '__main__':
create_player("chapmang", "password", "[email protected]")
create_player("idlee", "deadparrot", "[email protected]")
create_player("gilliamt", "lumberjack", "[email protected]")
create_player("jonest", "trojanrabbit", "[email protected]")
create_player("cleesej", "generaldirection", "[email protected]")
create_player("palinm", "fleshwound", "[email protected]")
## Instruction:
Switch to requests library instead of urllib
## Code After:
import requests
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
r = requests.post(url, params=values, verify=False)
r.raise_for_status()
if (r.status_code == 201):
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username,
password,
email))
if __name__ == '__main__':
create_player("chapmang", "password", "[email protected]")
create_player("idlee", "deadparrot", "[email protected]")
create_player("gilliamt", "lumberjack", "[email protected]")
create_player("jonest", "trojanrabbit", "[email protected]")
create_player("cleesej", "generaldirection", "[email protected]")
create_player("palinm", "fleshwound", "[email protected]")
|
- import urllib.parse
+
- import urllib.request
? -------
+ import requests
? +
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
- data = urllib.parse.urlencode(values)
- data = data.encode('utf-8') # data should be bytes
- req = urllib.request.Request(url, data)
- response = urllib.request.urlopen(req)
- the_page = response.read()
+ r = requests.post(url, params=values, verify=False)
+
+ r.raise_for_status()
+
+ if (r.status_code == 201):
- print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
? ------------------
+ print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username,
? ++++
+ password,
+ email))
+
if __name__ == '__main__':
create_player("chapmang", "password", "[email protected]")
create_player("idlee", "deadparrot", "[email protected]")
create_player("gilliamt", "lumberjack", "[email protected]")
create_player("jonest", "trojanrabbit", "[email protected]")
create_player("cleesej", "generaldirection", "[email protected]")
create_player("palinm", "fleshwound", "[email protected]")
|
a6ac5c901a1b677992599d6aac231e01c5e7a39d
|
tests/test_thread_concurrency.py
|
tests/test_thread_concurrency.py
|
'''
@author: Rahul Tanwani
@summary: Test cases to make sure sequential execution and concurrent execution return
the same response.
'''
import json
from tests.test_base import TestBase
from batch_requests.settings import br_settings
from batch_requests.concurrent.executor import ThreadBasedExecutor
class TestThreadConcurrency(TestBase):
'''
Tests sequential and concurrent execution.
'''
# FIXME: Find the better way to manage / update settings.
def setUp(self):
'''
Change the concurrency settings.
'''
self.number_workers = 10
self.orig_executor = br_settings.executor
def tearDown(self):
# Restore the original batch requests settings.
br_settings.executor = self.orig_executor
def test_thread_concurrency_response(self):
'''
Make a request with sequential and thread based executor and compare
the response.
'''
data = json.dumps({"text": "Batch"})
# Make a batch call for GET, POST and PUT request.
get_req = ("get", "/views/", '', {})
post_req = ("post", "/views/", data, {"content_type": "text/plain"})
put_req = ("put", "/views/", data, {"content_type": "text/plain"})
# Get the response for a batch request.
batch_requests = self.make_multiple_batch_request([get_req, post_req, put_req])
# FIXME: Find the better way to manage / update settings.
# Update the settings.
br_settings.executor = ThreadBasedExecutor(self.number_workers)
threaded_batch_requests = self.make_multiple_batch_request([get_req, post_req, put_req])
self.assertEqual(batch_requests.content, threaded_batch_requests.content, "Sequential and concurrent response not same!")
|
'''
@author: Rahul Tanwani
@summary: Test cases to make sure sequential execution and concurrent execution return
the same response.
'''
from tests.test_concurrency_base import TestBaseConcurrency
from batch_requests.concurrent.executor import ThreadBasedExecutor
class TestThreadConcurrency(TestBaseConcurrency):
'''
Tests sequential and concurrent execution.
'''
def get_executor(self):
'''
Returns the executor to use for running tests defined in this suite.
'''
return ThreadBasedExecutor(self.number_workers)
def test_thread_concurrency_response(self):
'''
Make a request with sequential and concurrency based executor and compare
the response.
'''
self.compare_seq_and_concurrent_req()
|
Refactor thread based concurrency tests
|
Refactor thread based concurrency tests
|
Python
|
mit
|
tanwanirahul/django-batch-requests
|
'''
@author: Rahul Tanwani
@summary: Test cases to make sure sequential execution and concurrent execution return
the same response.
'''
- import json
- from tests.test_base import TestBase
+ from tests.test_concurrency_base import TestBaseConcurrency
- from batch_requests.settings import br_settings
from batch_requests.concurrent.executor import ThreadBasedExecutor
- class TestThreadConcurrency(TestBase):
+ class TestThreadConcurrency(TestBaseConcurrency):
'''
Tests sequential and concurrent execution.
'''
+ def get_executor(self):
- # FIXME: Find the better way to manage / update settings.
- def setUp(self):
'''
- Change the concurrency settings.
+ Returns the executor to use for running tests defined in this suite.
'''
+ return ThreadBasedExecutor(self.number_workers)
- self.number_workers = 10
- self.orig_executor = br_settings.executor
-
- def tearDown(self):
- # Restore the original batch requests settings.
- br_settings.executor = self.orig_executor
def test_thread_concurrency_response(self):
'''
- Make a request with sequential and thread based executor and compare
+ Make a request with sequential and concurrency based executor and compare
the response.
'''
- data = json.dumps({"text": "Batch"})
+ self.compare_seq_and_concurrent_req()
- # Make a batch call for GET, POST and PUT request.
- get_req = ("get", "/views/", '', {})
- post_req = ("post", "/views/", data, {"content_type": "text/plain"})
- put_req = ("put", "/views/", data, {"content_type": "text/plain"})
-
- # Get the response for a batch request.
- batch_requests = self.make_multiple_batch_request([get_req, post_req, put_req])
-
- # FIXME: Find the better way to manage / update settings.
- # Update the settings.
- br_settings.executor = ThreadBasedExecutor(self.number_workers)
- threaded_batch_requests = self.make_multiple_batch_request([get_req, post_req, put_req])
-
- self.assertEqual(batch_requests.content, threaded_batch_requests.content, "Sequential and concurrent response not same!")
-
|
Refactor thread based concurrency tests
|
## Code Before:
'''
@author: Rahul Tanwani
@summary: Test cases to make sure sequential execution and concurrent execution return
the same response.
'''
import json
from tests.test_base import TestBase
from batch_requests.settings import br_settings
from batch_requests.concurrent.executor import ThreadBasedExecutor
class TestThreadConcurrency(TestBase):
'''
Tests sequential and concurrent execution.
'''
# FIXME: Find the better way to manage / update settings.
def setUp(self):
'''
Change the concurrency settings.
'''
self.number_workers = 10
self.orig_executor = br_settings.executor
def tearDown(self):
# Restore the original batch requests settings.
br_settings.executor = self.orig_executor
def test_thread_concurrency_response(self):
'''
Make a request with sequential and thread based executor and compare
the response.
'''
data = json.dumps({"text": "Batch"})
# Make a batch call for GET, POST and PUT request.
get_req = ("get", "/views/", '', {})
post_req = ("post", "/views/", data, {"content_type": "text/plain"})
put_req = ("put", "/views/", data, {"content_type": "text/plain"})
# Get the response for a batch request.
batch_requests = self.make_multiple_batch_request([get_req, post_req, put_req])
# FIXME: Find the better way to manage / update settings.
# Update the settings.
br_settings.executor = ThreadBasedExecutor(self.number_workers)
threaded_batch_requests = self.make_multiple_batch_request([get_req, post_req, put_req])
self.assertEqual(batch_requests.content, threaded_batch_requests.content, "Sequential and concurrent response not same!")
## Instruction:
Refactor thread based concurrency tests
## Code After:
'''
@author: Rahul Tanwani
@summary: Test cases to make sure sequential execution and concurrent execution return
the same response.
'''
from tests.test_concurrency_base import TestBaseConcurrency
from batch_requests.concurrent.executor import ThreadBasedExecutor
class TestThreadConcurrency(TestBaseConcurrency):
'''
Tests sequential and concurrent execution.
'''
def get_executor(self):
'''
Returns the executor to use for running tests defined in this suite.
'''
return ThreadBasedExecutor(self.number_workers)
def test_thread_concurrency_response(self):
'''
Make a request with sequential and concurrency based executor and compare
the response.
'''
self.compare_seq_and_concurrent_req()
|
'''
@author: Rahul Tanwani
@summary: Test cases to make sure sequential execution and concurrent execution return
the same response.
'''
- import json
- from tests.test_base import TestBase
+ from tests.test_concurrency_base import TestBaseConcurrency
? ++++++++++++ +++++++++++
- from batch_requests.settings import br_settings
from batch_requests.concurrent.executor import ThreadBasedExecutor
- class TestThreadConcurrency(TestBase):
+ class TestThreadConcurrency(TestBaseConcurrency):
? +++++++++++
'''
Tests sequential and concurrent execution.
'''
+ def get_executor(self):
- # FIXME: Find the better way to manage / update settings.
- def setUp(self):
'''
- Change the concurrency settings.
+ Returns the executor to use for running tests defined in this suite.
'''
+ return ThreadBasedExecutor(self.number_workers)
- self.number_workers = 10
- self.orig_executor = br_settings.executor
-
- def tearDown(self):
- # Restore the original batch requests settings.
- br_settings.executor = self.orig_executor
def test_thread_concurrency_response(self):
'''
- Make a request with sequential and thread based executor and compare
? ^^ ^^
+ Make a request with sequential and concurrency based executor and compare
? ^^^^^^ ^^^
the response.
'''
+ self.compare_seq_and_concurrent_req()
- data = json.dumps({"text": "Batch"})
-
- # Make a batch call for GET, POST and PUT request.
- get_req = ("get", "/views/", '', {})
- post_req = ("post", "/views/", data, {"content_type": "text/plain"})
- put_req = ("put", "/views/", data, {"content_type": "text/plain"})
-
- # Get the response for a batch request.
- batch_requests = self.make_multiple_batch_request([get_req, post_req, put_req])
-
- # FIXME: Find the better way to manage / update settings.
- # Update the settings.
- br_settings.executor = ThreadBasedExecutor(self.number_workers)
- threaded_batch_requests = self.make_multiple_batch_request([get_req, post_req, put_req])
-
- self.assertEqual(batch_requests.content, threaded_batch_requests.content, "Sequential and concurrent response not same!")
|
2e1b189727616b4c93ad4244299530c738304428
|
httpobs/scanner/utils.py
|
httpobs/scanner/utils.py
|
import socket
import tld
def valid_hostname(hostname: str) -> bool:
"""
:param hostname: The hostname requested in the scan
:return: True if it's a valid hostname (fqdn in DNS that's not an IP address), False otherwise
"""
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
return False # If we get this far, it's an IP address and therefore not a valid fqdn
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
return False
except:
pass
# Then, let's see if it's a TLD; this includes things fuel.aero or co.uk that look like fqdns but aren't
if hostname in tld.get_tld_names():
return False
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
return False
except:
return False
# If we've made it this far, then everything is good to go! Woohoo!
return True
|
import socket
def valid_hostname(hostname: str):
"""
:param hostname: The hostname requested in the scan
:return: Hostname if it's valid, otherwise None
"""
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
return None # If we get this far, it's an IP address and therefore not a valid fqdn
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
return None
except:
pass
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
return None
except:
return None
# If we've made it this far, then everything is good to go! Woohoo!
return hostname
|
Remove TLD check, allow for www
|
Remove TLD check, allow for www
|
Python
|
mpl-2.0
|
april/http-observatory,april/http-observatory,april/http-observatory,mozilla/http-observatory,mozilla/http-observatory,mozilla/http-observatory
|
import socket
- import tld
- def valid_hostname(hostname: str) -> bool:
+ def valid_hostname(hostname: str):
"""
:param hostname: The hostname requested in the scan
- :return: True if it's a valid hostname (fqdn in DNS that's not an IP address), False otherwise
+ :return: Hostname if it's valid, otherwise None
"""
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
- return False # If we get this far, it's an IP address and therefore not a valid fqdn
+ return None # If we get this far, it's an IP address and therefore not a valid fqdn
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
- return False
+ return None
except:
pass
-
- # Then, let's see if it's a TLD; this includes things fuel.aero or co.uk that look like fqdns but aren't
- if hostname in tld.get_tld_names():
- return False
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
- return False
+ return None
except:
- return False
+ return None
# If we've made it this far, then everything is good to go! Woohoo!
- return True
+ return hostname
|
Remove TLD check, allow for www
|
## Code Before:
import socket
import tld
def valid_hostname(hostname: str) -> bool:
"""
:param hostname: The hostname requested in the scan
:return: True if it's a valid hostname (fqdn in DNS that's not an IP address), False otherwise
"""
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
return False # If we get this far, it's an IP address and therefore not a valid fqdn
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
return False
except:
pass
# Then, let's see if it's a TLD; this includes things fuel.aero or co.uk that look like fqdns but aren't
if hostname in tld.get_tld_names():
return False
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
return False
except:
return False
# If we've made it this far, then everything is good to go! Woohoo!
return True
## Instruction:
Remove TLD check, allow for www
## Code After:
import socket
def valid_hostname(hostname: str):
"""
:param hostname: The hostname requested in the scan
:return: Hostname if it's valid, otherwise None
"""
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
return None # If we get this far, it's an IP address and therefore not a valid fqdn
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
return None
except:
pass
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
return None
except:
return None
# If we've made it this far, then everything is good to go! Woohoo!
return hostname
|
import socket
- import tld
- def valid_hostname(hostname: str) -> bool:
? --------
+ def valid_hostname(hostname: str):
"""
:param hostname: The hostname requested in the scan
- :return: True if it's a valid hostname (fqdn in DNS that's not an IP address), False otherwise
+ :return: Hostname if it's valid, otherwise None
"""
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
- return False # If we get this far, it's an IP address and therefore not a valid fqdn
? ^^^^
+ return None # If we get this far, it's an IP address and therefore not a valid fqdn
? ^^^
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
- return False
? ^^^^
+ return None
? ^^^
except:
pass
-
- # Then, let's see if it's a TLD; this includes things fuel.aero or co.uk that look like fqdns but aren't
- if hostname in tld.get_tld_names():
- return False
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
- return False
? ^^^^
+ return None
? ^^^
except:
- return False
? ^^^^
+ return None
? ^^^
# If we've made it this far, then everything is good to go! Woohoo!
- return True
+ return hostname
|
f1957185f0d93861a8ed319223f574df8f4e838f
|
src/graphql_relay/node/plural.py
|
src/graphql_relay/node/plural.py
|
from typing import Any, Callable
from graphql.type import (
GraphQLArgument,
GraphQLField,
GraphQLInputType,
GraphQLOutputType,
GraphQLList,
GraphQLNonNull,
GraphQLResolveInfo,
)
def plural_identifying_root_field(
arg_name: str,
input_type: GraphQLInputType,
output_type: GraphQLOutputType,
resolve_single_input: Callable[[GraphQLResolveInfo, str], Any],
description: str = None,
) -> GraphQLField:
if isinstance(input_type, GraphQLNonNull):
input_type = input_type.of_type
input_args = {
arg_name: GraphQLArgument(
GraphQLNonNull(GraphQLList(GraphQLNonNull(input_type)))
)
}
def resolve(_obj, info, **args):
inputs = args[arg_name]
return [resolve_single_input(info, input_) for input_ in inputs]
return GraphQLField(
GraphQLList(output_type),
description=description,
args=input_args,
resolve=resolve,
)
|
from typing import Any, Callable
from graphql.type import (
GraphQLArgument,
GraphQLField,
GraphQLInputType,
GraphQLOutputType,
GraphQLList,
GraphQLNonNull,
GraphQLResolveInfo,
is_non_null_type,
)
def plural_identifying_root_field(
arg_name: str,
input_type: GraphQLInputType,
output_type: GraphQLOutputType,
resolve_single_input: Callable[[GraphQLResolveInfo, str], Any],
description: str = None,
) -> GraphQLField:
if is_non_null_type(input_type):
input_type = input_type.of_type
input_args = {
arg_name: GraphQLArgument(
GraphQLNonNull(GraphQLList(GraphQLNonNull(input_type)))
)
}
def resolve(_obj, info, **args):
inputs = args[arg_name]
return [resolve_single_input(info, input_) for input_ in inputs]
return GraphQLField(
GraphQLList(output_type),
description=description,
args=input_args,
resolve=resolve,
)
|
Use graphql's predicate function instead of 'isinstance'
|
Use graphql's predicate function instead of 'isinstance'
Replicates graphql/graphql-relay-js@5b428507ef246be7ca3afb3589c410874a57f9bc
|
Python
|
mit
|
graphql-python/graphql-relay-py
|
from typing import Any, Callable
from graphql.type import (
GraphQLArgument,
GraphQLField,
GraphQLInputType,
GraphQLOutputType,
GraphQLList,
GraphQLNonNull,
GraphQLResolveInfo,
+ is_non_null_type,
)
def plural_identifying_root_field(
arg_name: str,
input_type: GraphQLInputType,
output_type: GraphQLOutputType,
resolve_single_input: Callable[[GraphQLResolveInfo, str], Any],
description: str = None,
) -> GraphQLField:
- if isinstance(input_type, GraphQLNonNull):
+ if is_non_null_type(input_type):
input_type = input_type.of_type
input_args = {
arg_name: GraphQLArgument(
GraphQLNonNull(GraphQLList(GraphQLNonNull(input_type)))
)
}
def resolve(_obj, info, **args):
inputs = args[arg_name]
return [resolve_single_input(info, input_) for input_ in inputs]
return GraphQLField(
GraphQLList(output_type),
description=description,
args=input_args,
resolve=resolve,
)
|
Use graphql's predicate function instead of 'isinstance'
|
## Code Before:
from typing import Any, Callable
from graphql.type import (
GraphQLArgument,
GraphQLField,
GraphQLInputType,
GraphQLOutputType,
GraphQLList,
GraphQLNonNull,
GraphQLResolveInfo,
)
def plural_identifying_root_field(
arg_name: str,
input_type: GraphQLInputType,
output_type: GraphQLOutputType,
resolve_single_input: Callable[[GraphQLResolveInfo, str], Any],
description: str = None,
) -> GraphQLField:
if isinstance(input_type, GraphQLNonNull):
input_type = input_type.of_type
input_args = {
arg_name: GraphQLArgument(
GraphQLNonNull(GraphQLList(GraphQLNonNull(input_type)))
)
}
def resolve(_obj, info, **args):
inputs = args[arg_name]
return [resolve_single_input(info, input_) for input_ in inputs]
return GraphQLField(
GraphQLList(output_type),
description=description,
args=input_args,
resolve=resolve,
)
## Instruction:
Use graphql's predicate function instead of 'isinstance'
## Code After:
from typing import Any, Callable
from graphql.type import (
GraphQLArgument,
GraphQLField,
GraphQLInputType,
GraphQLOutputType,
GraphQLList,
GraphQLNonNull,
GraphQLResolveInfo,
is_non_null_type,
)
def plural_identifying_root_field(
arg_name: str,
input_type: GraphQLInputType,
output_type: GraphQLOutputType,
resolve_single_input: Callable[[GraphQLResolveInfo, str], Any],
description: str = None,
) -> GraphQLField:
if is_non_null_type(input_type):
input_type = input_type.of_type
input_args = {
arg_name: GraphQLArgument(
GraphQLNonNull(GraphQLList(GraphQLNonNull(input_type)))
)
}
def resolve(_obj, info, **args):
inputs = args[arg_name]
return [resolve_single_input(info, input_) for input_ in inputs]
return GraphQLField(
GraphQLList(output_type),
description=description,
args=input_args,
resolve=resolve,
)
|
from typing import Any, Callable
from graphql.type import (
GraphQLArgument,
GraphQLField,
GraphQLInputType,
GraphQLOutputType,
GraphQLList,
GraphQLNonNull,
GraphQLResolveInfo,
+ is_non_null_type,
)
def plural_identifying_root_field(
arg_name: str,
input_type: GraphQLInputType,
output_type: GraphQLOutputType,
resolve_single_input: Callable[[GraphQLResolveInfo, str], Any],
description: str = None,
) -> GraphQLField:
- if isinstance(input_type, GraphQLNonNull):
+ if is_non_null_type(input_type):
input_type = input_type.of_type
input_args = {
arg_name: GraphQLArgument(
GraphQLNonNull(GraphQLList(GraphQLNonNull(input_type)))
)
}
def resolve(_obj, info, **args):
inputs = args[arg_name]
return [resolve_single_input(info, input_) for input_ in inputs]
return GraphQLField(
GraphQLList(output_type),
description=description,
args=input_args,
resolve=resolve,
)
|
f49276f78b1b303df5fed063e226ee78165baff5
|
spam_lists/exceptions.py
|
spam_lists/exceptions.py
|
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsError, ValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsError, ValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsError, ValueError):
'''The value is not a valid url'''
|
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsValueError):
'''The value is not a valid url'''
|
Change bases of exception classes extending SpamListsError and ValueError
|
Change bases of exception classes extending SpamListsError and ValueError
This commit removes SpamListsError and ValueError as direct base classes
of other exception classes (except SpamListsValueError), and replaces
them with SpamListsValueError.
|
Python
|
mit
|
piotr-rusin/spam-lists
|
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
- class UnathorizedAPIKeyError(SpamListsError, ValueError):
+ class UnathorizedAPIKeyError(SpamListsValueError):
'''The API key used to query the service was not authorized'''
- class InvalidHostError(SpamListsError, ValueError):
+ class InvalidHostError(SpamListsValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
- class InvalidURLError(SpamListsError, ValueError):
+ class InvalidURLError(SpamListsValueError):
'''The value is not a valid url'''
|
Change bases of exception classes extending SpamListsError and ValueError
|
## Code Before:
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsError, ValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsError, ValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsError, ValueError):
'''The value is not a valid url'''
## Instruction:
Change bases of exception classes extending SpamListsError and ValueError
## Code After:
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsValueError):
'''The value is not a valid url'''
|
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
- class UnathorizedAPIKeyError(SpamListsError, ValueError):
? -------
+ class UnathorizedAPIKeyError(SpamListsValueError):
'''The API key used to query the service was not authorized'''
- class InvalidHostError(SpamListsError, ValueError):
? -------
+ class InvalidHostError(SpamListsValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
- class InvalidURLError(SpamListsError, ValueError):
? -------
+ class InvalidURLError(SpamListsValueError):
'''The value is not a valid url'''
|
a4f010ed53615dcbe48c08a445e7d64045001133
|
base_comment_template/tests/test_base_comment_template.py
|
base_comment_template/tests/test_base_comment_template.py
|
from odoo.tests.common import TransactionCase
class TestResPartner(TransactionCase):
def setUp(self):
self.template_id = self.env['base.comment.template'].create({
'name': 'Comment before lines',
'position': 'before_lines',
'text': 'Text before lines',
})
def test_commercial_partner_fields(self):
# Azure Interior
partner_id = self.env.ref('base.res_partner_12')
partner_id.property_comment_template_id = self.template_id.id
# Test childs propagation of commercial partner field
for child_id in partner_id.child_ids:
self.assertEqual(
child_id.property_comment_template_id == self.template_id)
|
from odoo.tests.common import TransactionCase
class TestResPartner(TransactionCase):
def setUp(self):
super(TestResPartner, self).setUp()
self.template_id = self.env['base.comment.template'].create({
'name': 'Comment before lines',
'position': 'before_lines',
'text': 'Text before lines',
})
def test_commercial_partner_fields(self):
# Azure Interior
partner_id = self.env.ref('base.res_partner_12')
partner_id.property_comment_template_id = self.template_id.id
# Test childs propagation of commercial partner field
for child_id in partner_id.child_ids:
self.assertEqual(
child_id.property_comment_template_id, self.template_id)
|
Move comment_template_id field to the Invoicing tab
|
[IMP] account_invoice_comment_template: Move comment_template_id field to the Invoicing tab
[IMP] account_invoice_comment_template: rename partner field name from comment_template_id to invoice_comment_template_id
[IMP] account_invoice_comment_template: Make partner field company_dependant and move domain definition of invoice fields from the view to the model
[MOV] account_invoice_comment_template: comment_template_id to base_comment_template
[IMP] account_invoice_comment_template: Translate templates when partner changes
|
Python
|
agpl-3.0
|
OCA/reporting-engine,OCA/reporting-engine,OCA/reporting-engine,OCA/reporting-engine
|
from odoo.tests.common import TransactionCase
class TestResPartner(TransactionCase):
def setUp(self):
+ super(TestResPartner, self).setUp()
self.template_id = self.env['base.comment.template'].create({
'name': 'Comment before lines',
'position': 'before_lines',
'text': 'Text before lines',
})
def test_commercial_partner_fields(self):
# Azure Interior
partner_id = self.env.ref('base.res_partner_12')
partner_id.property_comment_template_id = self.template_id.id
# Test childs propagation of commercial partner field
for child_id in partner_id.child_ids:
self.assertEqual(
- child_id.property_comment_template_id == self.template_id)
+ child_id.property_comment_template_id, self.template_id)
|
Move comment_template_id field to the Invoicing tab
|
## Code Before:
from odoo.tests.common import TransactionCase
class TestResPartner(TransactionCase):
def setUp(self):
self.template_id = self.env['base.comment.template'].create({
'name': 'Comment before lines',
'position': 'before_lines',
'text': 'Text before lines',
})
def test_commercial_partner_fields(self):
# Azure Interior
partner_id = self.env.ref('base.res_partner_12')
partner_id.property_comment_template_id = self.template_id.id
# Test childs propagation of commercial partner field
for child_id in partner_id.child_ids:
self.assertEqual(
child_id.property_comment_template_id == self.template_id)
## Instruction:
Move comment_template_id field to the Invoicing tab
## Code After:
from odoo.tests.common import TransactionCase
class TestResPartner(TransactionCase):
def setUp(self):
super(TestResPartner, self).setUp()
self.template_id = self.env['base.comment.template'].create({
'name': 'Comment before lines',
'position': 'before_lines',
'text': 'Text before lines',
})
def test_commercial_partner_fields(self):
# Azure Interior
partner_id = self.env.ref('base.res_partner_12')
partner_id.property_comment_template_id = self.template_id.id
# Test childs propagation of commercial partner field
for child_id in partner_id.child_ids:
self.assertEqual(
child_id.property_comment_template_id, self.template_id)
|
from odoo.tests.common import TransactionCase
class TestResPartner(TransactionCase):
def setUp(self):
+ super(TestResPartner, self).setUp()
self.template_id = self.env['base.comment.template'].create({
'name': 'Comment before lines',
'position': 'before_lines',
'text': 'Text before lines',
})
def test_commercial_partner_fields(self):
# Azure Interior
partner_id = self.env.ref('base.res_partner_12')
partner_id.property_comment_template_id = self.template_id.id
# Test childs propagation of commercial partner field
for child_id in partner_id.child_ids:
self.assertEqual(
- child_id.property_comment_template_id == self.template_id)
? ^^^
+ child_id.property_comment_template_id, self.template_id)
? ^
|
342d3791aa80084309ffc00a9e5e936fa8277401
|
AFQ/viz.py
|
AFQ/viz.py
|
import tempfile
import os.path as op
import numpy as np
import IPython.display as display
import nibabel as nib
from dipy.viz import fvtk
from palettable.tableau import Tableau_20
def visualize_bundles(trk, ren=None, inline=True, interact=False):
"""
Visualize bundles in 3D using fvtk
"""
if isinstance(trk, str):
trk = nib.streamlines.load(trk)
if ren is None:
ren = fvtk.ren()
for b in np.unique(trk.tractogram.data_per_streamline['bundle']):
idx = np.where(trk.tractogram.data_per_streamline['bundle'] == b)[0]
this_sl = list(trk.streamlines[idx])
sl_actor = fvtk.line(this_sl, Tableau_20.colors[np.mod(20, int(b))])
fvtk.add(ren, sl_actor)
if inline:
tdir = tempfile.gettempdir()
fname = op.join(tdir, "fig.png")
fvtk.record(ren, out_path=fname)
display.display_png(display.Image(fname))
if interact:
fvtk.show(ren)
return ren
|
import tempfile
import os.path as op
import numpy as np
import IPython.display as display
import nibabel as nib
from dipy.viz import fvtk
from dipy.viz.colormap import line_colors
from palettable.tableau import Tableau_20
def visualize_bundles(trk, ren=None, inline=True, interact=False):
"""
Visualize bundles in 3D using fvtk
"""
if isinstance(trk, str):
trk = nib.streamlines.load(trk)
if ren is None:
ren = fvtk.ren()
# There are no bundles in here:
if list(trk.tractogram.data_per_streamline.keys()) == []:
streamlines = list(trk.streamlines)
sl_actor = fvtk.line(streamlines, line_colors(streamlines))
fvtk.add(ren, sl_actor)
for b in np.unique(trk.tractogram.data_per_streamline['bundle']):
idx = np.where(trk.tractogram.data_per_streamline['bundle'] == b)[0]
this_sl = list(trk.streamlines[idx])
sl_actor = fvtk.line(this_sl, Tableau_20.colors[np.mod(20, int(b))])
fvtk.add(ren, sl_actor)
if inline:
tdir = tempfile.gettempdir()
fname = op.join(tdir, "fig.png")
fvtk.record(ren, out_path=fname)
display.display_png(display.Image(fname))
if interact:
fvtk.show(ren)
return ren
|
Enable visualizing trk files without bundle designations.
|
Enable visualizing trk files without bundle designations.
|
Python
|
bsd-2-clause
|
yeatmanlab/pyAFQ,arokem/pyAFQ,yeatmanlab/pyAFQ,arokem/pyAFQ
|
import tempfile
import os.path as op
import numpy as np
import IPython.display as display
import nibabel as nib
from dipy.viz import fvtk
+ from dipy.viz.colormap import line_colors
+
from palettable.tableau import Tableau_20
def visualize_bundles(trk, ren=None, inline=True, interact=False):
"""
Visualize bundles in 3D using fvtk
"""
if isinstance(trk, str):
trk = nib.streamlines.load(trk)
if ren is None:
ren = fvtk.ren()
+
+ # There are no bundles in here:
+ if list(trk.tractogram.data_per_streamline.keys()) == []:
+ streamlines = list(trk.streamlines)
+ sl_actor = fvtk.line(streamlines, line_colors(streamlines))
+ fvtk.add(ren, sl_actor)
for b in np.unique(trk.tractogram.data_per_streamline['bundle']):
idx = np.where(trk.tractogram.data_per_streamline['bundle'] == b)[0]
this_sl = list(trk.streamlines[idx])
sl_actor = fvtk.line(this_sl, Tableau_20.colors[np.mod(20, int(b))])
fvtk.add(ren, sl_actor)
if inline:
tdir = tempfile.gettempdir()
fname = op.join(tdir, "fig.png")
fvtk.record(ren, out_path=fname)
display.display_png(display.Image(fname))
if interact:
fvtk.show(ren)
return ren
|
Enable visualizing trk files without bundle designations.
|
## Code Before:
import tempfile
import os.path as op
import numpy as np
import IPython.display as display
import nibabel as nib
from dipy.viz import fvtk
from palettable.tableau import Tableau_20
def visualize_bundles(trk, ren=None, inline=True, interact=False):
"""
Visualize bundles in 3D using fvtk
"""
if isinstance(trk, str):
trk = nib.streamlines.load(trk)
if ren is None:
ren = fvtk.ren()
for b in np.unique(trk.tractogram.data_per_streamline['bundle']):
idx = np.where(trk.tractogram.data_per_streamline['bundle'] == b)[0]
this_sl = list(trk.streamlines[idx])
sl_actor = fvtk.line(this_sl, Tableau_20.colors[np.mod(20, int(b))])
fvtk.add(ren, sl_actor)
if inline:
tdir = tempfile.gettempdir()
fname = op.join(tdir, "fig.png")
fvtk.record(ren, out_path=fname)
display.display_png(display.Image(fname))
if interact:
fvtk.show(ren)
return ren
## Instruction:
Enable visualizing trk files without bundle designations.
## Code After:
import tempfile
import os.path as op
import numpy as np
import IPython.display as display
import nibabel as nib
from dipy.viz import fvtk
from dipy.viz.colormap import line_colors
from palettable.tableau import Tableau_20
def visualize_bundles(trk, ren=None, inline=True, interact=False):
"""
Visualize bundles in 3D using fvtk
"""
if isinstance(trk, str):
trk = nib.streamlines.load(trk)
if ren is None:
ren = fvtk.ren()
# There are no bundles in here:
if list(trk.tractogram.data_per_streamline.keys()) == []:
streamlines = list(trk.streamlines)
sl_actor = fvtk.line(streamlines, line_colors(streamlines))
fvtk.add(ren, sl_actor)
for b in np.unique(trk.tractogram.data_per_streamline['bundle']):
idx = np.where(trk.tractogram.data_per_streamline['bundle'] == b)[0]
this_sl = list(trk.streamlines[idx])
sl_actor = fvtk.line(this_sl, Tableau_20.colors[np.mod(20, int(b))])
fvtk.add(ren, sl_actor)
if inline:
tdir = tempfile.gettempdir()
fname = op.join(tdir, "fig.png")
fvtk.record(ren, out_path=fname)
display.display_png(display.Image(fname))
if interact:
fvtk.show(ren)
return ren
|
import tempfile
import os.path as op
import numpy as np
import IPython.display as display
import nibabel as nib
from dipy.viz import fvtk
+ from dipy.viz.colormap import line_colors
+
from palettable.tableau import Tableau_20
def visualize_bundles(trk, ren=None, inline=True, interact=False):
"""
Visualize bundles in 3D using fvtk
"""
if isinstance(trk, str):
trk = nib.streamlines.load(trk)
if ren is None:
ren = fvtk.ren()
+
+ # There are no bundles in here:
+ if list(trk.tractogram.data_per_streamline.keys()) == []:
+ streamlines = list(trk.streamlines)
+ sl_actor = fvtk.line(streamlines, line_colors(streamlines))
+ fvtk.add(ren, sl_actor)
for b in np.unique(trk.tractogram.data_per_streamline['bundle']):
idx = np.where(trk.tractogram.data_per_streamline['bundle'] == b)[0]
this_sl = list(trk.streamlines[idx])
sl_actor = fvtk.line(this_sl, Tableau_20.colors[np.mod(20, int(b))])
fvtk.add(ren, sl_actor)
if inline:
tdir = tempfile.gettempdir()
fname = op.join(tdir, "fig.png")
fvtk.record(ren, out_path=fname)
display.display_png(display.Image(fname))
if interact:
fvtk.show(ren)
return ren
|
e399c0b1988ed8b2981ddc684a0a3652a73ea31e
|
pavelib/utils/test/utils.py
|
pavelib/utils/test/utils.py
|
from paver.easy import sh, task
from pavelib.utils.envs import Env
__test__ = False # do not collect
@task
def clean_test_files():
"""
Clean fixture files used by tests and .pyc files
"""
sh("git clean -fqdx test_root/logs test_root/data test_root/staticfiles test_root/uploads")
sh("find . -type f -name \"*.pyc\" -delete")
sh("rm -rf test_root/log/auto_screenshots/*")
def clean_dir(directory):
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
sh('find {dir} -type f -delete'.format(dir=directory))
@task
def clean_reports_dir():
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
reports_dir = Env.REPORT_DIR.makedirs_p()
clean_dir(reports_dir)
@task
def clean_mongo():
"""
Clean mongo test databases
"""
sh("mongo {repo_root}/scripts/delete-mongo-test-dbs.js".format(repo_root=Env.REPO_ROOT))
|
from paver.easy import sh, task
from pavelib.utils.envs import Env
__test__ = False # do not collect
@task
def clean_test_files():
"""
Clean fixture files used by tests and .pyc files
"""
sh("git clean -fqdx test_root/logs test_root/data test_root/staticfiles test_root/uploads")
sh("find . -type f -name \"*.pyc\" -delete")
sh("rm -rf test_root/log/auto_screenshots/*")
sh("rm -rf /tmp/mako_[cl]ms")
def clean_dir(directory):
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
sh('find {dir} -type f -delete'.format(dir=directory))
@task
def clean_reports_dir():
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
reports_dir = Env.REPORT_DIR.makedirs_p()
clean_dir(reports_dir)
@task
def clean_mongo():
"""
Clean mongo test databases
"""
sh("mongo {repo_root}/scripts/delete-mongo-test-dbs.js".format(repo_root=Env.REPO_ROOT))
|
Clean out the mako temp dirs before running tests
|
Clean out the mako temp dirs before running tests
|
Python
|
agpl-3.0
|
zofuthan/edx-platform,eemirtekin/edx-platform,TeachAtTUM/edx-platform,synergeticsedx/deployment-wipro,doismellburning/edx-platform,OmarIthawi/edx-platform,jolyonb/edx-platform,appliedx/edx-platform,philanthropy-u/edx-platform,msegado/edx-platform,pepeportela/edx-platform,JCBarahona/edX,lduarte1991/edx-platform,jamesblunt/edx-platform,vasyarv/edx-platform,valtech-mooc/edx-platform,jonathan-beard/edx-platform,beacloudgenius/edx-platform,DefyVentures/edx-platform,sameetb-cuelogic/edx-platform-test,rismalrv/edx-platform,zubair-arbi/edx-platform,ampax/edx-platform-backup,chauhanhardik/populo,ovnicraft/edx-platform,synergeticsedx/deployment-wipro,antoviaque/edx-platform,Lektorium-LLC/edx-platform,marcore/edx-platform,Endika/edx-platform,vikas1885/test1,unicri/edx-platform,Edraak/edraak-platform,Endika/edx-platform,ampax/edx-platform,motion2015/edx-platform,EDUlib/edx-platform,mtlchun/edx,shabab12/edx-platform,ESOedX/edx-platform,chrisndodge/edx-platform,antonve/s4-project-mooc,CredoReference/edx-platform,dkarakats/edx-platform,cecep-edu/edx-platform,polimediaupv/edx-platform,chauhanhardik/populo,4eek/edx-platform,wwj718/ANALYSE,ahmadiga/min_edx,appsembler/edx-platform,antonve/s4-project-mooc,gsehub/edx-platform,jazkarta/edx-platform-for-isc,xuxiao19910803/edx,ahmedaljazzar/edx-platform,dkarakats/edx-platform,LearnEra/LearnEraPlaftform,kursitet/edx-platform,shubhdev/edxOnBaadal,dsajkl/123,defance/edx-platform,angelapper/edx-platform,edx/edx-platform,devs1991/test_edx_docmode,jamiefolsom/edx-platform,jzoldak/edx-platform,doismellburning/edx-platform,nanolearningllc/edx-platform-cypress,xuxiao19910803/edx-platform,franosincic/edx-platform,DefyVentures/edx-platform,ferabra/edx-platform,cselis86/edx-platform,hastexo/edx-platform,UXE/local-edx,mjirayu/sit_academy,valtech-mooc/edx-platform,dsajkl/reqiop,cecep-edu/edx-platform,marcore/edx-platform,hamzehd/edx-platform,atsolakid/edx-platform,bdero/edx-platform,mitocw/edx-platform,philanthropy-u/edx-platform,SivilTaram/edx-platform
,playm2mboy/edx-platform,zadgroup/edx-platform,cyanna/edx-platform,cyanna/edx-platform,olexiim/edx-platform,don-github/edx-platform,Stanford-Online/edx-platform,xinjiguaike/edx-platform,fintech-circle/edx-platform,benpatterson/edx-platform,adoosii/edx-platform,xuxiao19910803/edx-platform,doismellburning/edx-platform,ovnicraft/edx-platform,motion2015/a3,naresh21/synergetics-edx-platform,tiagochiavericosta/edx-platform,shurihell/testasia,dcosentino/edx-platform,AkA84/edx-platform,ubc/edx-platform,andyzsf/edx,gsehub/edx-platform,xinjiguaike/edx-platform,ubc/edx-platform,JCBarahona/edX,ampax/edx-platform,4eek/edx-platform,TeachAtTUM/edx-platform,Edraak/edx-platform,andyzsf/edx,edx/edx-platform,cognitiveclass/edx-platform,EDUlib/edx-platform,AkA84/edx-platform,olexiim/edx-platform,cognitiveclass/edx-platform,mahendra-r/edx-platform,franosincic/edx-platform,miptliot/edx-platform,jruiperezv/ANALYSE,jolyonb/edx-platform,vikas1885/test1,Lektorium-LLC/edx-platform,simbs/edx-platform,BehavioralInsightsTeam/edx-platform,jamesblunt/edx-platform,Softmotions/edx-platform,amir-qayyum-khan/edx-platform,unicri/edx-platform,waheedahmed/edx-platform,xinjiguaike/edx-platform,rhndg/openedx,arifsetiawan/edx-platform,etzhou/edx-platform,xinjiguaike/edx-platform,MakeHer/edx-platform,gsehub/edx-platform,jelugbo/tundex,jazkarta/edx-platform-for-isc,ovnicraft/edx-platform,hamzehd/edx-platform,xingyepei/edx-platform,ferabra/edx-platform,Kalyzee/edx-platform,LearnEra/LearnEraPlaftform,procangroup/edx-platform,CredoReference/edx-platform,appsembler/edx-platform,benpatterson/edx-platform,kxliugang/edx-platform,jazztpt/edx-platform,dsajkl/123,wwj718/edx-platform,Kalyzee/edx-platform,ahmadio/edx-platform,J861449197/edx-platform,jazztpt/edx-platform,Edraak/circleci-edx-platform,utecuy/edx-platform,zerobatu/edx-platform,beni55/edx-platform,knehez/edx-platform,edry/edx-platform,zubair-arbi/edx-platform,bigdatauniversity/edx-platform,kursitet/edx-platform,rhndg/openedx,pabloborrego93/edx-platform,simbs/
edx-platform,alexthered/kienhoc-platform,cpennington/edx-platform,gymnasium/edx-platform,zadgroup/edx-platform,fly19890211/edx-platform,martynovp/edx-platform,hamzehd/edx-platform,shurihell/testasia,Shrhawk/edx-platform,zubair-arbi/edx-platform,LearnEra/LearnEraPlaftform,alu042/edx-platform,chand3040/cloud_that,ahmedaljazzar/edx-platform,ferabra/edx-platform,ubc/edx-platform,shubhdev/edx-platform,fintech-circle/edx-platform,zerobatu/edx-platform,waheedahmed/edx-platform,jonathan-beard/edx-platform,shabab12/edx-platform,ak2703/edx-platform,nanolearningllc/edx-platform-cypress-2,jzoldak/edx-platform,franosincic/edx-platform,proversity-org/edx-platform,UOMx/edx-platform,defance/edx-platform,leansoft/edx-platform,Edraak/circleci-edx-platform,cselis86/edx-platform,DNFcode/edx-platform,mitocw/edx-platform,SravanthiSinha/edx-platform,motion2015/a3,devs1991/test_edx_docmode,synergeticsedx/deployment-wipro,synergeticsedx/deployment-wipro,motion2015/edx-platform,chauhanhardik/populo_2,xingyepei/edx-platform,beni55/edx-platform,nttks/jenkins-test,DefyVentures/edx-platform,knehez/edx-platform,rismalrv/edx-platform,nanolearningllc/edx-platform-cypress-2,chauhanhardik/populo_2,solashirai/edx-platform,ESOedX/edx-platform,etzhou/edx-platform,doismellburning/edx-platform,jamiefolsom/edx-platform,halvertoluke/edx-platform,peterm-itr/edx-platform,shubhdev/openedx,marcore/edx-platform,inares/edx-platform,vismartltd/edx-platform,chauhanhardik/populo,jamesblunt/edx-platform,JCBarahona/edX,bdero/edx-platform,lduarte1991/edx-platform,openfun/edx-platform,ahmedaljazzar/edx-platform,arbrandes/edx-platform,a-parhom/edx-platform,Semi-global/edx-platform,B-MOOC/edx-platform,shubhdev/openedx,chrisndodge/edx-platform,4eek/edx-platform,SivilTaram/edx-platform,jbassen/edx-platform,kamalx/edx-platform,longmen21/edx-platform,CredoReference/edx-platform,waheedahmed/edx-platform,doganov/edx-platform,ZLLab-Mooc/edx-platform,jbzdak/edx-platform,openfun/edx-platform,jazkarta/edx-platform,philanthropy-u/ed
x-platform,adoosii/edx-platform,proversity-org/edx-platform,zofuthan/edx-platform,bitifirefly/edx-platform,OmarIthawi/edx-platform,eemirtekin/edx-platform,Edraak/edx-platform,antoviaque/edx-platform,jamiefolsom/edx-platform,shubhdev/edxOnBaadal,eduNEXT/edunext-platform,chand3040/cloud_that,shabab12/edx-platform,fly19890211/edx-platform,Ayub-Khan/edx-platform,mahendra-r/edx-platform,ahmadio/edx-platform,deepsrijit1105/edx-platform,inares/edx-platform,shurihell/testasia,eemirtekin/edx-platform,kamalx/edx-platform,kmoocdev/edx-platform,etzhou/edx-platform,don-github/edx-platform,amir-qayyum-khan/edx-platform,mcgachey/edx-platform,beni55/edx-platform,DefyVentures/edx-platform,chudaol/edx-platform,sameetb-cuelogic/edx-platform-test,polimediaupv/edx-platform,RPI-OPENEDX/edx-platform,nanolearningllc/edx-platform-cypress-2,cselis86/edx-platform,caesar2164/edx-platform,leansoft/edx-platform,longmen21/edx-platform,itsjeyd/edx-platform,franosincic/edx-platform,prarthitm/edxplatform,jamiefolsom/edx-platform,mushtaqak/edx-platform,jswope00/griffinx,ahmadiga/min_edx,cyanna/edx-platform,appsembler/edx-platform,adoosii/edx-platform,lduarte1991/edx-platform,ampax/edx-platform,iivic/BoiseStateX,polimediaupv/edx-platform,nttks/edx-platform,edx-solutions/edx-platform,DNFcode/edx-platform,stvstnfrd/edx-platform,jelugbo/tundex,motion2015/edx-platform,nttks/jenkins-test,jazkarta/edx-platform,IndonesiaX/edx-platform,vasyarv/edx-platform,Endika/edx-platform,philanthropy-u/edx-platform,10clouds/edx-platform,jruiperezv/ANALYSE,Kalyzee/edx-platform,ak2703/edx-platform,fly19890211/edx-platform,fly19890211/edx-platform,Shrhawk/edx-platform,beni55/edx-platform,nikolas/edx-platform,alexthered/kienhoc-platform,MSOpenTech/edx-platform,appliedx/edx-platform,waheedahmed/edx-platform,openfun/edx-platform,marcore/edx-platform,Lektorium-LLC/edx-platform,edx-solutions/edx-platform,y12uc231/edx-platform,wwj718/edx-platform,don-github/edx-platform,eemirtekin/edx-platform,deepsrijit1105/edx-platform,mushtaqa
k/edx-platform,a-parhom/edx-platform,shashank971/edx-platform,vikas1885/test1,martynovp/edx-platform,zerobatu/edx-platform,Ayub-Khan/edx-platform,chand3040/cloud_that,shubhdev/edx-platform,tiagochiavericosta/edx-platform,MakeHer/edx-platform,vasyarv/edx-platform,jzoldak/edx-platform,Edraak/circleci-edx-platform,analyseuc3m/ANALYSE-v1,kxliugang/edx-platform,Endika/edx-platform,jonathan-beard/edx-platform,Shrhawk/edx-platform,TeachAtTUM/edx-platform,antonve/s4-project-mooc,ahmadio/edx-platform,chudaol/edx-platform,don-github/edx-platform,andyzsf/edx,romain-li/edx-platform,iivic/BoiseStateX,hastexo/edx-platform,unicri/edx-platform,stvstnfrd/edx-platform,dsajkl/reqiop,chand3040/cloud_that,utecuy/edx-platform,shashank971/edx-platform,shashank971/edx-platform,JioEducation/edx-platform,SivilTaram/edx-platform,zhenzhai/edx-platform,benpatterson/edx-platform,ovnicraft/edx-platform,bigdatauniversity/edx-platform,nanolearningllc/edx-platform-cypress-2,itsjeyd/edx-platform,chrisndodge/edx-platform,Stanford-Online/edx-platform,alu042/edx-platform,Kalyzee/edx-platform,prarthitm/edxplatform,10clouds/edx-platform,xingyepei/edx-platform,nanolearningllc/edx-platform-cypress,jbzdak/edx-platform,LearnEra/LearnEraPlaftform,mahendra-r/edx-platform,xuxiao19910803/edx,utecuy/edx-platform,cyanna/edx-platform,UXE/local-edx,ahmadiga/min_edx,pomegranited/edx-platform,peterm-itr/edx-platform,gymnasium/edx-platform,adoosii/edx-platform,mcgachey/edx-platform,tanmaykm/edx-platform,zerobatu/edx-platform,ahmadiga/min_edx,louyihua/edx-platform,franosincic/edx-platform,gymnasium/edx-platform,jazkarta/edx-platform-for-isc,arifsetiawan/edx-platform,xuxiao19910803/edx,doganov/edx-platform,IONISx/edx-platform,eestay/edx-platform,angelapper/edx-platform,solashirai/edx-platform,tiagochiavericosta/edx-platform,zofuthan/edx-platform,jazkarta/edx-platform-for-isc,Edraak/edx-platform,openfun/edx-platform,tanmaykm/edx-platform,solashirai/edx-platform,ESOedX/edx-platform,knehez/edx-platform,xuxiao19910803/edx,UOM
x/edx-platform,a-parhom/edx-platform,playm2mboy/edx-platform,edx-solutions/edx-platform,gsehub/edx-platform,cognitiveclass/edx-platform,JioEducation/edx-platform,itsjeyd/edx-platform,wwj718/edx-platform,Stanford-Online/edx-platform,defance/edx-platform,JioEducation/edx-platform,doganov/edx-platform,mitocw/edx-platform,hastexo/edx-platform,kxliugang/edx-platform,dsajkl/reqiop,zhenzhai/edx-platform,zofuthan/edx-platform,Semi-global/edx-platform,MSOpenTech/edx-platform,olexiim/edx-platform,msegado/edx-platform,simbs/edx-platform,cpennington/edx-platform,mjirayu/sit_academy,vikas1885/test1,jjmiranda/edx-platform,UOMx/edx-platform,tanmaykm/edx-platform,jbzdak/edx-platform,EDUlib/edx-platform,eduNEXT/edunext-platform,longmen21/edx-platform,zhenzhai/edx-platform,msegado/edx-platform,eduNEXT/edx-platform,Kalyzee/edx-platform,ovnicraft/edx-platform,sudheerchintala/LearnEraPlatForm,jazztpt/edx-platform,beacloudgenius/edx-platform,Semi-global/edx-platform,4eek/edx-platform,bigdatauniversity/edx-platform,motion2015/a3,Livit/Livit.Learn.EdX,J861449197/edx-platform,xuxiao19910803/edx-platform,wwj718/ANALYSE,ahmadio/edx-platform,Ayub-Khan/edx-platform,OmarIthawi/edx-platform,xuxiao19910803/edx,mcgachey/edx-platform,shubhdev/openedx,playm2mboy/edx-platform,eduNEXT/edx-platform,ahmedaljazzar/edx-platform,fintech-circle/edx-platform,nttks/jenkins-test,DefyVentures/edx-platform,mjirayu/sit_academy,longmen21/edx-platform,rue89-tech/edx-platform,J861449197/edx-platform,J861449197/edx-platform,nanolearningllc/edx-platform-cypress,chand3040/cloud_that,peterm-itr/edx-platform,shashank971/edx-platform,bigdatauniversity/edx-platform,Softmotions/edx-platform,rue89-tech/edx-platform,atsolakid/edx-platform,raccoongang/edx-platform,shubhdev/edxOnBaadal,vismartltd/edx-platform,knehez/edx-platform,RPI-OPENEDX/edx-platform,mtlchun/edx,antonve/s4-project-mooc,martynovp/edx-platform,analyseuc3m/ANALYSE-v1,IndonesiaX/edx-platform,ESOedX/edx-platform,kamalx/edx-platform,kmoocdev2/edx-platform,alexthere
d/kienhoc-platform,CredoReference/edx-platform,bigdatauniversity/edx-platform,angelapper/edx-platform,caesar2164/edx-platform,romain-li/edx-platform,RPI-OPENEDX/edx-platform,xuxiao19910803/edx-platform,bitifirefly/edx-platform,B-MOOC/edx-platform,utecuy/edx-platform,kamalx/edx-platform,leansoft/edx-platform,louyihua/edx-platform,cpennington/edx-platform,kmoocdev2/edx-platform,jbassen/edx-platform,rismalrv/edx-platform,y12uc231/edx-platform,ampax/edx-platform,DNFcode/edx-platform,shubhdev/openedx,jruiperezv/ANALYSE,rue89-tech/edx-platform,mtlchun/edx,doismellburning/edx-platform,gymnasium/edx-platform,jolyonb/edx-platform,procangroup/edx-platform,chauhanhardik/populo_2,pabloborrego93/edx-platform,stvstnfrd/edx-platform,edry/edx-platform,chauhanhardik/populo_2,teltek/edx-platform,xingyepei/edx-platform,leansoft/edx-platform,mbareta/edx-platform-ft,kxliugang/edx-platform,zadgroup/edx-platform,caesar2164/edx-platform,analyseuc3m/ANALYSE-v1,IONISx/edx-platform,kmoocdev/edx-platform,jazkarta/edx-platform,shubhdev/edx-platform,beacloudgenius/edx-platform,Semi-global/edx-platform,vasyarv/edx-platform,polimediaupv/edx-platform,shubhdev/edx-platform,Softmotions/edx-platform,nikolas/edx-platform,playm2mboy/edx-platform,dcosentino/edx-platform,kmoocdev2/edx-platform,vikas1885/test1,tanmaykm/edx-platform,motion2015/edx-platform,SravanthiSinha/edx-platform,4eek/edx-platform,dsajkl/reqiop,jswope00/griffinx,nagyistoce/edx-platform,motion2015/edx-platform,ZLLab-Mooc/edx-platform,hamzehd/edx-platform,BehavioralInsightsTeam/edx-platform,valtech-mooc/edx-platform,rismalrv/edx-platform,halvertoluke/edx-platform,antoviaque/edx-platform,shurihell/testasia,devs1991/test_edx_docmode,edx/edx-platform,Livit/Livit.Learn.EdX,lduarte1991/edx-platform,shubhdev/edxOnBaadal,cyanna/edx-platform,devs1991/test_edx_docmode,miptliot/edx-platform,iivic/BoiseStateX,nikolas/edx-platform,jonathan-beard/edx-platform,ampax/edx-platform-backup,Shrhawk/edx-platform,inares/edx-platform,jazkarta/edx-platform-for-
isc,devs1991/test_edx_docmode,ak2703/edx-platform,UXE/local-edx,nttks/edx-platform,JCBarahona/edX,cecep-edu/edx-platform,mjirayu/sit_academy,nikolas/edx-platform,jazkarta/edx-platform,motion2015/a3,mitocw/edx-platform,Softmotions/edx-platform,kmoocdev2/edx-platform,sameetb-cuelogic/edx-platform-test,rismalrv/edx-platform,cselis86/edx-platform,solashirai/edx-platform,inares/edx-platform,beacloudgenius/edx-platform,Ayub-Khan/edx-platform,hamzehd/edx-platform,shubhdev/edx-platform,jamesblunt/edx-platform,atsolakid/edx-platform,wwj718/ANALYSE,raccoongang/edx-platform,CourseTalk/edx-platform,edry/edx-platform,edry/edx-platform,iivic/BoiseStateX,pomegranited/edx-platform,nttks/edx-platform,IndonesiaX/edx-platform,tiagochiavericosta/edx-platform,msegado/edx-platform,jbassen/edx-platform,dkarakats/edx-platform,JCBarahona/edX,DNFcode/edx-platform,10clouds/edx-platform,martynovp/edx-platform,ubc/edx-platform,appliedx/edx-platform,jruiperezv/ANALYSE,beni55/edx-platform,mcgachey/edx-platform,amir-qayyum-khan/edx-platform,sudheerchintala/LearnEraPlatForm,wwj718/edx-platform,edx-solutions/edx-platform,proversity-org/edx-platform,angelapper/edx-platform,pabloborrego93/edx-platform,arifsetiawan/edx-platform,zubair-arbi/edx-platform,pomegranited/edx-platform,ampax/edx-platform-backup,jelugbo/tundex,prarthitm/edxplatform,AkA84/edx-platform,dcosentino/edx-platform,jruiperezv/ANALYSE,romain-li/edx-platform,eestay/edx-platform,UXE/local-edx,leansoft/edx-platform,RPI-OPENEDX/edx-platform,TeachAtTUM/edx-platform,B-MOOC/edx-platform,tiagochiavericosta/edx-platform,jjmiranda/edx-platform,devs1991/test_edx_docmode,dsajkl/123,rhndg/openedx,OmarIthawi/edx-platform,prarthitm/edxplatform,jamiefolsom/edx-platform,sameetb-cuelogic/edx-platform-test,Edraak/edraak-platform,jswope00/griffinx,xingyepei/edx-platform,utecuy/edx-platform,mushtaqak/edx-platform,jbassen/edx-platform,louyihua/edx-platform,mbareta/edx-platform-ft,eduNEXT/edx-platform,jonathan-beard/edx-platform,ubc/edx-platform,JioEducation/
edx-platform,cecep-edu/edx-platform,nagyistoce/edx-platform,vasyarv/edx-platform,bitifirefly/edx-platform,shubhdev/edxOnBaadal,ahmadio/edx-platform,SravanthiSinha/edx-platform,nanolearningllc/edx-platform-cypress-2,eestay/edx-platform,adoosii/edx-platform,mjirayu/sit_academy,mushtaqak/edx-platform,antonve/s4-project-mooc,a-parhom/edx-platform,bitifirefly/edx-platform,IONISx/edx-platform,atsolakid/edx-platform,y12uc231/edx-platform,bdero/edx-platform,shubhdev/openedx,fly19890211/edx-platform,jbassen/edx-platform,alu042/edx-platform,naresh21/synergetics-edx-platform,Edraak/circleci-edx-platform,sudheerchintala/LearnEraPlatForm,dcosentino/edx-platform,unicri/edx-platform,alexthered/kienhoc-platform,knehez/edx-platform,nikolas/edx-platform,rue89-tech/edx-platform,SravanthiSinha/edx-platform,longmen21/edx-platform,pepeportela/edx-platform,rue89-tech/edx-platform,miptliot/edx-platform,nanolearningllc/edx-platform-cypress,inares/edx-platform,Edraak/edraak-platform,sameetb-cuelogic/edx-platform-test,eduNEXT/edunext-platform,defance/edx-platform,procangroup/edx-platform,MakeHer/edx-platform,dsajkl/123,Edraak/edraak-platform,kmoocdev/edx-platform,itsjeyd/edx-platform,vismartltd/edx-platform,eduNEXT/edunext-platform,pepeportela/edx-platform,SravanthiSinha/edx-platform,kmoocdev/edx-platform,ahmadiga/min_edx,Edraak/edx-platform,edx/edx-platform,zerobatu/edx-platform,J861449197/edx-platform,arbrandes/edx-platform,nagyistoce/edx-platform,shurihell/testasia,y12uc231/edx-platform,kursitet/edx-platform,mahendra-r/edx-platform,procangroup/edx-platform,fintech-circle/edx-platform,zadgroup/edx-platform,DNFcode/edx-platform,halvertoluke/edx-platform,xinjiguaike/edx-platform,jazkarta/edx-platform,iivic/BoiseStateX,pomegranited/edx-platform,ZLLab-Mooc/edx-platform,y12uc231/edx-platform,martynovp/edx-platform,dkarakats/edx-platform,kxliugang/edx-platform,Edraak/circleci-edx-platform,wwj718/ANALYSE,nttks/jenkins-test,mbareta/edx-platform-ft,andyzsf/edx,MakeHer/edx-platform,chauhanhardik/popu
lo,dsajkl/123,ampax/edx-platform-backup,zhenzhai/edx-platform,benpatterson/edx-platform,miptliot/edx-platform,jelugbo/tundex,beacloudgenius/edx-platform,proversity-org/edx-platform,chauhanhardik/populo,B-MOOC/edx-platform,nttks/jenkins-test,teltek/edx-platform,UOMx/edx-platform,arifsetiawan/edx-platform,jelugbo/tundex,AkA84/edx-platform,jjmiranda/edx-platform,arifsetiawan/edx-platform,msegado/edx-platform,jbzdak/edx-platform,kamalx/edx-platform,BehavioralInsightsTeam/edx-platform,mbareta/edx-platform-ft,etzhou/edx-platform,kmoocdev2/edx-platform,nttks/edx-platform,halvertoluke/edx-platform,naresh21/synergetics-edx-platform,jolyonb/edx-platform,MSOpenTech/edx-platform,mahendra-r/edx-platform,ZLLab-Mooc/edx-platform,devs1991/test_edx_docmode,arbrandes/edx-platform,cselis86/edx-platform,IndonesiaX/edx-platform,B-MOOC/edx-platform,chudaol/edx-platform,Ayub-Khan/edx-platform,hastexo/edx-platform,Semi-global/edx-platform,Shrhawk/edx-platform,jamesblunt/edx-platform,EDUlib/edx-platform,nanolearningllc/edx-platform-cypress,devs1991/test_edx_docmode,chudaol/edx-platform,halvertoluke/edx-platform,CourseTalk/edx-platform,mtlchun/edx,MSOpenTech/edx-platform,jbzdak/edx-platform,kmoocdev/edx-platform,shashank971/edx-platform,jazztpt/edx-platform,jjmiranda/edx-platform,ferabra/edx-platform,romain-li/edx-platform,jswope00/griffinx,wwj718/edx-platform,playm2mboy/edx-platform,antoviaque/edx-platform,polimediaupv/edx-platform,zubair-arbi/edx-platform,naresh21/synergetics-edx-platform,ak2703/edx-platform,zofuthan/edx-platform,Livit/Livit.Learn.EdX,AkA84/edx-platform,simbs/edx-platform,olexiim/edx-platform,vismartltd/edx-platform,mtlchun/edx,valtech-mooc/edx-platform,rhndg/openedx,alu042/edx-platform,romain-li/edx-platform,rhndg/openedx,eduNEXT/edx-platform,teltek/edx-platform,pomegranited/edx-platform,vismartltd/edx-platform,amir-qayyum-khan/edx-platform,edry/edx-platform,raccoongang/edx-platform,eestay/edx-platform,appliedx/edx-platform,solashirai/edx-platform,bdero/edx-platform,ferab
ra/edx-platform,louyihua/edx-platform,CourseTalk/edx-platform,wwj718/ANALYSE,kursitet/edx-platform,chrisndodge/edx-platform,cecep-edu/edx-platform,pabloborrego93/edx-platform,dcosentino/edx-platform,kursitet/edx-platform,caesar2164/edx-platform,etzhou/edx-platform,MSOpenTech/edx-platform,deepsrijit1105/edx-platform,10clouds/edx-platform,cognitiveclass/edx-platform,SivilTaram/edx-platform,deepsrijit1105/edx-platform,eemirtekin/edx-platform,jazztpt/edx-platform,motion2015/a3,eestay/edx-platform,pepeportela/edx-platform,ampax/edx-platform-backup,appliedx/edx-platform,alexthered/kienhoc-platform,jzoldak/edx-platform,RPI-OPENEDX/edx-platform,IONISx/edx-platform,CourseTalk/edx-platform,analyseuc3m/ANALYSE-v1,chudaol/edx-platform,nttks/edx-platform,Livit/Livit.Learn.EdX,waheedahmed/edx-platform,Lektorium-LLC/edx-platform,cognitiveclass/edx-platform,doganov/edx-platform,MakeHer/edx-platform,ZLLab-Mooc/edx-platform,nagyistoce/edx-platform,shabab12/edx-platform,atsolakid/edx-platform,Edraak/edx-platform,zhenzhai/edx-platform,cpennington/edx-platform,peterm-itr/edx-platform,Softmotions/edx-platform,openfun/edx-platform,dkarakats/edx-platform,arbrandes/edx-platform,IndonesiaX/edx-platform,jswope00/griffinx,ak2703/edx-platform,simbs/edx-platform,IONISx/edx-platform,Stanford-Online/edx-platform,valtech-mooc/edx-platform,BehavioralInsightsTeam/edx-platform,teltek/edx-platform,mushtaqak/edx-platform,don-github/edx-platform,zadgroup/edx-platform,stvstnfrd/edx-platform,unicri/edx-platform,bitifirefly/edx-platform,appsembler/edx-platform,benpatterson/edx-platform,xuxiao19910803/edx-platform,doganov/edx-platform,SivilTaram/edx-platform,nagyistoce/edx-platform,mcgachey/edx-platform,sudheerchintala/LearnEraPlatForm,raccoongang/edx-platform,olexiim/edx-platform,chauhanhardik/populo_2
|
from paver.easy import sh, task
from pavelib.utils.envs import Env
__test__ = False # do not collect
@task
def clean_test_files():
"""
Clean fixture files used by tests and .pyc files
"""
sh("git clean -fqdx test_root/logs test_root/data test_root/staticfiles test_root/uploads")
sh("find . -type f -name \"*.pyc\" -delete")
sh("rm -rf test_root/log/auto_screenshots/*")
+ sh("rm -rf /tmp/mako_[cl]ms")
def clean_dir(directory):
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
sh('find {dir} -type f -delete'.format(dir=directory))
@task
def clean_reports_dir():
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
reports_dir = Env.REPORT_DIR.makedirs_p()
clean_dir(reports_dir)
@task
def clean_mongo():
"""
Clean mongo test databases
"""
sh("mongo {repo_root}/scripts/delete-mongo-test-dbs.js".format(repo_root=Env.REPO_ROOT))
|
Clean out the mako temp dirs before running tests
|
## Code Before:
from paver.easy import sh, task
from pavelib.utils.envs import Env
__test__ = False # do not collect
@task
def clean_test_files():
"""
Clean fixture files used by tests and .pyc files
"""
sh("git clean -fqdx test_root/logs test_root/data test_root/staticfiles test_root/uploads")
sh("find . -type f -name \"*.pyc\" -delete")
sh("rm -rf test_root/log/auto_screenshots/*")
def clean_dir(directory):
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
sh('find {dir} -type f -delete'.format(dir=directory))
@task
def clean_reports_dir():
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
reports_dir = Env.REPORT_DIR.makedirs_p()
clean_dir(reports_dir)
@task
def clean_mongo():
"""
Clean mongo test databases
"""
sh("mongo {repo_root}/scripts/delete-mongo-test-dbs.js".format(repo_root=Env.REPO_ROOT))
## Instruction:
Clean out the mako temp dirs before running tests
## Code After:
from paver.easy import sh, task
from pavelib.utils.envs import Env
__test__ = False # do not collect
@task
def clean_test_files():
"""
Clean fixture files used by tests and .pyc files
"""
sh("git clean -fqdx test_root/logs test_root/data test_root/staticfiles test_root/uploads")
sh("find . -type f -name \"*.pyc\" -delete")
sh("rm -rf test_root/log/auto_screenshots/*")
sh("rm -rf /tmp/mako_[cl]ms")
def clean_dir(directory):
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
sh('find {dir} -type f -delete'.format(dir=directory))
@task
def clean_reports_dir():
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
reports_dir = Env.REPORT_DIR.makedirs_p()
clean_dir(reports_dir)
@task
def clean_mongo():
"""
Clean mongo test databases
"""
sh("mongo {repo_root}/scripts/delete-mongo-test-dbs.js".format(repo_root=Env.REPO_ROOT))
|
from paver.easy import sh, task
from pavelib.utils.envs import Env
__test__ = False # do not collect
@task
def clean_test_files():
"""
Clean fixture files used by tests and .pyc files
"""
sh("git clean -fqdx test_root/logs test_root/data test_root/staticfiles test_root/uploads")
sh("find . -type f -name \"*.pyc\" -delete")
sh("rm -rf test_root/log/auto_screenshots/*")
+ sh("rm -rf /tmp/mako_[cl]ms")
def clean_dir(directory):
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
sh('find {dir} -type f -delete'.format(dir=directory))
@task
def clean_reports_dir():
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
reports_dir = Env.REPORT_DIR.makedirs_p()
clean_dir(reports_dir)
@task
def clean_mongo():
"""
Clean mongo test databases
"""
sh("mongo {repo_root}/scripts/delete-mongo-test-dbs.js".format(repo_root=Env.REPO_ROOT))
|
13a39f4e025160f584beef8442e82ec3c3526a95
|
raco/myrial/cli_test.py
|
raco/myrial/cli_test.py
|
"""Basic test of the command-line interface to Myrial."""
import subprocess
import unittest
class CliTest(unittest.TestCase):
def test_cli(self):
out = subprocess.check_output(['python', 'scripts/myrial',
'examples/reachable.myl'])
self.assertIn('DO', out)
self.assertIn('WHILE', out)
def test_cli_reserved_column_name(self):
proc = subprocess.Popen(
['python', 'scripts/myrial', 'examples/bad_column_name.myl'],
stdout=subprocess.PIPE)
out = proc.communicate()[0]
self.assertIn('The token "SafeDiv" on line 2 is reserved', out)
|
"""Basic test of the command-line interface to Myrial."""
import subprocess
import unittest
class CliTest(unittest.TestCase):
def test_cli(self):
out = subprocess.check_output(['python', 'scripts/myrial',
'examples/reachable.myl'])
self.assertIn('DO', out)
self.assertIn('WHILE', out)
def test_cli_standalone(self):
out = subprocess.check_output(['python', 'scripts/myrial', '-f',
'examples/standalone.myl'])
self.assertIn('Dan Suciu,engineering', out)
def test_cli_reserved_column_name(self):
proc = subprocess.Popen(
['python', 'scripts/myrial', 'examples/bad_column_name.myl'],
stdout=subprocess.PIPE)
out = proc.communicate()[0]
self.assertIn('The token "SafeDiv" on line 2 is reserved', out)
|
Test of standalone myrial mode
|
Test of standalone myrial mode
|
Python
|
bsd-3-clause
|
uwescience/raco,uwescience/raco,uwescience/raco,uwescience/raco,uwescience/raco
|
"""Basic test of the command-line interface to Myrial."""
import subprocess
import unittest
class CliTest(unittest.TestCase):
def test_cli(self):
out = subprocess.check_output(['python', 'scripts/myrial',
'examples/reachable.myl'])
self.assertIn('DO', out)
self.assertIn('WHILE', out)
+ def test_cli_standalone(self):
+ out = subprocess.check_output(['python', 'scripts/myrial', '-f',
+ 'examples/standalone.myl'])
+ self.assertIn('Dan Suciu,engineering', out)
+
def test_cli_reserved_column_name(self):
proc = subprocess.Popen(
['python', 'scripts/myrial', 'examples/bad_column_name.myl'],
stdout=subprocess.PIPE)
out = proc.communicate()[0]
self.assertIn('The token "SafeDiv" on line 2 is reserved', out)
|
Test of standalone myrial mode
|
## Code Before:
"""Basic test of the command-line interface to Myrial."""
import subprocess
import unittest
class CliTest(unittest.TestCase):
def test_cli(self):
out = subprocess.check_output(['python', 'scripts/myrial',
'examples/reachable.myl'])
self.assertIn('DO', out)
self.assertIn('WHILE', out)
def test_cli_reserved_column_name(self):
proc = subprocess.Popen(
['python', 'scripts/myrial', 'examples/bad_column_name.myl'],
stdout=subprocess.PIPE)
out = proc.communicate()[0]
self.assertIn('The token "SafeDiv" on line 2 is reserved', out)
## Instruction:
Test of standalone myrial mode
## Code After:
"""Basic test of the command-line interface to Myrial."""
import subprocess
import unittest
class CliTest(unittest.TestCase):
def test_cli(self):
out = subprocess.check_output(['python', 'scripts/myrial',
'examples/reachable.myl'])
self.assertIn('DO', out)
self.assertIn('WHILE', out)
def test_cli_standalone(self):
out = subprocess.check_output(['python', 'scripts/myrial', '-f',
'examples/standalone.myl'])
self.assertIn('Dan Suciu,engineering', out)
def test_cli_reserved_column_name(self):
proc = subprocess.Popen(
['python', 'scripts/myrial', 'examples/bad_column_name.myl'],
stdout=subprocess.PIPE)
out = proc.communicate()[0]
self.assertIn('The token "SafeDiv" on line 2 is reserved', out)
|
"""Basic test of the command-line interface to Myrial."""
import subprocess
import unittest
class CliTest(unittest.TestCase):
def test_cli(self):
out = subprocess.check_output(['python', 'scripts/myrial',
'examples/reachable.myl'])
self.assertIn('DO', out)
self.assertIn('WHILE', out)
+ def test_cli_standalone(self):
+ out = subprocess.check_output(['python', 'scripts/myrial', '-f',
+ 'examples/standalone.myl'])
+ self.assertIn('Dan Suciu,engineering', out)
+
def test_cli_reserved_column_name(self):
proc = subprocess.Popen(
['python', 'scripts/myrial', 'examples/bad_column_name.myl'],
stdout=subprocess.PIPE)
out = proc.communicate()[0]
self.assertIn('The token "SafeDiv" on line 2 is reserved', out)
|
e704d8cb63e76bb1f5b1da6fec7ae4f65d7710f1
|
tests/__init__.py
|
tests/__init__.py
|
import sys
try:
# noinspection PyPackageRequirements
import unittest2 as unittest
sys.modules['unittest'] = unittest
except ImportError:
import unittest
from goless.backends import current as be
class BaseTests(unittest.TestCase):
"""
Base class for unit tests.
Yields in setup and teardown so no lingering tasklets
are run in a later test,
potentially causing an error that would leave people scratching their heads.
"""
def setUp(self):
be.yield_()
def tearDown(self):
be.yield_()
self.assertTrue(be.would_deadlock())
|
import sys
try:
# noinspection PyPackageRequirements
import unittest2 as unittest
sys.modules['unittest'] = unittest
except ImportError:
import unittest
from goless.backends import current as be
class BaseTests(unittest.TestCase):
"""
Base class for unit tests.
Yields in setup and teardown so no lingering tasklets
are run in a later test,
potentially causing an error that would leave people scratching their heads.
"""
def setUp(self):
be.yield_()
def tearDown(self):
be.yield_()
self.assertTrue(be.would_deadlock(), 'Task(s) still running after test finished')
|
Add comment in BaseTests tearDown
|
Add comment in BaseTests tearDown
|
Python
|
apache-2.0
|
rgalanakis/goless,rgalanakis/goless
|
import sys
try:
# noinspection PyPackageRequirements
import unittest2 as unittest
sys.modules['unittest'] = unittest
except ImportError:
import unittest
from goless.backends import current as be
class BaseTests(unittest.TestCase):
"""
Base class for unit tests.
Yields in setup and teardown so no lingering tasklets
are run in a later test,
potentially causing an error that would leave people scratching their heads.
"""
def setUp(self):
be.yield_()
def tearDown(self):
be.yield_()
- self.assertTrue(be.would_deadlock())
+ self.assertTrue(be.would_deadlock(), 'Task(s) still running after test finished')
|
Add comment in BaseTests tearDown
|
## Code Before:
import sys
try:
# noinspection PyPackageRequirements
import unittest2 as unittest
sys.modules['unittest'] = unittest
except ImportError:
import unittest
from goless.backends import current as be
class BaseTests(unittest.TestCase):
"""
Base class for unit tests.
Yields in setup and teardown so no lingering tasklets
are run in a later test,
potentially causing an error that would leave people scratching their heads.
"""
def setUp(self):
be.yield_()
def tearDown(self):
be.yield_()
self.assertTrue(be.would_deadlock())
## Instruction:
Add comment in BaseTests tearDown
## Code After:
import sys
try:
# noinspection PyPackageRequirements
import unittest2 as unittest
sys.modules['unittest'] = unittest
except ImportError:
import unittest
from goless.backends import current as be
class BaseTests(unittest.TestCase):
"""
Base class for unit tests.
Yields in setup and teardown so no lingering tasklets
are run in a later test,
potentially causing an error that would leave people scratching their heads.
"""
def setUp(self):
be.yield_()
def tearDown(self):
be.yield_()
self.assertTrue(be.would_deadlock(), 'Task(s) still running after test finished')
|
import sys
try:
# noinspection PyPackageRequirements
import unittest2 as unittest
sys.modules['unittest'] = unittest
except ImportError:
import unittest
from goless.backends import current as be
class BaseTests(unittest.TestCase):
"""
Base class for unit tests.
Yields in setup and teardown so no lingering tasklets
are run in a later test,
potentially causing an error that would leave people scratching their heads.
"""
def setUp(self):
be.yield_()
def tearDown(self):
be.yield_()
- self.assertTrue(be.would_deadlock())
+ self.assertTrue(be.would_deadlock(), 'Task(s) still running after test finished')
|
b57a599640c6fa8bf23f081c914b7437e3f04dcd
|
course_discovery/apps/courses/management/commands/refresh_all_courses.py
|
course_discovery/apps/courses/management/commands/refresh_all_courses.py
|
import logging
from optparse import make_option
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
option_list = BaseCommand.option_list + (
make_option('--access_token',
action='store',
dest='access_token',
default=None,
help='OAuth2 access token used to authenticate API calls.'),
)
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
|
import logging
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
def add_arguments(self, parser):
parser.add_argument(
'--access_token',
action='store',
dest='access_token',
default=None,
help='OAuth2 access token used to authenticate API calls.'
)
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
|
Switch to argparse for management command argument parsing
|
Switch to argparse for management command argument parsing
|
Python
|
agpl-3.0
|
edx/course-discovery,edx/course-discovery,edx/course-discovery,edx/course-discovery
|
import logging
- from optparse import make_option
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
- option_list = BaseCommand.option_list + (
+ def add_arguments(self, parser):
+ parser.add_argument(
- make_option('--access_token',
+ '--access_token',
- action='store',
+ action='store',
- dest='access_token',
+ dest='access_token',
- default=None,
+ default=None,
- help='OAuth2 access token used to authenticate API calls.'),
+ help='OAuth2 access token used to authenticate API calls.'
- )
+ )
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
|
Switch to argparse for management command argument parsing
|
## Code Before:
import logging
from optparse import make_option
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
option_list = BaseCommand.option_list + (
make_option('--access_token',
action='store',
dest='access_token',
default=None,
help='OAuth2 access token used to authenticate API calls.'),
)
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
## Instruction:
Switch to argparse for management command argument parsing
## Code After:
import logging
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
def add_arguments(self, parser):
parser.add_argument(
'--access_token',
action='store',
dest='access_token',
default=None,
help='OAuth2 access token used to authenticate API calls.'
)
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
|
import logging
- from optparse import make_option
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
- option_list = BaseCommand.option_list + (
+ def add_arguments(self, parser):
+ parser.add_argument(
- make_option('--access_token',
? ^^^^^^^^^^^^
+ '--access_token',
? ^^^^
- action='store',
? --------
+ action='store',
- dest='access_token',
? --------
+ dest='access_token',
- default=None,
? --------
+ default=None,
- help='OAuth2 access token used to authenticate API calls.'),
? -------- --
+ help='OAuth2 access token used to authenticate API calls.'
- )
+ )
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
|
9413b4b24c318df4bf68069038081d08fa9ad2e8
|
vumi/transports/infobip/__init__.py
|
vumi/transports/infobip/__init__.py
|
"""Infobip transport."""
from vumi.transports.infobip.infobip import InfobipTransport
__all__ = ['InfobipTransport']
|
"""Infobip transport."""
from vumi.transports.infobip.infobip import InfobipTransport, InfobipError
__all__ = ['InfobipTransport', 'InfobipError']
|
Add InfobipError to things exported by Infobip package.
|
Add InfobipError to things exported by Infobip package.
|
Python
|
bsd-3-clause
|
TouK/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix,TouK/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix
|
"""Infobip transport."""
- from vumi.transports.infobip.infobip import InfobipTransport
+ from vumi.transports.infobip.infobip import InfobipTransport, InfobipError
- __all__ = ['InfobipTransport']
+ __all__ = ['InfobipTransport', 'InfobipError']
|
Add InfobipError to things exported by Infobip package.
|
## Code Before:
"""Infobip transport."""
from vumi.transports.infobip.infobip import InfobipTransport
__all__ = ['InfobipTransport']
## Instruction:
Add InfobipError to things exported by Infobip package.
## Code After:
"""Infobip transport."""
from vumi.transports.infobip.infobip import InfobipTransport, InfobipError
__all__ = ['InfobipTransport', 'InfobipError']
|
"""Infobip transport."""
- from vumi.transports.infobip.infobip import InfobipTransport
+ from vumi.transports.infobip.infobip import InfobipTransport, InfobipError
? ++++++++++++++
- __all__ = ['InfobipTransport']
+ __all__ = ['InfobipTransport', 'InfobipError']
? ++++++++++++++++
|
23456a32038f13c6219b6af5ff9fff7e1daae242
|
abusehelper/core/tests/test_utils.py
|
abusehelper/core/tests/test_utils.py
|
import pickle
import unittest
from .. import utils
class TestCompressedCollection(unittest.TestCase):
def test_collection_can_be_pickled_and_unpickled(self):
original = utils.CompressedCollection()
original.append("ab")
original.append("cd")
unpickled = pickle.loads(pickle.dumps(original))
self.assertEqual(["ab", "cd"], list(unpickled))
def test_objects_can_be_appended_to_an_unpickled_collection(self):
original = utils.CompressedCollection()
original.append("ab")
unpickled = pickle.loads(pickle.dumps(original))
self.assertEqual(["ab"], list(unpickled))
unpickled.append("cd")
self.assertEqual(["ab", "cd"], list(unpickled))
def test_objects_can_be_appended_a_collection_after_pickling(self):
original = utils.CompressedCollection()
original.append("ab")
pickle.dumps(original)
original.append("cd")
self.assertEqual(["ab", "cd"], list(original))
|
import socket
import pickle
import urllib2
import unittest
import idiokit
from .. import utils
class TestFetchUrl(unittest.TestCase):
def test_should_raise_TypeError_when_passing_in_an_opener(self):
sock = socket.socket()
try:
sock.bind(("localhost", 0))
sock.listen(1)
_, port = sock.getsockname()
opener = urllib2.build_opener()
fetch = utils.fetch_url("http://localhost:{0}".format(port), opener=opener)
self.assertRaises(TypeError, idiokit.main_loop, fetch)
finally:
sock.close()
class TestCompressedCollection(unittest.TestCase):
def test_collection_can_be_pickled_and_unpickled(self):
original = utils.CompressedCollection()
original.append("ab")
original.append("cd")
unpickled = pickle.loads(pickle.dumps(original))
self.assertEqual(["ab", "cd"], list(unpickled))
def test_objects_can_be_appended_to_an_unpickled_collection(self):
original = utils.CompressedCollection()
original.append("ab")
unpickled = pickle.loads(pickle.dumps(original))
self.assertEqual(["ab"], list(unpickled))
unpickled.append("cd")
self.assertEqual(["ab", "cd"], list(unpickled))
def test_objects_can_be_appended_a_collection_after_pickling(self):
original = utils.CompressedCollection()
original.append("ab")
pickle.dumps(original)
original.append("cd")
self.assertEqual(["ab", "cd"], list(original))
|
Add a test for utils.fetch_url(..., opener=...)
|
Add a test for utils.fetch_url(..., opener=...)
Signed-off-by: Ossi Herrala <[email protected]>
|
Python
|
mit
|
abusesa/abusehelper
|
+ import socket
import pickle
+ import urllib2
import unittest
+ import idiokit
+
from .. import utils
+
+
+ class TestFetchUrl(unittest.TestCase):
+ def test_should_raise_TypeError_when_passing_in_an_opener(self):
+ sock = socket.socket()
+ try:
+ sock.bind(("localhost", 0))
+ sock.listen(1)
+ _, port = sock.getsockname()
+
+ opener = urllib2.build_opener()
+ fetch = utils.fetch_url("http://localhost:{0}".format(port), opener=opener)
+ self.assertRaises(TypeError, idiokit.main_loop, fetch)
+ finally:
+ sock.close()
class TestCompressedCollection(unittest.TestCase):
def test_collection_can_be_pickled_and_unpickled(self):
original = utils.CompressedCollection()
original.append("ab")
original.append("cd")
unpickled = pickle.loads(pickle.dumps(original))
self.assertEqual(["ab", "cd"], list(unpickled))
def test_objects_can_be_appended_to_an_unpickled_collection(self):
original = utils.CompressedCollection()
original.append("ab")
unpickled = pickle.loads(pickle.dumps(original))
self.assertEqual(["ab"], list(unpickled))
unpickled.append("cd")
self.assertEqual(["ab", "cd"], list(unpickled))
def test_objects_can_be_appended_a_collection_after_pickling(self):
original = utils.CompressedCollection()
original.append("ab")
pickle.dumps(original)
original.append("cd")
self.assertEqual(["ab", "cd"], list(original))
|
Add a test for utils.fetch_url(..., opener=...)
|
## Code Before:
import pickle
import unittest
from .. import utils
class TestCompressedCollection(unittest.TestCase):
def test_collection_can_be_pickled_and_unpickled(self):
original = utils.CompressedCollection()
original.append("ab")
original.append("cd")
unpickled = pickle.loads(pickle.dumps(original))
self.assertEqual(["ab", "cd"], list(unpickled))
def test_objects_can_be_appended_to_an_unpickled_collection(self):
original = utils.CompressedCollection()
original.append("ab")
unpickled = pickle.loads(pickle.dumps(original))
self.assertEqual(["ab"], list(unpickled))
unpickled.append("cd")
self.assertEqual(["ab", "cd"], list(unpickled))
def test_objects_can_be_appended_a_collection_after_pickling(self):
original = utils.CompressedCollection()
original.append("ab")
pickle.dumps(original)
original.append("cd")
self.assertEqual(["ab", "cd"], list(original))
## Instruction:
Add a test for utils.fetch_url(..., opener=...)
## Code After:
import socket
import pickle
import urllib2
import unittest
import idiokit
from .. import utils
class TestFetchUrl(unittest.TestCase):
def test_should_raise_TypeError_when_passing_in_an_opener(self):
sock = socket.socket()
try:
sock.bind(("localhost", 0))
sock.listen(1)
_, port = sock.getsockname()
opener = urllib2.build_opener()
fetch = utils.fetch_url("http://localhost:{0}".format(port), opener=opener)
self.assertRaises(TypeError, idiokit.main_loop, fetch)
finally:
sock.close()
class TestCompressedCollection(unittest.TestCase):
def test_collection_can_be_pickled_and_unpickled(self):
original = utils.CompressedCollection()
original.append("ab")
original.append("cd")
unpickled = pickle.loads(pickle.dumps(original))
self.assertEqual(["ab", "cd"], list(unpickled))
def test_objects_can_be_appended_to_an_unpickled_collection(self):
original = utils.CompressedCollection()
original.append("ab")
unpickled = pickle.loads(pickle.dumps(original))
self.assertEqual(["ab"], list(unpickled))
unpickled.append("cd")
self.assertEqual(["ab", "cd"], list(unpickled))
def test_objects_can_be_appended_a_collection_after_pickling(self):
original = utils.CompressedCollection()
original.append("ab")
pickle.dumps(original)
original.append("cd")
self.assertEqual(["ab", "cd"], list(original))
|
+ import socket
import pickle
+ import urllib2
import unittest
+ import idiokit
+
from .. import utils
+
+
+ class TestFetchUrl(unittest.TestCase):
+ def test_should_raise_TypeError_when_passing_in_an_opener(self):
+ sock = socket.socket()
+ try:
+ sock.bind(("localhost", 0))
+ sock.listen(1)
+ _, port = sock.getsockname()
+
+ opener = urllib2.build_opener()
+ fetch = utils.fetch_url("http://localhost:{0}".format(port), opener=opener)
+ self.assertRaises(TypeError, idiokit.main_loop, fetch)
+ finally:
+ sock.close()
class TestCompressedCollection(unittest.TestCase):
def test_collection_can_be_pickled_and_unpickled(self):
original = utils.CompressedCollection()
original.append("ab")
original.append("cd")
unpickled = pickle.loads(pickle.dumps(original))
self.assertEqual(["ab", "cd"], list(unpickled))
def test_objects_can_be_appended_to_an_unpickled_collection(self):
original = utils.CompressedCollection()
original.append("ab")
unpickled = pickle.loads(pickle.dumps(original))
self.assertEqual(["ab"], list(unpickled))
unpickled.append("cd")
self.assertEqual(["ab", "cd"], list(unpickled))
def test_objects_can_be_appended_a_collection_after_pickling(self):
original = utils.CompressedCollection()
original.append("ab")
pickle.dumps(original)
original.append("cd")
self.assertEqual(["ab", "cd"], list(original))
|
0c327e17dba29a4e94213f264fe7ea931bb26782
|
invoke/parser/argument.py
|
invoke/parser/argument.py
|
class Argument(object):
def __init__(self, name=None, names=(), kind=str, default=None):
if name and names:
msg = "Cannot give both 'name' and 'names' arguments! Pick one."
raise TypeError(msg)
if not (name or names):
raise TypeError("An Argument must have at least one name.")
self.names = names if names else (name,)
self.kind = kind
self.raw_value = self._value = None
self.default = default
def __str__(self):
return "Arg: %r (%s)" % (self.names, self.kind)
@property
def takes_value(self):
return self.kind is not bool
@property
def value(self):
return self._value if self._value is not None else self.default
@value.setter
def value(self, arg):
self.raw_value = arg
self._value = self.kind(arg)
|
class Argument(object):
def __init__(self, name=None, names=(), kind=str, default=None, help=None):
if name and names:
msg = "Cannot give both 'name' and 'names' arguments! Pick one."
raise TypeError(msg)
if not (name or names):
raise TypeError("An Argument must have at least one name.")
self.names = names if names else (name,)
self.kind = kind
self.raw_value = self._value = None
self.default = default
self.help = help
def __str__(self):
return "Arg: %r (%s)" % (self.names, self.kind)
@property
def takes_value(self):
return self.kind is not bool
@property
def value(self):
return self._value if self._value is not None else self.default
@value.setter
def value(self, arg):
self.raw_value = arg
self._value = self.kind(arg)
|
Add support for per Argument help data
|
Add support for per Argument help data
|
Python
|
bsd-2-clause
|
kejbaly2/invoke,mattrobenolt/invoke,pyinvoke/invoke,pfmoore/invoke,sophacles/invoke,kejbaly2/invoke,pfmoore/invoke,mkusz/invoke,mattrobenolt/invoke,mkusz/invoke,tyewang/invoke,pyinvoke/invoke,singingwolfboy/invoke,frol/invoke,frol/invoke,alex/invoke
|
class Argument(object):
- def __init__(self, name=None, names=(), kind=str, default=None):
+ def __init__(self, name=None, names=(), kind=str, default=None, help=None):
if name and names:
msg = "Cannot give both 'name' and 'names' arguments! Pick one."
raise TypeError(msg)
if not (name or names):
raise TypeError("An Argument must have at least one name.")
self.names = names if names else (name,)
self.kind = kind
self.raw_value = self._value = None
self.default = default
+ self.help = help
def __str__(self):
return "Arg: %r (%s)" % (self.names, self.kind)
@property
def takes_value(self):
return self.kind is not bool
@property
def value(self):
return self._value if self._value is not None else self.default
@value.setter
def value(self, arg):
self.raw_value = arg
self._value = self.kind(arg)
|
Add support for per Argument help data
|
## Code Before:
class Argument(object):
def __init__(self, name=None, names=(), kind=str, default=None):
if name and names:
msg = "Cannot give both 'name' and 'names' arguments! Pick one."
raise TypeError(msg)
if not (name or names):
raise TypeError("An Argument must have at least one name.")
self.names = names if names else (name,)
self.kind = kind
self.raw_value = self._value = None
self.default = default
def __str__(self):
return "Arg: %r (%s)" % (self.names, self.kind)
@property
def takes_value(self):
return self.kind is not bool
@property
def value(self):
return self._value if self._value is not None else self.default
@value.setter
def value(self, arg):
self.raw_value = arg
self._value = self.kind(arg)
## Instruction:
Add support for per Argument help data
## Code After:
class Argument(object):
def __init__(self, name=None, names=(), kind=str, default=None, help=None):
if name and names:
msg = "Cannot give both 'name' and 'names' arguments! Pick one."
raise TypeError(msg)
if not (name or names):
raise TypeError("An Argument must have at least one name.")
self.names = names if names else (name,)
self.kind = kind
self.raw_value = self._value = None
self.default = default
self.help = help
def __str__(self):
return "Arg: %r (%s)" % (self.names, self.kind)
@property
def takes_value(self):
return self.kind is not bool
@property
def value(self):
return self._value if self._value is not None else self.default
@value.setter
def value(self, arg):
self.raw_value = arg
self._value = self.kind(arg)
|
class Argument(object):
- def __init__(self, name=None, names=(), kind=str, default=None):
+ def __init__(self, name=None, names=(), kind=str, default=None, help=None):
? +++++++++++
if name and names:
msg = "Cannot give both 'name' and 'names' arguments! Pick one."
raise TypeError(msg)
if not (name or names):
raise TypeError("An Argument must have at least one name.")
self.names = names if names else (name,)
self.kind = kind
self.raw_value = self._value = None
self.default = default
+ self.help = help
def __str__(self):
return "Arg: %r (%s)" % (self.names, self.kind)
@property
def takes_value(self):
return self.kind is not bool
@property
def value(self):
return self._value if self._value is not None else self.default
@value.setter
def value(self, arg):
self.raw_value = arg
self._value = self.kind(arg)
|
08718ce949e7f80b0cbe39c3eba4446133c6d72d
|
code/marv-api/marv_api/deprecation.py
|
code/marv-api/marv_api/deprecation.py
|
import warnings
from dataclasses import dataclass
from typing import Any
@dataclass
class Info:
module: str
version: str
obj: Any
msg: str = None
def make_getattr(module, dct):
assert all(x.module == module for x in dct.values())
def __getattr__(name):
info = dct.get(name)
if info is None:
raise AttributeError(f'module {module} has no attribute {name}')
msg = (
f'{module}.{name} will be removed in {info.version}; '
f'{info.msg or "please let us know if this is an issue for you."}'
)
warnings.warn(msg, FutureWarning, stacklevel=2)
return info.obj
return __getattr__
|
import functools
import warnings
from dataclasses import dataclass
from typing import Any
@dataclass
class Info:
module: str
version: str
obj: Any
msg: str = None
def make_getattr(module, dct):
assert all(x.module == module for x in dct.values())
def __getattr__(name):
info = dct.get(name)
if info is None:
raise AttributeError(f'module {module} has no attribute {name}')
msg = (
f'{module}.{name} will be removed in {info.version}; '
f'{info.msg or "please let us know if this is an issue for you."}'
)
warnings.warn(msg, FutureWarning, stacklevel=2)
return info.obj
return __getattr__
def deprecated(version, msg=None, name=None):
"""Wrap function to trigger deprecated message upon call."""
def deco(func):
@functools.wraps(func)
def wrapper(*args, **kw):
_msg = (
f'{func.__module__}.{name or func.__name__} will be removed in {version}; '
f'{msg or "please let us know if this is an issue for you."}'
)
warnings.warn(_msg, FutureWarning, stacklevel=2)
return func(*args, **kw)
return wrapper
return deco
|
Add decorator to declare function deprecated
|
Add decorator to declare function deprecated
|
Python
|
agpl-3.0
|
ternaris/marv-robotics,ternaris/marv-robotics
|
+ import functools
import warnings
from dataclasses import dataclass
from typing import Any
@dataclass
class Info:
module: str
version: str
obj: Any
msg: str = None
def make_getattr(module, dct):
assert all(x.module == module for x in dct.values())
def __getattr__(name):
info = dct.get(name)
if info is None:
raise AttributeError(f'module {module} has no attribute {name}')
msg = (
f'{module}.{name} will be removed in {info.version}; '
f'{info.msg or "please let us know if this is an issue for you."}'
)
warnings.warn(msg, FutureWarning, stacklevel=2)
return info.obj
return __getattr__
+
+ def deprecated(version, msg=None, name=None):
+ """Wrap function to trigger deprecated message upon call."""
+ def deco(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kw):
+ _msg = (
+ f'{func.__module__}.{name or func.__name__} will be removed in {version}; '
+ f'{msg or "please let us know if this is an issue for you."}'
+ )
+ warnings.warn(_msg, FutureWarning, stacklevel=2)
+ return func(*args, **kw)
+ return wrapper
+ return deco
+
|
Add decorator to declare function deprecated
|
## Code Before:
import warnings
from dataclasses import dataclass
from typing import Any
@dataclass
class Info:
module: str
version: str
obj: Any
msg: str = None
def make_getattr(module, dct):
assert all(x.module == module for x in dct.values())
def __getattr__(name):
info = dct.get(name)
if info is None:
raise AttributeError(f'module {module} has no attribute {name}')
msg = (
f'{module}.{name} will be removed in {info.version}; '
f'{info.msg or "please let us know if this is an issue for you."}'
)
warnings.warn(msg, FutureWarning, stacklevel=2)
return info.obj
return __getattr__
## Instruction:
Add decorator to declare function deprecated
## Code After:
import functools
import warnings
from dataclasses import dataclass
from typing import Any
@dataclass
class Info:
module: str
version: str
obj: Any
msg: str = None
def make_getattr(module, dct):
assert all(x.module == module for x in dct.values())
def __getattr__(name):
info = dct.get(name)
if info is None:
raise AttributeError(f'module {module} has no attribute {name}')
msg = (
f'{module}.{name} will be removed in {info.version}; '
f'{info.msg or "please let us know if this is an issue for you."}'
)
warnings.warn(msg, FutureWarning, stacklevel=2)
return info.obj
return __getattr__
def deprecated(version, msg=None, name=None):
"""Wrap function to trigger deprecated message upon call."""
def deco(func):
@functools.wraps(func)
def wrapper(*args, **kw):
_msg = (
f'{func.__module__}.{name or func.__name__} will be removed in {version}; '
f'{msg or "please let us know if this is an issue for you."}'
)
warnings.warn(_msg, FutureWarning, stacklevel=2)
return func(*args, **kw)
return wrapper
return deco
|
+ import functools
import warnings
from dataclasses import dataclass
from typing import Any
@dataclass
class Info:
module: str
version: str
obj: Any
msg: str = None
def make_getattr(module, dct):
assert all(x.module == module for x in dct.values())
def __getattr__(name):
info = dct.get(name)
if info is None:
raise AttributeError(f'module {module} has no attribute {name}')
msg = (
f'{module}.{name} will be removed in {info.version}; '
f'{info.msg or "please let us know if this is an issue for you."}'
)
warnings.warn(msg, FutureWarning, stacklevel=2)
return info.obj
return __getattr__
+
+
+ def deprecated(version, msg=None, name=None):
+ """Wrap function to trigger deprecated message upon call."""
+ def deco(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kw):
+ _msg = (
+ f'{func.__module__}.{name or func.__name__} will be removed in {version}; '
+ f'{msg or "please let us know if this is an issue for you."}'
+ )
+ warnings.warn(_msg, FutureWarning, stacklevel=2)
+ return func(*args, **kw)
+ return wrapper
+ return deco
|
0a9f2d46325ce6856a3979127390f2e48357abd9
|
schedule2stimuli.py
|
schedule2stimuli.py
|
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
# allocate stimuli
a = 0
b = []
phase = ''
for session in range(1,36):
print "%s" % session
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
print ',' . join(map(str,b))
print str(a)
|
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
# allocate stimuli and write csv
a = 0
b = []
phase = ''
csvfile = open('stimuli_' + str(p) + '.csv', 'wb')
writer = csv.writer(csvfile, delimiter=',')
for session in range(1,36):
writer.writerow([session])
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
writer.writerow(b)
writer.writerow([a])
|
Write stimuli schedule to csv file.
|
Write stimuli schedule to csv file.
|
Python
|
cc0-1.0
|
earcanal/dotprobe,earcanal/dotprobe,earcanal/dotprobe
|
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
- # allocate stimuli
- a = 0
- b = []
+ # allocate stimuli and write csv
+ a = 0
+ b = []
- phase = ''
+ phase = ''
+ csvfile = open('stimuli_' + str(p) + '.csv', 'wb')
+ writer = csv.writer(csvfile, delimiter=',')
for session in range(1,36):
- print "%s" % session
+ writer.writerow([session])
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
- print ',' . join(map(str,b))
- print str(a)
+ writer.writerow(b)
+ writer.writerow([a])
|
Write stimuli schedule to csv file.
|
## Code Before:
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
# allocate stimuli
a = 0
b = []
phase = ''
for session in range(1,36):
print "%s" % session
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
print ',' . join(map(str,b))
print str(a)
## Instruction:
Write stimuli schedule to csv file.
## Code After:
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
# allocate stimuli and write csv
a = 0
b = []
phase = ''
csvfile = open('stimuli_' + str(p) + '.csv', 'wb')
writer = csv.writer(csvfile, delimiter=',')
for session in range(1,36):
writer.writerow([session])
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
writer.writerow(b)
writer.writerow([a])
|
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
- # allocate stimuli
- a = 0
- b = []
+ # allocate stimuli and write csv
+ a = 0
+ b = []
- phase = ''
+ phase = ''
? ++
+ csvfile = open('stimuli_' + str(p) + '.csv', 'wb')
+ writer = csv.writer(csvfile, delimiter=',')
for session in range(1,36):
- print "%s" % session
+ writer.writerow([session])
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
- print ',' . join(map(str,b))
- print str(a)
+ writer.writerow(b)
+ writer.writerow([a])
|
43a2cb58df9dc3e4e91370d9b10c62c0d05b8798
|
papermill/tests/test_cli.py
|
papermill/tests/test_cli.py
|
""" Test the command line interface """
import pytest
from ..cli import _is_int, _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
(10, 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, True),
("False", False),
("None", False),
(-8.2, True),
(10, False),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, False),
("False", False),
("None", False),
(-8.2, False),
(10, True),
("hello world", False),
("😍", False),
])
def test_is_int(value, expected):
assert (_is_int(value)) == expected
|
""" Test the command line interface """
import pytest
from ..cli import _is_int, _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
("12.51", 12.51),
(10, 10),
("10", 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, True),
("False", False),
("None", False),
(-8.2, True),
(10, False),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, False),
("False", False),
("None", False),
(-8.2, False),
("-23.2", False),
(10, True),
("13", True),
("hello world", False),
("😍", False),
])
def test_is_int(value, expected):
assert (_is_int(value)) == expected
|
Add test to include strings to numbers
|
Add test to include strings to numbers
|
Python
|
bsd-3-clause
|
nteract/papermill,nteract/papermill
|
""" Test the command line interface """
import pytest
from ..cli import _is_int, _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
+ ("12.51", 12.51),
(10, 10),
+ ("10", 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, True),
("False", False),
("None", False),
(-8.2, True),
(10, False),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, False),
("False", False),
("None", False),
(-8.2, False),
+ ("-23.2", False),
(10, True),
+ ("13", True),
("hello world", False),
("😍", False),
])
def test_is_int(value, expected):
assert (_is_int(value)) == expected
|
Add test to include strings to numbers
|
## Code Before:
""" Test the command line interface """
import pytest
from ..cli import _is_int, _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
(10, 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, True),
("False", False),
("None", False),
(-8.2, True),
(10, False),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, False),
("False", False),
("None", False),
(-8.2, False),
(10, True),
("hello world", False),
("😍", False),
])
def test_is_int(value, expected):
assert (_is_int(value)) == expected
## Instruction:
Add test to include strings to numbers
## Code After:
""" Test the command line interface """
import pytest
from ..cli import _is_int, _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
("12.51", 12.51),
(10, 10),
("10", 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, True),
("False", False),
("None", False),
(-8.2, True),
(10, False),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, False),
("False", False),
("None", False),
(-8.2, False),
("-23.2", False),
(10, True),
("13", True),
("hello world", False),
("😍", False),
])
def test_is_int(value, expected):
assert (_is_int(value)) == expected
|
""" Test the command line interface """
import pytest
from ..cli import _is_int, _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
+ ("12.51", 12.51),
(10, 10),
+ ("10", 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, True),
("False", False),
("None", False),
(-8.2, True),
(10, False),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, False),
("False", False),
("None", False),
(-8.2, False),
+ ("-23.2", False),
(10, True),
+ ("13", True),
("hello world", False),
("😍", False),
])
def test_is_int(value, expected):
assert (_is_int(value)) == expected
|
cc42cf63bc3bf887933635e824cc838204738e30
|
tests/acceptance/shared.py
|
tests/acceptance/shared.py
|
"""Shared acceptance test functions."""
from time import sleep
def wait(condition, step=0.1, max_steps=10):
"""Wait for a condition to become true."""
for i in range(max_steps - 1):
if condition():
return True
else:
sleep(step)
return condition()
def get_listing_create_form(listing):
"""Open and return the create form of a listing."""
listing.find_by_css('.navbar .button').first.click()
return listing.find_by_css('.listing-create-form').first
|
"""Shared acceptance test functions."""
from time import sleep
def wait(condition, step=0.1, max_steps=10):
"""Wait for a condition to become true."""
for i in range(max_steps - 1):
if condition():
return True
else:
sleep(step)
return condition()
def get_listing_create_form(listing):
"""Open and return the create form of a listing."""
button = listing.find_by_css('.navbar .button').first
wait(lambda: button.visible)
button.click()
return listing.find_by_css('.listing-create-form').first
|
Fix acceptance tests: for for button to be visible
|
Fix acceptance tests: for for button to be visible
|
Python
|
agpl-3.0
|
xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator
|
"""Shared acceptance test functions."""
from time import sleep
def wait(condition, step=0.1, max_steps=10):
"""Wait for a condition to become true."""
for i in range(max_steps - 1):
if condition():
return True
else:
sleep(step)
return condition()
+
def get_listing_create_form(listing):
"""Open and return the create form of a listing."""
- listing.find_by_css('.navbar .button').first.click()
+ button = listing.find_by_css('.navbar .button').first
+ wait(lambda: button.visible)
+ button.click()
return listing.find_by_css('.listing-create-form').first
|
Fix acceptance tests: for for button to be visible
|
## Code Before:
"""Shared acceptance test functions."""
from time import sleep
def wait(condition, step=0.1, max_steps=10):
"""Wait for a condition to become true."""
for i in range(max_steps - 1):
if condition():
return True
else:
sleep(step)
return condition()
def get_listing_create_form(listing):
"""Open and return the create form of a listing."""
listing.find_by_css('.navbar .button').first.click()
return listing.find_by_css('.listing-create-form').first
## Instruction:
Fix acceptance tests: for for button to be visible
## Code After:
"""Shared acceptance test functions."""
from time import sleep
def wait(condition, step=0.1, max_steps=10):
"""Wait for a condition to become true."""
for i in range(max_steps - 1):
if condition():
return True
else:
sleep(step)
return condition()
def get_listing_create_form(listing):
"""Open and return the create form of a listing."""
button = listing.find_by_css('.navbar .button').first
wait(lambda: button.visible)
button.click()
return listing.find_by_css('.listing-create-form').first
|
"""Shared acceptance test functions."""
from time import sleep
def wait(condition, step=0.1, max_steps=10):
"""Wait for a condition to become true."""
for i in range(max_steps - 1):
if condition():
return True
else:
sleep(step)
return condition()
+
def get_listing_create_form(listing):
"""Open and return the create form of a listing."""
- listing.find_by_css('.navbar .button').first.click()
? --------
+ button = listing.find_by_css('.navbar .button').first
? +++++++++
+ wait(lambda: button.visible)
+ button.click()
return listing.find_by_css('.listing-create-form').first
|
e3d1805094ea3df86c94fdc116d1f718975a338e
|
src/me/maxwu/cistat/app/cistat.py
|
src/me/maxwu/cistat/app/cistat.py
|
__author__ = 'maxwu'
import json
from me.maxwu.cistat import config
from me.maxwu.cistat.reqs.circleci_request import CircleCiReq
from me.maxwu.cistat.model.xunit_report import Xunitrpt
"""Main script file to provide configuration loading, cli_app and version.
"""
VERSION = "1.0"
def cli_app():
vcs, project, username = config.get_circleci_vcs(), config.get_circleci_project(), config.get_circleci_username()
urls = CircleCiReq.get_recent_artifacts(
token=config.get_circleci_token(),
vcs=vcs,
project=project,
username=username
)
report = Xunitrpt()
for artifact in urls:
print("fetching {}".format(artifact))
report += Xunitrpt(xunit=CircleCiReq.get_artifact_report(url=artifact))
print("Top 10 failure cases: {}".format(report.get_cases_in_rate()[:10]))
print("Plot Barchart of Pass Rate")
report.plot_barchart_rate(project, "Pass Rate per case")
if __name__ == '__main__':
cli_app()
|
__author__ = 'maxwu'
import pprint
from me.maxwu.cistat import config
from me.maxwu.cistat.reqs.circleci_request import CircleCiReq
from me.maxwu.cistat.model.xunit_report import Xunitrpt
"""Main script file to provide configuration loading, cli_app and version.
"""
VERSION = "1.0"
def cli_app():
vcs, project, username = config.get_circleci_vcs(), config.get_circleci_project(), config.get_circleci_username()
urls = CircleCiReq.get_recent_artifacts(
token=config.get_circleci_token(),
vcs=vcs,
project=project,
username=username
)
report = Xunitrpt()
for artifact in urls:
print("fetching {}".format(artifact))
report += Xunitrpt(xunit=CircleCiReq.get_artifact_report(url=artifact))
print("Top 10 failure cases:")
pprint.pprint(report.get_cases_in_rate()[:10])
print("Plot Barchart of Pass Rate")
report.plot_barchart_rate(project, "Pass Rate per case")
if __name__ == '__main__':
cli_app()
|
Update sample task with pprint
|
Update sample task with pprint
|
Python
|
mit
|
maxwu/cistat,maxwu/cistat
|
__author__ = 'maxwu'
+ import pprint
- import json
-
from me.maxwu.cistat import config
from me.maxwu.cistat.reqs.circleci_request import CircleCiReq
from me.maxwu.cistat.model.xunit_report import Xunitrpt
"""Main script file to provide configuration loading, cli_app and version.
"""
VERSION = "1.0"
def cli_app():
vcs, project, username = config.get_circleci_vcs(), config.get_circleci_project(), config.get_circleci_username()
urls = CircleCiReq.get_recent_artifacts(
token=config.get_circleci_token(),
vcs=vcs,
project=project,
username=username
)
report = Xunitrpt()
for artifact in urls:
print("fetching {}".format(artifact))
report += Xunitrpt(xunit=CircleCiReq.get_artifact_report(url=artifact))
- print("Top 10 failure cases: {}".format(report.get_cases_in_rate()[:10]))
+ print("Top 10 failure cases:")
+ pprint.pprint(report.get_cases_in_rate()[:10])
print("Plot Barchart of Pass Rate")
report.plot_barchart_rate(project, "Pass Rate per case")
if __name__ == '__main__':
cli_app()
|
Update sample task with pprint
|
## Code Before:
__author__ = 'maxwu'
import json
from me.maxwu.cistat import config
from me.maxwu.cistat.reqs.circleci_request import CircleCiReq
from me.maxwu.cistat.model.xunit_report import Xunitrpt
"""Main script file to provide configuration loading, cli_app and version.
"""
VERSION = "1.0"
def cli_app():
vcs, project, username = config.get_circleci_vcs(), config.get_circleci_project(), config.get_circleci_username()
urls = CircleCiReq.get_recent_artifacts(
token=config.get_circleci_token(),
vcs=vcs,
project=project,
username=username
)
report = Xunitrpt()
for artifact in urls:
print("fetching {}".format(artifact))
report += Xunitrpt(xunit=CircleCiReq.get_artifact_report(url=artifact))
print("Top 10 failure cases: {}".format(report.get_cases_in_rate()[:10]))
print("Plot Barchart of Pass Rate")
report.plot_barchart_rate(project, "Pass Rate per case")
if __name__ == '__main__':
cli_app()
## Instruction:
Update sample task with pprint
## Code After:
__author__ = 'maxwu'
import pprint
from me.maxwu.cistat import config
from me.maxwu.cistat.reqs.circleci_request import CircleCiReq
from me.maxwu.cistat.model.xunit_report import Xunitrpt
"""Main script file to provide configuration loading, cli_app and version.
"""
VERSION = "1.0"
def cli_app():
vcs, project, username = config.get_circleci_vcs(), config.get_circleci_project(), config.get_circleci_username()
urls = CircleCiReq.get_recent_artifacts(
token=config.get_circleci_token(),
vcs=vcs,
project=project,
username=username
)
report = Xunitrpt()
for artifact in urls:
print("fetching {}".format(artifact))
report += Xunitrpt(xunit=CircleCiReq.get_artifact_report(url=artifact))
print("Top 10 failure cases:")
pprint.pprint(report.get_cases_in_rate()[:10])
print("Plot Barchart of Pass Rate")
report.plot_barchart_rate(project, "Pass Rate per case")
if __name__ == '__main__':
cli_app()
|
__author__ = 'maxwu'
+ import pprint
- import json
-
from me.maxwu.cistat import config
from me.maxwu.cistat.reqs.circleci_request import CircleCiReq
from me.maxwu.cistat.model.xunit_report import Xunitrpt
"""Main script file to provide configuration loading, cli_app and version.
"""
VERSION = "1.0"
def cli_app():
vcs, project, username = config.get_circleci_vcs(), config.get_circleci_project(), config.get_circleci_username()
urls = CircleCiReq.get_recent_artifacts(
token=config.get_circleci_token(),
vcs=vcs,
project=project,
username=username
)
report = Xunitrpt()
for artifact in urls:
print("fetching {}".format(artifact))
report += Xunitrpt(xunit=CircleCiReq.get_artifact_report(url=artifact))
- print("Top 10 failure cases: {}".format(report.get_cases_in_rate()[:10]))
+ print("Top 10 failure cases:")
+ pprint.pprint(report.get_cases_in_rate()[:10])
print("Plot Barchart of Pass Rate")
report.plot_barchart_rate(project, "Pass Rate per case")
if __name__ == '__main__':
cli_app()
|
c82eaa445ddbe39f4142de7f51f0d19437a1aef0
|
validators/url.py
|
validators/url.py
|
import re
from .utils import validator
regex = (
r'^[a-z]+://([^/:]+{tld}|([0-9]{{1,3}}\.)'
r'{{3}}[0-9]{{1,3}})(:[0-9]+)?(\/.*)?$'
)
pattern_with_tld = re.compile(regex.format(tld=r'\.[a-z]{2,10}'))
pattern_without_tld = re.compile(regex.format(tld=''))
@validator
def url(value, require_tld=True):
"""
url
---
Returns whether or not given value is a valid URL. If the value is
valid URL this function returns ``True``, otherwise
:class:`~validators.utils.ValidationFailure`.
This validator is based on `WTForms URL validator`_.
.. _WTForms URL validator:
https://github.com/wtforms/wtforms/blob/master/wtforms/validators.py
Examples::
>>> import validators
>>> assert validators.url('http://foobar.dk')
>>> assert validators.url('http://localhost/foobar', require_tld=False)
>>> assert not validators.url('http://foobar.d')
.. versionadded:: 0.2
:param value: URL address string to validate
"""
if require_tld:
return pattern_with_tld.match(value)
return pattern_without_tld.match(value)
|
import re
from .utils import validator
regex = (
r'^[a-z]+://([^/:]+{tld}|([0-9]{{1,3}}\.)'
r'{{3}}[0-9]{{1,3}})(:[0-9]+)?(\/.*)?$'
)
pattern_with_tld = re.compile(regex.format(tld=r'\.[a-z]{2,10}'))
pattern_without_tld = re.compile(regex.format(tld=''))
@validator
def url(value, require_tld=True):
"""
Returns whether or not given value is a valid URL. If the value is
valid URL this function returns ``True``, otherwise
:class:`~validators.utils.ValidationFailure`.
This validator is based on `WTForms URL validator`_.
.. _WTForms URL validator:
https://github.com/wtforms/wtforms/blob/master/wtforms/validators.py
Examples::
>>> import validators
>>> assert validators.url('http://foobar.dk')
>>> assert validators.url('http://localhost/foobar', require_tld=False)
>>> assert not validators.url('http://foobar.d')
.. versionadded:: 0.2
:param value: URL address string to validate
"""
if require_tld:
return pattern_with_tld.match(value)
return pattern_without_tld.match(value)
|
Remove unnecessary heading from docstring
|
Remove unnecessary heading from docstring
|
Python
|
mit
|
kvesteri/validators
|
import re
from .utils import validator
regex = (
r'^[a-z]+://([^/:]+{tld}|([0-9]{{1,3}}\.)'
r'{{3}}[0-9]{{1,3}})(:[0-9]+)?(\/.*)?$'
)
pattern_with_tld = re.compile(regex.format(tld=r'\.[a-z]{2,10}'))
pattern_without_tld = re.compile(regex.format(tld=''))
@validator
def url(value, require_tld=True):
"""
- url
- ---
-
Returns whether or not given value is a valid URL. If the value is
valid URL this function returns ``True``, otherwise
:class:`~validators.utils.ValidationFailure`.
This validator is based on `WTForms URL validator`_.
.. _WTForms URL validator:
https://github.com/wtforms/wtforms/blob/master/wtforms/validators.py
Examples::
>>> import validators
>>> assert validators.url('http://foobar.dk')
>>> assert validators.url('http://localhost/foobar', require_tld=False)
>>> assert not validators.url('http://foobar.d')
.. versionadded:: 0.2
:param value: URL address string to validate
"""
if require_tld:
return pattern_with_tld.match(value)
return pattern_without_tld.match(value)
|
Remove unnecessary heading from docstring
|
## Code Before:
import re
from .utils import validator
regex = (
r'^[a-z]+://([^/:]+{tld}|([0-9]{{1,3}}\.)'
r'{{3}}[0-9]{{1,3}})(:[0-9]+)?(\/.*)?$'
)
pattern_with_tld = re.compile(regex.format(tld=r'\.[a-z]{2,10}'))
pattern_without_tld = re.compile(regex.format(tld=''))
@validator
def url(value, require_tld=True):
"""
url
---
Returns whether or not given value is a valid URL. If the value is
valid URL this function returns ``True``, otherwise
:class:`~validators.utils.ValidationFailure`.
This validator is based on `WTForms URL validator`_.
.. _WTForms URL validator:
https://github.com/wtforms/wtforms/blob/master/wtforms/validators.py
Examples::
>>> import validators
>>> assert validators.url('http://foobar.dk')
>>> assert validators.url('http://localhost/foobar', require_tld=False)
>>> assert not validators.url('http://foobar.d')
.. versionadded:: 0.2
:param value: URL address string to validate
"""
if require_tld:
return pattern_with_tld.match(value)
return pattern_without_tld.match(value)
## Instruction:
Remove unnecessary heading from docstring
## Code After:
import re
from .utils import validator
regex = (
r'^[a-z]+://([^/:]+{tld}|([0-9]{{1,3}}\.)'
r'{{3}}[0-9]{{1,3}})(:[0-9]+)?(\/.*)?$'
)
pattern_with_tld = re.compile(regex.format(tld=r'\.[a-z]{2,10}'))
pattern_without_tld = re.compile(regex.format(tld=''))
@validator
def url(value, require_tld=True):
"""
Returns whether or not given value is a valid URL. If the value is
valid URL this function returns ``True``, otherwise
:class:`~validators.utils.ValidationFailure`.
This validator is based on `WTForms URL validator`_.
.. _WTForms URL validator:
https://github.com/wtforms/wtforms/blob/master/wtforms/validators.py
Examples::
>>> import validators
>>> assert validators.url('http://foobar.dk')
>>> assert validators.url('http://localhost/foobar', require_tld=False)
>>> assert not validators.url('http://foobar.d')
.. versionadded:: 0.2
:param value: URL address string to validate
"""
if require_tld:
return pattern_with_tld.match(value)
return pattern_without_tld.match(value)
|
import re
from .utils import validator
regex = (
r'^[a-z]+://([^/:]+{tld}|([0-9]{{1,3}}\.)'
r'{{3}}[0-9]{{1,3}})(:[0-9]+)?(\/.*)?$'
)
pattern_with_tld = re.compile(regex.format(tld=r'\.[a-z]{2,10}'))
pattern_without_tld = re.compile(regex.format(tld=''))
@validator
def url(value, require_tld=True):
"""
- url
- ---
-
Returns whether or not given value is a valid URL. If the value is
valid URL this function returns ``True``, otherwise
:class:`~validators.utils.ValidationFailure`.
This validator is based on `WTForms URL validator`_.
.. _WTForms URL validator:
https://github.com/wtforms/wtforms/blob/master/wtforms/validators.py
Examples::
>>> import validators
>>> assert validators.url('http://foobar.dk')
>>> assert validators.url('http://localhost/foobar', require_tld=False)
>>> assert not validators.url('http://foobar.d')
.. versionadded:: 0.2
:param value: URL address string to validate
"""
if require_tld:
return pattern_with_tld.match(value)
return pattern_without_tld.match(value)
|
4c3dd0c9d27af0f186f81c4fed0003a9190b4d9e
|
jal_stats/stats/serializers.py
|
jal_stats/stats/serializers.py
|
from rest_framework import serializers
from .models import Activity, Stat
class StatSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Stat
fields = ('id', 'activity', 'reps', 'date')
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
|
from rest_framework import serializers
from .models import Activity, Stat
class StatAddSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Stat
fields = ('id', 'reps', 'date')
class StatSerializer(StatAddSerializer):
class Meta:
model = Stat
fields = tuple(list(StatAddSerializer.Meta.fields) + ['activity'])
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
|
Add new serializer for StatAdd that doesn't have activity
|
Add new serializer for StatAdd that doesn't have activity
|
Python
|
mit
|
jal-stats/django
|
from rest_framework import serializers
from .models import Activity, Stat
- class StatSerializer(serializers.HyperlinkedModelSerializer):
+ class StatAddSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Stat
- fields = ('id', 'activity', 'reps', 'date')
+ fields = ('id', 'reps', 'date')
+
+
+ class StatSerializer(StatAddSerializer):
+
+ class Meta:
+ model = Stat
+ fields = tuple(list(StatAddSerializer.Meta.fields) + ['activity'])
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
|
Add new serializer for StatAdd that doesn't have activity
|
## Code Before:
from rest_framework import serializers
from .models import Activity, Stat
class StatSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Stat
fields = ('id', 'activity', 'reps', 'date')
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
## Instruction:
Add new serializer for StatAdd that doesn't have activity
## Code After:
from rest_framework import serializers
from .models import Activity, Stat
class StatAddSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Stat
fields = ('id', 'reps', 'date')
class StatSerializer(StatAddSerializer):
class Meta:
model = Stat
fields = tuple(list(StatAddSerializer.Meta.fields) + ['activity'])
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
|
from rest_framework import serializers
from .models import Activity, Stat
- class StatSerializer(serializers.HyperlinkedModelSerializer):
+ class StatAddSerializer(serializers.HyperlinkedModelSerializer):
? +++
class Meta:
model = Stat
- fields = ('id', 'activity', 'reps', 'date')
? ------------
+ fields = ('id', 'reps', 'date')
+
+
+ class StatSerializer(StatAddSerializer):
+
+ class Meta:
+ model = Stat
+ fields = tuple(list(StatAddSerializer.Meta.fields) + ['activity'])
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
|
1441654c46e08b7286999b6887e59c56fa238ff7
|
python/piling-up.py
|
python/piling-up.py
|
from collections import deque
def isVerticallyStackable(pile):
vertical_stack = []
while pile:
largest_cube, cube_sizes = remove_largest_cube_from_pile(pile)
if vertical_stack == []:
vertical_stack.append(largest_cube)
else:
top_of_stack = vertical_stack[-1]
if(top_of_stack < largest_cube):
return False
vertical_stack.append(largest_cube)
return True
def remove_largest_cube_from_pile(cube_sizes):
if(cube_sizes == []):
return (None, cube_sizes)
elif(cube_sizes[0] > cube_sizes[-1]):
largest_cube = cube_sizes.popleft()
return (largest_cube, cube_sizes)
else:
largest_cube = cube_sizes.pop()
return (largest_cube, cube_sizes)
num_test_cases = int(input())
for i in range(num_test_cases):
num_cubes = int(input())
pile = deque(map(int, input().strip().split(" ")))
if(isVerticallyStackable(pile)):
print("Yes")
else:
print("No")
|
from collections import deque
def isVerticallyStackable(pile):
vertical_stack = []
while pile:
largest_cube = remove_largest_cube_from_pile(pile)
if vertical_stack == []:
vertical_stack.append(largest_cube)
else:
top_of_stack = vertical_stack[-1]
if(top_of_stack < largest_cube):
return False
vertical_stack.append(largest_cube)
return True
def remove_largest_cube_from_pile(cube_sizes):
if(cube_sizes == []):
return None
elif(cube_sizes[0] > cube_sizes[-1]):
return cube_sizes.popleft()
else:
return cube_sizes.pop()
num_test_cases = int(input())
for i in range(num_test_cases):
num_cubes = int(input())
pile = deque(map(int, input().strip().split(" ")))
if(isVerticallyStackable(pile)):
print("Yes")
else:
print("No")
|
Remove returned pile b/c mutating directly
|
Remove returned pile b/c mutating directly
|
Python
|
mit
|
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
|
from collections import deque
def isVerticallyStackable(pile):
vertical_stack = []
while pile:
- largest_cube, cube_sizes = remove_largest_cube_from_pile(pile)
+ largest_cube = remove_largest_cube_from_pile(pile)
if vertical_stack == []:
vertical_stack.append(largest_cube)
else:
top_of_stack = vertical_stack[-1]
if(top_of_stack < largest_cube):
return False
vertical_stack.append(largest_cube)
return True
def remove_largest_cube_from_pile(cube_sizes):
if(cube_sizes == []):
- return (None, cube_sizes)
+ return None
elif(cube_sizes[0] > cube_sizes[-1]):
- largest_cube = cube_sizes.popleft()
+ return cube_sizes.popleft()
- return (largest_cube, cube_sizes)
else:
- largest_cube = cube_sizes.pop()
+ return cube_sizes.pop()
- return (largest_cube, cube_sizes)
num_test_cases = int(input())
for i in range(num_test_cases):
num_cubes = int(input())
pile = deque(map(int, input().strip().split(" ")))
if(isVerticallyStackable(pile)):
print("Yes")
else:
print("No")
|
Remove returned pile b/c mutating directly
|
## Code Before:
from collections import deque
def isVerticallyStackable(pile):
vertical_stack = []
while pile:
largest_cube, cube_sizes = remove_largest_cube_from_pile(pile)
if vertical_stack == []:
vertical_stack.append(largest_cube)
else:
top_of_stack = vertical_stack[-1]
if(top_of_stack < largest_cube):
return False
vertical_stack.append(largest_cube)
return True
def remove_largest_cube_from_pile(cube_sizes):
if(cube_sizes == []):
return (None, cube_sizes)
elif(cube_sizes[0] > cube_sizes[-1]):
largest_cube = cube_sizes.popleft()
return (largest_cube, cube_sizes)
else:
largest_cube = cube_sizes.pop()
return (largest_cube, cube_sizes)
num_test_cases = int(input())
for i in range(num_test_cases):
num_cubes = int(input())
pile = deque(map(int, input().strip().split(" ")))
if(isVerticallyStackable(pile)):
print("Yes")
else:
print("No")
## Instruction:
Remove returned pile b/c mutating directly
## Code After:
from collections import deque
def isVerticallyStackable(pile):
vertical_stack = []
while pile:
largest_cube = remove_largest_cube_from_pile(pile)
if vertical_stack == []:
vertical_stack.append(largest_cube)
else:
top_of_stack = vertical_stack[-1]
if(top_of_stack < largest_cube):
return False
vertical_stack.append(largest_cube)
return True
def remove_largest_cube_from_pile(cube_sizes):
if(cube_sizes == []):
return None
elif(cube_sizes[0] > cube_sizes[-1]):
return cube_sizes.popleft()
else:
return cube_sizes.pop()
num_test_cases = int(input())
for i in range(num_test_cases):
num_cubes = int(input())
pile = deque(map(int, input().strip().split(" ")))
if(isVerticallyStackable(pile)):
print("Yes")
else:
print("No")
|
from collections import deque
def isVerticallyStackable(pile):
vertical_stack = []
while pile:
- largest_cube, cube_sizes = remove_largest_cube_from_pile(pile)
? ------------
+ largest_cube = remove_largest_cube_from_pile(pile)
if vertical_stack == []:
vertical_stack.append(largest_cube)
else:
top_of_stack = vertical_stack[-1]
if(top_of_stack < largest_cube):
return False
vertical_stack.append(largest_cube)
return True
def remove_largest_cube_from_pile(cube_sizes):
if(cube_sizes == []):
- return (None, cube_sizes)
+ return None
elif(cube_sizes[0] > cube_sizes[-1]):
- largest_cube = cube_sizes.popleft()
? -- - - -- ^^^^
+ return cube_sizes.popleft()
? ^^
- return (largest_cube, cube_sizes)
else:
- largest_cube = cube_sizes.pop()
? -- - - -- ^^^^
+ return cube_sizes.pop()
? ^^
- return (largest_cube, cube_sizes)
num_test_cases = int(input())
for i in range(num_test_cases):
num_cubes = int(input())
pile = deque(map(int, input().strip().split(" ")))
if(isVerticallyStackable(pile)):
print("Yes")
else:
print("No")
|
301463a99dceceb21ecec933f3a83e55ca37c3b8
|
wagtail/wagtailimages/api/admin/serializers.py
|
wagtail/wagtailimages/api/admin/serializers.py
|
from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from rest_framework.fields import Field
from ...models import SourceImageIOError
from ..v2.serializers import ImageSerializer
class ImageRenditionField(Field):
"""
A field that generates a rendition with the specified filter spec, and serialises
details of that rendition.
Example:
"thumbnail": {
"url": "/media/images/myimage.max-165x165.jpg",
"width": 165,
"height": 100
}
If there is an error with the source image. The dict will only contain a single
key, "error", indicating this error:
"thumbnail": {
"error": "SourceImageIOError"
}
"""
def __init__(self, filter_spec, *args, **kwargs):
self.filter_spec = filter_spec
super(ImageRenditionField, self).__init__(*args, **kwargs)
def get_attribute(self, instance):
return instance
def to_representation(self, image):
try:
thumbnail = image.get_rendition(self.filter_spec)
return OrderedDict([
('url', thumbnail.url),
('width', thumbnail.width),
('height', thumbnail.height),
])
except SourceImageIOError:
return OrderedDict([
('error', 'SourceImageIOError'),
])
class AdminImageSerializer(ImageSerializer):
thumbnail = ImageRenditionField('max-165x165', read_only=True)
|
from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from rest_framework.fields import Field
from ...models import SourceImageIOError
from ..v2.serializers import ImageSerializer
class ImageRenditionField(Field):
"""
A field that generates a rendition with the specified filter spec, and serialises
details of that rendition.
Example:
"thumbnail": {
"url": "/media/images/myimage.max-165x165.jpg",
"width": 165,
"height": 100
}
If there is an error with the source image. The dict will only contain a single
key, "error", indicating this error:
"thumbnail": {
"error": "SourceImageIOError"
}
"""
def __init__(self, filter_spec, *args, **kwargs):
self.filter_spec = filter_spec
super(ImageRenditionField, self).__init__(*args, **kwargs)
def to_representation(self, image):
try:
thumbnail = image.get_rendition(self.filter_spec)
return OrderedDict([
('url', thumbnail.url),
('width', thumbnail.width),
('height', thumbnail.height),
])
except SourceImageIOError:
return OrderedDict([
('error', 'SourceImageIOError'),
])
class AdminImageSerializer(ImageSerializer):
thumbnail = ImageRenditionField('max-165x165', source='*', read_only=True)
|
Use source keyword argument (instead of overriding get_attribute)
|
Use source keyword argument (instead of overriding get_attribute)
This allows the ImageRenditionField to be used on models that contain an
image field.
|
Python
|
bsd-3-clause
|
nealtodd/wagtail,mikedingjan/wagtail,FlipperPA/wagtail,torchbox/wagtail,iansprice/wagtail,jnns/wagtail,wagtail/wagtail,zerolab/wagtail,thenewguy/wagtail,iansprice/wagtail,zerolab/wagtail,rsalmaso/wagtail,gasman/wagtail,timorieber/wagtail,kaedroho/wagtail,mikedingjan/wagtail,torchbox/wagtail,thenewguy/wagtail,zerolab/wagtail,takeflight/wagtail,takeflight/wagtail,gasman/wagtail,rsalmaso/wagtail,nimasmi/wagtail,Toshakins/wagtail,timorieber/wagtail,thenewguy/wagtail,timorieber/wagtail,nimasmi/wagtail,gasman/wagtail,wagtail/wagtail,wagtail/wagtail,mixxorz/wagtail,nealtodd/wagtail,mixxorz/wagtail,zerolab/wagtail,iansprice/wagtail,timorieber/wagtail,jnns/wagtail,gasman/wagtail,nealtodd/wagtail,wagtail/wagtail,iansprice/wagtail,rsalmaso/wagtail,takeflight/wagtail,jnns/wagtail,mixxorz/wagtail,torchbox/wagtail,FlipperPA/wagtail,mixxorz/wagtail,jnns/wagtail,kaedroho/wagtail,Toshakins/wagtail,FlipperPA/wagtail,nimasmi/wagtail,zerolab/wagtail,wagtail/wagtail,nimasmi/wagtail,kaedroho/wagtail,mikedingjan/wagtail,rsalmaso/wagtail,mixxorz/wagtail,thenewguy/wagtail,takeflight/wagtail,kaedroho/wagtail,mikedingjan/wagtail,thenewguy/wagtail,nealtodd/wagtail,rsalmaso/wagtail,gasman/wagtail,FlipperPA/wagtail,Toshakins/wagtail,Toshakins/wagtail,torchbox/wagtail,kaedroho/wagtail
|
from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from rest_framework.fields import Field
from ...models import SourceImageIOError
from ..v2.serializers import ImageSerializer
class ImageRenditionField(Field):
"""
A field that generates a rendition with the specified filter spec, and serialises
details of that rendition.
Example:
"thumbnail": {
"url": "/media/images/myimage.max-165x165.jpg",
"width": 165,
"height": 100
}
If there is an error with the source image. The dict will only contain a single
key, "error", indicating this error:
"thumbnail": {
"error": "SourceImageIOError"
}
"""
def __init__(self, filter_spec, *args, **kwargs):
self.filter_spec = filter_spec
super(ImageRenditionField, self).__init__(*args, **kwargs)
- def get_attribute(self, instance):
- return instance
-
def to_representation(self, image):
try:
thumbnail = image.get_rendition(self.filter_spec)
return OrderedDict([
('url', thumbnail.url),
('width', thumbnail.width),
('height', thumbnail.height),
])
except SourceImageIOError:
return OrderedDict([
('error', 'SourceImageIOError'),
])
class AdminImageSerializer(ImageSerializer):
- thumbnail = ImageRenditionField('max-165x165', read_only=True)
+ thumbnail = ImageRenditionField('max-165x165', source='*', read_only=True)
|
Use source keyword argument (instead of overriding get_attribute)
|
## Code Before:
from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from rest_framework.fields import Field
from ...models import SourceImageIOError
from ..v2.serializers import ImageSerializer
class ImageRenditionField(Field):
"""
A field that generates a rendition with the specified filter spec, and serialises
details of that rendition.
Example:
"thumbnail": {
"url": "/media/images/myimage.max-165x165.jpg",
"width": 165,
"height": 100
}
If there is an error with the source image. The dict will only contain a single
key, "error", indicating this error:
"thumbnail": {
"error": "SourceImageIOError"
}
"""
def __init__(self, filter_spec, *args, **kwargs):
self.filter_spec = filter_spec
super(ImageRenditionField, self).__init__(*args, **kwargs)
def get_attribute(self, instance):
return instance
def to_representation(self, image):
try:
thumbnail = image.get_rendition(self.filter_spec)
return OrderedDict([
('url', thumbnail.url),
('width', thumbnail.width),
('height', thumbnail.height),
])
except SourceImageIOError:
return OrderedDict([
('error', 'SourceImageIOError'),
])
class AdminImageSerializer(ImageSerializer):
thumbnail = ImageRenditionField('max-165x165', read_only=True)
## Instruction:
Use source keyword argument (instead of overriding get_attribute)
## Code After:
from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from rest_framework.fields import Field
from ...models import SourceImageIOError
from ..v2.serializers import ImageSerializer
class ImageRenditionField(Field):
"""
A field that generates a rendition with the specified filter spec, and serialises
details of that rendition.
Example:
"thumbnail": {
"url": "/media/images/myimage.max-165x165.jpg",
"width": 165,
"height": 100
}
If there is an error with the source image. The dict will only contain a single
key, "error", indicating this error:
"thumbnail": {
"error": "SourceImageIOError"
}
"""
def __init__(self, filter_spec, *args, **kwargs):
self.filter_spec = filter_spec
super(ImageRenditionField, self).__init__(*args, **kwargs)
def to_representation(self, image):
try:
thumbnail = image.get_rendition(self.filter_spec)
return OrderedDict([
('url', thumbnail.url),
('width', thumbnail.width),
('height', thumbnail.height),
])
except SourceImageIOError:
return OrderedDict([
('error', 'SourceImageIOError'),
])
class AdminImageSerializer(ImageSerializer):
thumbnail = ImageRenditionField('max-165x165', source='*', read_only=True)
|
from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from rest_framework.fields import Field
from ...models import SourceImageIOError
from ..v2.serializers import ImageSerializer
class ImageRenditionField(Field):
"""
A field that generates a rendition with the specified filter spec, and serialises
details of that rendition.
Example:
"thumbnail": {
"url": "/media/images/myimage.max-165x165.jpg",
"width": 165,
"height": 100
}
If there is an error with the source image. The dict will only contain a single
key, "error", indicating this error:
"thumbnail": {
"error": "SourceImageIOError"
}
"""
def __init__(self, filter_spec, *args, **kwargs):
self.filter_spec = filter_spec
super(ImageRenditionField, self).__init__(*args, **kwargs)
- def get_attribute(self, instance):
- return instance
-
def to_representation(self, image):
try:
thumbnail = image.get_rendition(self.filter_spec)
return OrderedDict([
('url', thumbnail.url),
('width', thumbnail.width),
('height', thumbnail.height),
])
except SourceImageIOError:
return OrderedDict([
('error', 'SourceImageIOError'),
])
class AdminImageSerializer(ImageSerializer):
- thumbnail = ImageRenditionField('max-165x165', read_only=True)
+ thumbnail = ImageRenditionField('max-165x165', source='*', read_only=True)
? ++++++++++++
|
3c1e90761bf6d046c3b462dcdddb75335c259433
|
rnacentral/portal/tests/rna_type_tests.py
|
rnacentral/portal/tests/rna_type_tests.py
|
from django.test import TestCase
from portal.models import Rna
class GenericRnaTypeTest(TestCase):
def rna_type_of(self, upi, taxid=None):
return Rna.objects.\
get(upi=upi).\
get_rna_type(taxid=taxid, recompute=True)
def assertRnaTypeIs(self, description, upi, taxid=None):
self.assertEquals(description, self.description_of(upi, taxid=taxid))
class WormTests(GenericRnaTypeTest):
def test_gets_mirna_over_pirna(self):
self.assertRnaTypeIs(
'miRNA',
'URS0000016972',
taxid=6239)
|
from django.test import TestCase
from portal.models import Rna
class GenericRnaTypeTest(TestCase):
def rna_type_of(self, upi, taxid=None):
return Rna.objects.\
get(upi=upi).\
get_rna_type(taxid=taxid, recompute=True)
def assertRnaTypeIs(self, description, upi, taxid=None):
self.assertEquals(description, self.description_of(upi, taxid=taxid))
class WormTests(GenericRnaTypeTest):
def test_gets_mirna_over_pirna(self):
self.assertRnaTypeIs(
'miRNA',
'URS0000016972',
taxid=6239)
class HumanTests(GenericRnaTypeTest):
def test_if_has_both_anti_and_lnc_likes_lnc(self):
self.assertRnaTypeIs(
'lncRNA',
'URS0000732D5D',
taxid=9606)
|
Add test showing issue with rna_type
|
Add test showing issue with rna_type
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode
|
from django.test import TestCase
from portal.models import Rna
class GenericRnaTypeTest(TestCase):
def rna_type_of(self, upi, taxid=None):
return Rna.objects.\
get(upi=upi).\
get_rna_type(taxid=taxid, recompute=True)
def assertRnaTypeIs(self, description, upi, taxid=None):
self.assertEquals(description, self.description_of(upi, taxid=taxid))
class WormTests(GenericRnaTypeTest):
def test_gets_mirna_over_pirna(self):
self.assertRnaTypeIs(
'miRNA',
'URS0000016972',
taxid=6239)
+
+ class HumanTests(GenericRnaTypeTest):
+ def test_if_has_both_anti_and_lnc_likes_lnc(self):
+ self.assertRnaTypeIs(
+ 'lncRNA',
+ 'URS0000732D5D',
+ taxid=9606)
+
|
Add test showing issue with rna_type
|
## Code Before:
from django.test import TestCase
from portal.models import Rna
class GenericRnaTypeTest(TestCase):
def rna_type_of(self, upi, taxid=None):
return Rna.objects.\
get(upi=upi).\
get_rna_type(taxid=taxid, recompute=True)
def assertRnaTypeIs(self, description, upi, taxid=None):
self.assertEquals(description, self.description_of(upi, taxid=taxid))
class WormTests(GenericRnaTypeTest):
def test_gets_mirna_over_pirna(self):
self.assertRnaTypeIs(
'miRNA',
'URS0000016972',
taxid=6239)
## Instruction:
Add test showing issue with rna_type
## Code After:
from django.test import TestCase
from portal.models import Rna
class GenericRnaTypeTest(TestCase):
def rna_type_of(self, upi, taxid=None):
return Rna.objects.\
get(upi=upi).\
get_rna_type(taxid=taxid, recompute=True)
def assertRnaTypeIs(self, description, upi, taxid=None):
self.assertEquals(description, self.description_of(upi, taxid=taxid))
class WormTests(GenericRnaTypeTest):
def test_gets_mirna_over_pirna(self):
self.assertRnaTypeIs(
'miRNA',
'URS0000016972',
taxid=6239)
class HumanTests(GenericRnaTypeTest):
def test_if_has_both_anti_and_lnc_likes_lnc(self):
self.assertRnaTypeIs(
'lncRNA',
'URS0000732D5D',
taxid=9606)
|
from django.test import TestCase
from portal.models import Rna
class GenericRnaTypeTest(TestCase):
def rna_type_of(self, upi, taxid=None):
return Rna.objects.\
get(upi=upi).\
get_rna_type(taxid=taxid, recompute=True)
def assertRnaTypeIs(self, description, upi, taxid=None):
self.assertEquals(description, self.description_of(upi, taxid=taxid))
class WormTests(GenericRnaTypeTest):
def test_gets_mirna_over_pirna(self):
self.assertRnaTypeIs(
'miRNA',
'URS0000016972',
taxid=6239)
+
+
+ class HumanTests(GenericRnaTypeTest):
+ def test_if_has_both_anti_and_lnc_likes_lnc(self):
+ self.assertRnaTypeIs(
+ 'lncRNA',
+ 'URS0000732D5D',
+ taxid=9606)
|
efd44be24e84a35db353ac79dae7cc7392a18b0c
|
matador/commands/deploy_ticket.py
|
matador/commands/deploy_ticket.py
|
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project)
self._checkout_ticket(project, 'test')
|
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-t', '--ticket',
type=str,
required=True,
help='Ticket name')
parser.add_argument(
'-b', '--branch',
type=str,
default='master',
help='Branch name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project, self.args.branch)
self._checkout_ticket(project, self.args.ticket)
|
Add ticket and branch arguments
|
Add ticket and branch arguments
|
Python
|
mit
|
Empiria/matador
|
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
+ '-t', '--ticket',
+ type=str,
+ required=True,
+ help='Ticket name')
+
+ parser.add_argument(
+ '-b', '--branch',
+ type=str,
+ default='master',
+ help='Branch name')
+
+ parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
- utils.update_repository(project)
+ utils.update_repository(project, self.args.branch)
- self._checkout_ticket(project, 'test')
+ self._checkout_ticket(project, self.args.ticket)
|
Add ticket and branch arguments
|
## Code Before:
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project)
self._checkout_ticket(project, 'test')
## Instruction:
Add ticket and branch arguments
## Code After:
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-t', '--ticket',
type=str,
required=True,
help='Ticket name')
parser.add_argument(
'-b', '--branch',
type=str,
default='master',
help='Branch name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project, self.args.branch)
self._checkout_ticket(project, self.args.ticket)
|
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
+ '-t', '--ticket',
+ type=str,
+ required=True,
+ help='Ticket name')
+
+ parser.add_argument(
+ '-b', '--branch',
+ type=str,
+ default='master',
+ help='Branch name')
+
+ parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
- utils.update_repository(project)
+ utils.update_repository(project, self.args.branch)
? ++++++++++++++++++
- self._checkout_ticket(project, 'test')
? ^ - -
+ self._checkout_ticket(project, self.args.ticket)
? ^^^^^^^^^^ +++
|
1941cba46978fb7f8182ddb3eddf2d77002b28f7
|
api/base/middleware.py
|
api/base/middleware.py
|
from framework.transactions import handlers
class TokuTransactionsMiddleware(object):
"""TokuMX transaction middleware."""
def process_request(self, request):
handlers.transaction_before_request()
def process_response(self, request, response):
return handlers.transaction_after_request(response)
|
from framework.transactions import handlers, commands
class TokuTransactionsMiddleware(object):
"""TokuMX transaction middleware."""
def process_request(self, request):
handlers.transaction_before_request()
def process_exception(self, request, exception):
commands.rollback()
def process_response(self, request, response):
return handlers.transaction_after_request(response)
|
Make sure transaction is rolled back if an exception is raised
|
Make sure transaction is rolled back if an exception is raised
|
Python
|
apache-2.0
|
TomHeatwole/osf.io,asanfilippo7/osf.io,fabianvf/osf.io,chrisseto/osf.io,dplorimer/osf,monikagrabowska/osf.io,chrisseto/osf.io,TomBaxter/osf.io,doublebits/osf.io,cwisecarver/osf.io,cwisecarver/osf.io,icereval/osf.io,jnayak1/osf.io,haoyuchen1992/osf.io,laurenrevere/osf.io,ckc6cz/osf.io,doublebits/osf.io,njantrania/osf.io,danielneis/osf.io,lyndsysimon/osf.io,DanielSBrown/osf.io,wearpants/osf.io,chrisseto/osf.io,jeffreyliu3230/osf.io,saradbowman/osf.io,acshi/osf.io,mfraezz/osf.io,sloria/osf.io,DanielSBrown/osf.io,acshi/osf.io,petermalcolm/osf.io,aaxelb/osf.io,KAsante95/osf.io,erinspace/osf.io,zamattiac/osf.io,reinaH/osf.io,jnayak1/osf.io,acshi/osf.io,jinluyuan/osf.io,cldershem/osf.io,aaxelb/osf.io,MerlinZhang/osf.io,brianjgeiger/osf.io,sbt9uc/osf.io,leb2dg/osf.io,abought/osf.io,MerlinZhang/osf.io,cldershem/osf.io,mattclark/osf.io,binoculars/osf.io,zachjanicki/osf.io,erinspace/osf.io,asanfilippo7/osf.io,jeffreyliu3230/osf.io,kwierman/osf.io,samchrisinger/osf.io,TomHeatwole/osf.io,mluo613/osf.io,Johnetordoff/osf.io,caseyrygt/osf.io,RomanZWang/osf.io,doublebits/osf.io,RomanZWang/osf.io,haoyuchen1992/osf.io,hmoco/osf.io,ZobairAlijan/osf.io,zachjanicki/osf.io,ticklemepierce/osf.io,adlius/osf.io,fabianvf/osf.io,rdhyee/osf.io,reinaH/osf.io,crcresearch/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,cslzchen/osf.io,jmcarp/osf.io,billyhunt/osf.io,caneruguz/osf.io,TomHeatwole/osf.io,jnayak1/osf.io,mfraezz/osf.io,alexschiller/osf.io,icereval/osf.io,acshi/osf.io,caneruguz/osf.io,SSJohns/osf.io,jolene-esposito/osf.io,petermalcolm/osf.io,danielneis/osf.io,cosenal/osf.io,hmoco/osf.io,acshi/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,crcresearch/osf.io,zamattiac/osf.io,mluke93/osf.io,KAsante95/osf.io,hmoco/osf.io,doublebits/osf.io,arpitar/osf.io,samchrisinger/osf.io,GageGaskins/osf.io,caseyrollins/osf.io,caseyrollins/osf.io,HarryRybacki/osf.io,abought/osf.io,jmcarp/osf.io,caseyrygt/osf.io,jinluyuan/osf.io,ticklemepierce/osf.io,TomHeatwole/osf.io,amyshi188/osf.io,brandonPurvis/osf.
io,caseyrollins/osf.io,icereval/osf.io,DanielSBrown/osf.io,jinluyuan/osf.io,Johnetordoff/osf.io,felliott/osf.io,asanfilippo7/osf.io,reinaH/osf.io,amyshi188/osf.io,zamattiac/osf.io,jeffreyliu3230/osf.io,ckc6cz/osf.io,kch8qx/osf.io,fabianvf/osf.io,alexschiller/osf.io,HarryRybacki/osf.io,samanehsan/osf.io,GageGaskins/osf.io,jmcarp/osf.io,chennan47/osf.io,brianjgeiger/osf.io,samanehsan/osf.io,lyndsysimon/osf.io,pattisdr/osf.io,barbour-em/osf.io,leb2dg/osf.io,petermalcolm/osf.io,billyhunt/osf.io,caneruguz/osf.io,mfraezz/osf.io,mfraezz/osf.io,amyshi188/osf.io,amyshi188/osf.io,emetsger/osf.io,HalcyonChimera/osf.io,cldershem/osf.io,jolene-esposito/osf.io,dplorimer/osf,dplorimer/osf,felliott/osf.io,saradbowman/osf.io,cosenal/osf.io,felliott/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,monikagrabowska/osf.io,felliott/osf.io,erinspace/osf.io,rdhyee/osf.io,cslzchen/osf.io,zachjanicki/osf.io,ticklemepierce/osf.io,hmoco/osf.io,monikagrabowska/osf.io,danielneis/osf.io,lyndsysimon/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,brandonPurvis/osf.io,ZobairAlijan/osf.io,binoculars/osf.io,zamattiac/osf.io,caseyrygt/osf.io,RomanZWang/osf.io,arpitar/osf.io,sloria/osf.io,samanehsan/osf.io,adlius/osf.io,sloria/osf.io,mluke93/osf.io,jmcarp/osf.io,ZobairAlijan/osf.io,leb2dg/osf.io,abought/osf.io,cldershem/osf.io,ticklemepierce/osf.io,KAsante95/osf.io,Johnetordoff/osf.io,doublebits/osf.io,caneruguz/osf.io,bdyetton/prettychart,emetsger/osf.io,Ghalko/osf.io,adlius/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,SSJohns/osf.io,ckc6cz/osf.io,Nesiehr/osf.io,emetsger/osf.io,cwisecarver/osf.io,TomBaxter/osf.io,asanfilippo7/osf.io,sbt9uc/osf.io,cosenal/osf.io,MerlinZhang/osf.io,jnayak1/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,binoculars/osf.io,GageGaskins/osf.io,mluo613/osf.io,aaxelb/osf.io,monikagrabowska/osf.io,ckc6cz/osf.io,njantrania/osf.io,alexschiller/osf.io,pattisdr/osf.io,kch8qx/osf.io,sbt9uc/osf.io,mluo613/osf.io,SSJohns/osf.io,mluke93/osf.io,emetsger/osf.io,DanielSBrown/osf.io,kch8qx/
osf.io,rdhyee/osf.io,HalcyonChimera/osf.io,laurenrevere/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,chennan47/osf.io,cslzchen/osf.io,jolene-esposito/osf.io,baylee-d/osf.io,arpitar/osf.io,KAsante95/osf.io,samchrisinger/osf.io,baylee-d/osf.io,jolene-esposito/osf.io,barbour-em/osf.io,kwierman/osf.io,zachjanicki/osf.io,billyhunt/osf.io,Ghalko/osf.io,brianjgeiger/osf.io,bdyetton/prettychart,samchrisinger/osf.io,arpitar/osf.io,fabianvf/osf.io,ZobairAlijan/osf.io,Ghalko/osf.io,danielneis/osf.io,brandonPurvis/osf.io,Nesiehr/osf.io,bdyetton/prettychart,petermalcolm/osf.io,wearpants/osf.io,GageGaskins/osf.io,brandonPurvis/osf.io,rdhyee/osf.io,mluo613/osf.io,mluo613/osf.io,Ghalko/osf.io,RomanZWang/osf.io,cslzchen/osf.io,sbt9uc/osf.io,alexschiller/osf.io,abought/osf.io,haoyuchen1992/osf.io,crcresearch/osf.io,kch8qx/osf.io,barbour-em/osf.io,HarryRybacki/osf.io,chrisseto/osf.io,HarryRybacki/osf.io,barbour-em/osf.io,Nesiehr/osf.io,cosenal/osf.io,chennan47/osf.io,MerlinZhang/osf.io,wearpants/osf.io,reinaH/osf.io,kch8qx/osf.io,bdyetton/prettychart,haoyuchen1992/osf.io,Nesiehr/osf.io,njantrania/osf.io,GageGaskins/osf.io,CenterForOpenScience/osf.io,jeffreyliu3230/osf.io,adlius/osf.io,alexschiller/osf.io,brianjgeiger/osf.io,caseyrygt/osf.io,CenterForOpenScience/osf.io,dplorimer/osf,cwisecarver/osf.io,pattisdr/osf.io,jinluyuan/osf.io,njantrania/osf.io,leb2dg/osf.io,lyndsysimon/osf.io,SSJohns/osf.io,billyhunt/osf.io,mluke93/osf.io,TomBaxter/osf.io,wearpants/osf.io,kwierman/osf.io,kwierman/osf.io,samanehsan/osf.io,billyhunt/osf.io
|
- from framework.transactions import handlers
+ from framework.transactions import handlers, commands
class TokuTransactionsMiddleware(object):
"""TokuMX transaction middleware."""
def process_request(self, request):
handlers.transaction_before_request()
+ def process_exception(self, request, exception):
+ commands.rollback()
+
def process_response(self, request, response):
return handlers.transaction_after_request(response)
|
Make sure transaction is rolled back if an exception is raised
|
## Code Before:
from framework.transactions import handlers
class TokuTransactionsMiddleware(object):
"""TokuMX transaction middleware."""
def process_request(self, request):
handlers.transaction_before_request()
def process_response(self, request, response):
return handlers.transaction_after_request(response)
## Instruction:
Make sure transaction is rolled back if an exception is raised
## Code After:
from framework.transactions import handlers, commands
class TokuTransactionsMiddleware(object):
"""TokuMX transaction middleware."""
def process_request(self, request):
handlers.transaction_before_request()
def process_exception(self, request, exception):
commands.rollback()
def process_response(self, request, response):
return handlers.transaction_after_request(response)
|
- from framework.transactions import handlers
+ from framework.transactions import handlers, commands
? ++++++++++
class TokuTransactionsMiddleware(object):
"""TokuMX transaction middleware."""
def process_request(self, request):
handlers.transaction_before_request()
+ def process_exception(self, request, exception):
+ commands.rollback()
+
def process_response(self, request, response):
return handlers.transaction_after_request(response)
|
31c360fbdb3aa1393715e53ec4dfd86e59d68249
|
staticgen_demo/staticgen_views.py
|
staticgen_demo/staticgen_views.py
|
from __future__ import unicode_literals
from django.conf import settings
from django.utils import translation
from cms.models import Title
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
class StaicgenDemoStaticViews(StaticgenView):
def items(self):
return (
'django.contrib.sitemaps.views.sitemap',
'robots.txt',
'page_not_found',
'application_error',
)
staticgen_pool.register(StaicgenDemoStaticViews)
class StaticgenCMSView(StaticgenView):
def items(self):
items = Title.objects.public().filter(
page__login_required=False,
page__site_id=settings.SITE_ID,
).order_by('page__path')
return items
def url(self, obj):
translation.activate(obj.language)
url = obj.page.get_absolute_url(obj.language)
translation.deactivate()
return url
staticgen_pool.register(StaticgenCMSView)
|
from __future__ import unicode_literals
from django.conf import settings
from django.dispatch import receiver
from django.utils import translation
from cms.models import Title
from cms.signals import page_moved, post_publish, post_unpublish
from staticgen.models import Page
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
class StaicgenDemoStaticViews(StaticgenView):
def items(self):
return (
'django.contrib.sitemaps.views.sitemap',
'robots.txt',
'page_not_found',
'application_error',
)
staticgen_pool.register(StaicgenDemoStaticViews)
class StaticgenCMSView(StaticgenView):
def items(self):
items = Title.objects.public().filter(
page__login_required=False,
page__site_id=settings.SITE_ID,
).order_by('page__path')
return items
def url(self, obj):
translation.activate(obj.language)
url = obj.page.get_absolute_url(obj.language)
translation.deactivate()
return url
staticgen_pool.register(StaticgenCMSView)
@receiver((page_moved, post_publish, post_unpublish, ))
def mark_cms_page_as_changed(sender, **kwargs):
page = kwargs['instance']
language = kwargs['language']
public_url = page.get_public_url(language=language)
try:
page = Page.objects.get(path=public_url)
except Page.DoesNotExist:
pass
else:
page.publisher_state = Page.PUBLISHER_STATE_CHANGED
page.save()
|
Mark CMS pages as changed .. using CMS publisher signals.
|
Mark CMS pages as changed .. using CMS publisher signals.
|
Python
|
bsd-3-clause
|
mishbahr/staticgen-demo,mishbahr/staticgen-demo,mishbahr/staticgen-demo
|
from __future__ import unicode_literals
from django.conf import settings
+ from django.dispatch import receiver
from django.utils import translation
from cms.models import Title
+ from cms.signals import page_moved, post_publish, post_unpublish
+
+ from staticgen.models import Page
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
class StaicgenDemoStaticViews(StaticgenView):
def items(self):
return (
'django.contrib.sitemaps.views.sitemap',
'robots.txt',
'page_not_found',
'application_error',
)
staticgen_pool.register(StaicgenDemoStaticViews)
class StaticgenCMSView(StaticgenView):
def items(self):
items = Title.objects.public().filter(
page__login_required=False,
page__site_id=settings.SITE_ID,
).order_by('page__path')
return items
def url(self, obj):
translation.activate(obj.language)
url = obj.page.get_absolute_url(obj.language)
translation.deactivate()
return url
staticgen_pool.register(StaticgenCMSView)
+
+ @receiver((page_moved, post_publish, post_unpublish, ))
+ def mark_cms_page_as_changed(sender, **kwargs):
+ page = kwargs['instance']
+ language = kwargs['language']
+
+ public_url = page.get_public_url(language=language)
+ try:
+ page = Page.objects.get(path=public_url)
+ except Page.DoesNotExist:
+ pass
+ else:
+ page.publisher_state = Page.PUBLISHER_STATE_CHANGED
+ page.save()
+
|
Mark CMS pages as changed .. using CMS publisher signals.
|
## Code Before:
from __future__ import unicode_literals
from django.conf import settings
from django.utils import translation
from cms.models import Title
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
class StaicgenDemoStaticViews(StaticgenView):
def items(self):
return (
'django.contrib.sitemaps.views.sitemap',
'robots.txt',
'page_not_found',
'application_error',
)
staticgen_pool.register(StaicgenDemoStaticViews)
class StaticgenCMSView(StaticgenView):
def items(self):
items = Title.objects.public().filter(
page__login_required=False,
page__site_id=settings.SITE_ID,
).order_by('page__path')
return items
def url(self, obj):
translation.activate(obj.language)
url = obj.page.get_absolute_url(obj.language)
translation.deactivate()
return url
staticgen_pool.register(StaticgenCMSView)
## Instruction:
Mark CMS pages as changed .. using CMS publisher signals.
## Code After:
from __future__ import unicode_literals
from django.conf import settings
from django.dispatch import receiver
from django.utils import translation
from cms.models import Title
from cms.signals import page_moved, post_publish, post_unpublish
from staticgen.models import Page
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
class StaicgenDemoStaticViews(StaticgenView):
def items(self):
return (
'django.contrib.sitemaps.views.sitemap',
'robots.txt',
'page_not_found',
'application_error',
)
staticgen_pool.register(StaicgenDemoStaticViews)
class StaticgenCMSView(StaticgenView):
def items(self):
items = Title.objects.public().filter(
page__login_required=False,
page__site_id=settings.SITE_ID,
).order_by('page__path')
return items
def url(self, obj):
translation.activate(obj.language)
url = obj.page.get_absolute_url(obj.language)
translation.deactivate()
return url
staticgen_pool.register(StaticgenCMSView)
@receiver((page_moved, post_publish, post_unpublish, ))
def mark_cms_page_as_changed(sender, **kwargs):
page = kwargs['instance']
language = kwargs['language']
public_url = page.get_public_url(language=language)
try:
page = Page.objects.get(path=public_url)
except Page.DoesNotExist:
pass
else:
page.publisher_state = Page.PUBLISHER_STATE_CHANGED
page.save()
|
from __future__ import unicode_literals
from django.conf import settings
+ from django.dispatch import receiver
from django.utils import translation
from cms.models import Title
+ from cms.signals import page_moved, post_publish, post_unpublish
+
+ from staticgen.models import Page
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
class StaicgenDemoStaticViews(StaticgenView):
def items(self):
return (
'django.contrib.sitemaps.views.sitemap',
'robots.txt',
'page_not_found',
'application_error',
)
staticgen_pool.register(StaicgenDemoStaticViews)
class StaticgenCMSView(StaticgenView):
def items(self):
items = Title.objects.public().filter(
page__login_required=False,
page__site_id=settings.SITE_ID,
).order_by('page__path')
return items
def url(self, obj):
translation.activate(obj.language)
url = obj.page.get_absolute_url(obj.language)
translation.deactivate()
return url
staticgen_pool.register(StaticgenCMSView)
+
+
+ @receiver((page_moved, post_publish, post_unpublish, ))
+ def mark_cms_page_as_changed(sender, **kwargs):
+ page = kwargs['instance']
+ language = kwargs['language']
+
+ public_url = page.get_public_url(language=language)
+ try:
+ page = Page.objects.get(path=public_url)
+ except Page.DoesNotExist:
+ pass
+ else:
+ page.publisher_state = Page.PUBLISHER_STATE_CHANGED
+ page.save()
|
a870433fab72fe184f12353397ad916aabe5cb61
|
pegasus/gtfar/__init__.py
|
pegasus/gtfar/__init__.py
|
__author__ = 'Rajiv Mayani'
|
__author__ = 'Rajiv Mayani'
__VERSION__ = 0.1
from flask import Flask
from flask.ext.cache import Cache
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# Load configuration defaults
app.config.from_object('pegasus.gtfar.defaults')
db = SQLAlchemy(app)
cache = Cache(app)
|
Add boilerplate code to configure the Flask app.
|
Add boilerplate code to configure the Flask app.
|
Python
|
apache-2.0
|
pegasus-isi/pegasus-gtfar,pegasus-isi/pegasus-gtfar,pegasus-isi/pegasus-gtfar,pegasus-isi/pegasus-gtfar
|
__author__ = 'Rajiv Mayani'
+ __VERSION__ = 0.1
+
+ from flask import Flask
+ from flask.ext.cache import Cache
+ from flask.ext.sqlalchemy import SQLAlchemy
+
+ app = Flask(__name__)
+
+ # Load configuration defaults
+ app.config.from_object('pegasus.gtfar.defaults')
+
+ db = SQLAlchemy(app)
+ cache = Cache(app)
+
|
Add boilerplate code to configure the Flask app.
|
## Code Before:
__author__ = 'Rajiv Mayani'
## Instruction:
Add boilerplate code to configure the Flask app.
## Code After:
__author__ = 'Rajiv Mayani'
__VERSION__ = 0.1
from flask import Flask
from flask.ext.cache import Cache
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# Load configuration defaults
app.config.from_object('pegasus.gtfar.defaults')
db = SQLAlchemy(app)
cache = Cache(app)
|
__author__ = 'Rajiv Mayani'
+
+ __VERSION__ = 0.1
+
+ from flask import Flask
+ from flask.ext.cache import Cache
+ from flask.ext.sqlalchemy import SQLAlchemy
+
+ app = Flask(__name__)
+
+ # Load configuration defaults
+ app.config.from_object('pegasus.gtfar.defaults')
+
+ db = SQLAlchemy(app)
+ cache = Cache(app)
|
46c33ca68c1124fb06c4ba62306cb00ba61d7e5c
|
tests/__init__.py
|
tests/__init__.py
|
from flexmock import flexmock
from flask.ext.storage import MockStorage
from flask_uploads import init
class TestCase(object):
added_objects = []
committed_objects = []
created_objects = []
deleted_objects = []
def setup_method(self, method, resizer=None):
init(db_mock, MockStorage, resizer)
self.db = db_mock
self.Storage = MockStorage
self.storage = MockStorage()
self.resizer = resizer
def teardown_method(self, method):
# Empty the stacks.
TestCase.added_objects[:] = []
TestCase.committed_objects[:] = []
TestCase.created_objects[:] = []
TestCase.deleted_objects[:] = []
class MockModel(object):
def __init__(self, **kw):
TestCase.created_objects.append(self)
for key, val in kw.iteritems():
setattr(self, key, val)
db_mock = flexmock(
Column=lambda *a, **kw: ('column', a, kw),
Integer=('integer', [], {}),
Unicode=lambda *a, **kw: ('unicode', a, kw),
Model=MockModel,
session=flexmock(
add=TestCase.added_objects.append,
commit=lambda: TestCase.committed_objects.extend(
TestCase.added_objects + TestCase.deleted_objects
),
delete=TestCase.deleted_objects.append,
),
)
|
from flexmock import flexmock
from flask.ext.storage import MockStorage
from flask_uploads import init
class TestCase(object):
added_objects = []
committed_objects = []
created_objects = []
deleted_objects = []
def setup_method(self, method, resizer=None):
init(db_mock, MockStorage, resizer)
self.db = db_mock
self.Storage = MockStorage
self.storage = MockStorage()
self.resizer = resizer
def teardown_method(self, method):
# Empty the stacks.
TestCase.added_objects[:] = []
TestCase.committed_objects[:] = []
TestCase.created_objects[:] = []
TestCase.deleted_objects[:] = []
class MockModel(object):
def __init__(self, **kw):
TestCase.created_objects.append(self)
for key, val in kw.iteritems():
setattr(self, key, val)
db_mock = flexmock(
Column=lambda *a, **kw: ('column', a, kw),
Integer=('integer', [], {}),
Unicode=lambda *a, **kw: ('unicode', a, kw),
Model=MockModel,
metadata=flexmock(tables={}),
session=flexmock(
add=TestCase.added_objects.append,
commit=lambda: TestCase.committed_objects.extend(
TestCase.added_objects + TestCase.deleted_objects
),
delete=TestCase.deleted_objects.append,
),
)
|
Add metadata.tables to mock db.
|
Add metadata.tables to mock db.
|
Python
|
mit
|
FelixLoether/flask-uploads,FelixLoether/flask-image-upload-thing
|
from flexmock import flexmock
from flask.ext.storage import MockStorage
from flask_uploads import init
class TestCase(object):
added_objects = []
committed_objects = []
created_objects = []
deleted_objects = []
def setup_method(self, method, resizer=None):
init(db_mock, MockStorage, resizer)
self.db = db_mock
self.Storage = MockStorage
self.storage = MockStorage()
self.resizer = resizer
def teardown_method(self, method):
# Empty the stacks.
TestCase.added_objects[:] = []
TestCase.committed_objects[:] = []
TestCase.created_objects[:] = []
TestCase.deleted_objects[:] = []
class MockModel(object):
def __init__(self, **kw):
TestCase.created_objects.append(self)
for key, val in kw.iteritems():
setattr(self, key, val)
db_mock = flexmock(
Column=lambda *a, **kw: ('column', a, kw),
Integer=('integer', [], {}),
Unicode=lambda *a, **kw: ('unicode', a, kw),
Model=MockModel,
+ metadata=flexmock(tables={}),
session=flexmock(
add=TestCase.added_objects.append,
commit=lambda: TestCase.committed_objects.extend(
TestCase.added_objects + TestCase.deleted_objects
),
delete=TestCase.deleted_objects.append,
),
)
|
Add metadata.tables to mock db.
|
## Code Before:
from flexmock import flexmock
from flask.ext.storage import MockStorage
from flask_uploads import init
class TestCase(object):
added_objects = []
committed_objects = []
created_objects = []
deleted_objects = []
def setup_method(self, method, resizer=None):
init(db_mock, MockStorage, resizer)
self.db = db_mock
self.Storage = MockStorage
self.storage = MockStorage()
self.resizer = resizer
def teardown_method(self, method):
# Empty the stacks.
TestCase.added_objects[:] = []
TestCase.committed_objects[:] = []
TestCase.created_objects[:] = []
TestCase.deleted_objects[:] = []
class MockModel(object):
def __init__(self, **kw):
TestCase.created_objects.append(self)
for key, val in kw.iteritems():
setattr(self, key, val)
db_mock = flexmock(
Column=lambda *a, **kw: ('column', a, kw),
Integer=('integer', [], {}),
Unicode=lambda *a, **kw: ('unicode', a, kw),
Model=MockModel,
session=flexmock(
add=TestCase.added_objects.append,
commit=lambda: TestCase.committed_objects.extend(
TestCase.added_objects + TestCase.deleted_objects
),
delete=TestCase.deleted_objects.append,
),
)
## Instruction:
Add metadata.tables to mock db.
## Code After:
from flexmock import flexmock
from flask.ext.storage import MockStorage
from flask_uploads import init
class TestCase(object):
added_objects = []
committed_objects = []
created_objects = []
deleted_objects = []
def setup_method(self, method, resizer=None):
init(db_mock, MockStorage, resizer)
self.db = db_mock
self.Storage = MockStorage
self.storage = MockStorage()
self.resizer = resizer
def teardown_method(self, method):
# Empty the stacks.
TestCase.added_objects[:] = []
TestCase.committed_objects[:] = []
TestCase.created_objects[:] = []
TestCase.deleted_objects[:] = []
class MockModel(object):
def __init__(self, **kw):
TestCase.created_objects.append(self)
for key, val in kw.iteritems():
setattr(self, key, val)
db_mock = flexmock(
Column=lambda *a, **kw: ('column', a, kw),
Integer=('integer', [], {}),
Unicode=lambda *a, **kw: ('unicode', a, kw),
Model=MockModel,
metadata=flexmock(tables={}),
session=flexmock(
add=TestCase.added_objects.append,
commit=lambda: TestCase.committed_objects.extend(
TestCase.added_objects + TestCase.deleted_objects
),
delete=TestCase.deleted_objects.append,
),
)
|
from flexmock import flexmock
from flask.ext.storage import MockStorage
from flask_uploads import init
class TestCase(object):
added_objects = []
committed_objects = []
created_objects = []
deleted_objects = []
def setup_method(self, method, resizer=None):
init(db_mock, MockStorage, resizer)
self.db = db_mock
self.Storage = MockStorage
self.storage = MockStorage()
self.resizer = resizer
def teardown_method(self, method):
# Empty the stacks.
TestCase.added_objects[:] = []
TestCase.committed_objects[:] = []
TestCase.created_objects[:] = []
TestCase.deleted_objects[:] = []
class MockModel(object):
def __init__(self, **kw):
TestCase.created_objects.append(self)
for key, val in kw.iteritems():
setattr(self, key, val)
db_mock = flexmock(
Column=lambda *a, **kw: ('column', a, kw),
Integer=('integer', [], {}),
Unicode=lambda *a, **kw: ('unicode', a, kw),
Model=MockModel,
+ metadata=flexmock(tables={}),
session=flexmock(
add=TestCase.added_objects.append,
commit=lambda: TestCase.committed_objects.extend(
TestCase.added_objects + TestCase.deleted_objects
),
delete=TestCase.deleted_objects.append,
),
)
|
ced02ae257246e700caa0da075d86becccc3b5c9
|
jarn/viewdoc/colors.py
|
jarn/viewdoc/colors.py
|
import os
import functools
import blessed
def color(func):
functools.wraps(func)
def wrapper(string):
if os.environ.get('JARN_NO_COLOR') == '1':
return string
return func(string)
return wrapper
term = blessed.Terminal()
bold = color(term.bold)
blue = color(term.bold_blue)
green = color(term.bold_green)
red = color(term.bold_red)
|
import os
import functools
import blessed
def color(func):
assignments = functools.WRAPPER_ASSIGNMENTS
if not hasattr(func, '__name__'):
assignments = [x for x in assignments if x != '__name__']
@functools.wraps(func, assignments)
def wrapper(string):
if os.environ.get('JARN_NO_COLOR') == '1':
return string
return func(string)
return wrapper
term = blessed.Terminal()
bold = color(term.bold)
blue = color(term.bold_blue)
green = color(term.bold_green)
red = color(term.bold_red)
|
Fix wrapping in color decorator.
|
Fix wrapping in color decorator.
|
Python
|
bsd-2-clause
|
Jarn/jarn.viewdoc
|
import os
import functools
import blessed
def color(func):
+ assignments = functools.WRAPPER_ASSIGNMENTS
+ if not hasattr(func, '__name__'):
+ assignments = [x for x in assignments if x != '__name__']
+
- functools.wraps(func)
+ @functools.wraps(func, assignments)
def wrapper(string):
if os.environ.get('JARN_NO_COLOR') == '1':
return string
return func(string)
return wrapper
term = blessed.Terminal()
bold = color(term.bold)
blue = color(term.bold_blue)
green = color(term.bold_green)
red = color(term.bold_red)
|
Fix wrapping in color decorator.
|
## Code Before:
import os
import functools
import blessed
def color(func):
functools.wraps(func)
def wrapper(string):
if os.environ.get('JARN_NO_COLOR') == '1':
return string
return func(string)
return wrapper
term = blessed.Terminal()
bold = color(term.bold)
blue = color(term.bold_blue)
green = color(term.bold_green)
red = color(term.bold_red)
## Instruction:
Fix wrapping in color decorator.
## Code After:
import os
import functools
import blessed
def color(func):
assignments = functools.WRAPPER_ASSIGNMENTS
if not hasattr(func, '__name__'):
assignments = [x for x in assignments if x != '__name__']
@functools.wraps(func, assignments)
def wrapper(string):
if os.environ.get('JARN_NO_COLOR') == '1':
return string
return func(string)
return wrapper
term = blessed.Terminal()
bold = color(term.bold)
blue = color(term.bold_blue)
green = color(term.bold_green)
red = color(term.bold_red)
|
import os
import functools
import blessed
def color(func):
+ assignments = functools.WRAPPER_ASSIGNMENTS
+ if not hasattr(func, '__name__'):
+ assignments = [x for x in assignments if x != '__name__']
+
- functools.wraps(func)
+ @functools.wraps(func, assignments)
? + +++++++++++++
def wrapper(string):
if os.environ.get('JARN_NO_COLOR') == '1':
return string
return func(string)
return wrapper
term = blessed.Terminal()
bold = color(term.bold)
blue = color(term.bold_blue)
green = color(term.bold_green)
red = color(term.bold_red)
|
9311cbe8ed7a434adb46340640895b48e8cc4027
|
examples/pi-montecarlo/pi_distarray.py
|
examples/pi-montecarlo/pi_distarray.py
|
import sys
from distarray.client import RandomModule, Context
from util import timer
context = Context()
random = RandomModule(context)
@timer
def calc_pi(n):
"""Estimate pi using distributed NumPy arrays."""
x = random.rand((n,))
y = random.rand((n,))
r = context.hypot(x, y)
return 4 * float((r < 1.).sum())/n
if __name__ == '__main__':
N = int(sys.argv[1])
result, time = calc_pi(N)
print('time : %3.4g\nresult: %.7f' % (time, result))
context.view.client.purge_everything()
|
import sys
from distarray.random import Random
from distarray.client import Context
from util import timer
context = Context()
random = Random(context)
@timer
def calc_pi(n):
"""Estimate pi using distributed NumPy arrays."""
x = random.rand((n,))
y = random.rand((n,))
r = context.hypot(x, y)
return 4 * float((r < 1.).sum())/n
if __name__ == '__main__':
N = int(sys.argv[1])
result, time = calc_pi(N)
print('time : %3.4g\nresult: %.7f' % (time, result))
context.view.client.purge_everything()
|
Change to reflect recent API changes.
|
Change to reflect recent API changes.
|
Python
|
bsd-3-clause
|
RaoUmer/distarray,enthought/distarray,RaoUmer/distarray,enthought/distarray
|
import sys
+
+ from distarray.random import Random
- from distarray.client import RandomModule, Context
+ from distarray.client import Context
from util import timer
context = Context()
- random = RandomModule(context)
+ random = Random(context)
+
@timer
def calc_pi(n):
"""Estimate pi using distributed NumPy arrays."""
x = random.rand((n,))
y = random.rand((n,))
r = context.hypot(x, y)
return 4 * float((r < 1.).sum())/n
if __name__ == '__main__':
N = int(sys.argv[1])
result, time = calc_pi(N)
print('time : %3.4g\nresult: %.7f' % (time, result))
context.view.client.purge_everything()
|
Change to reflect recent API changes.
|
## Code Before:
import sys
from distarray.client import RandomModule, Context
from util import timer
context = Context()
random = RandomModule(context)
@timer
def calc_pi(n):
"""Estimate pi using distributed NumPy arrays."""
x = random.rand((n,))
y = random.rand((n,))
r = context.hypot(x, y)
return 4 * float((r < 1.).sum())/n
if __name__ == '__main__':
N = int(sys.argv[1])
result, time = calc_pi(N)
print('time : %3.4g\nresult: %.7f' % (time, result))
context.view.client.purge_everything()
## Instruction:
Change to reflect recent API changes.
## Code After:
import sys
from distarray.random import Random
from distarray.client import Context
from util import timer
context = Context()
random = Random(context)
@timer
def calc_pi(n):
"""Estimate pi using distributed NumPy arrays."""
x = random.rand((n,))
y = random.rand((n,))
r = context.hypot(x, y)
return 4 * float((r < 1.).sum())/n
if __name__ == '__main__':
N = int(sys.argv[1])
result, time = calc_pi(N)
print('time : %3.4g\nresult: %.7f' % (time, result))
context.view.client.purge_everything()
|
import sys
+
+ from distarray.random import Random
- from distarray.client import RandomModule, Context
? --------------
+ from distarray.client import Context
from util import timer
context = Context()
- random = RandomModule(context)
? ------
+ random = Random(context)
+
@timer
def calc_pi(n):
"""Estimate pi using distributed NumPy arrays."""
x = random.rand((n,))
y = random.rand((n,))
r = context.hypot(x, y)
return 4 * float((r < 1.).sum())/n
if __name__ == '__main__':
N = int(sys.argv[1])
result, time = calc_pi(N)
print('time : %3.4g\nresult: %.7f' % (time, result))
context.view.client.purge_everything()
|
f870254cfed6f5ea0f88dae910f5c80b7f325e9a
|
freeze/urls.py
|
freeze/urls.py
|
from django.conf.urls import url
from freeze import views
urlpatterns = [
url(r'^download-static-site/$', views.download_static_site, name='freeze_download_static_site'),
url(r'^generate-static-site/$', views.generate_static_site, name='freeze_generate_static_site'),
]
|
if django.VERSION < (2, 0):
from django.conf.urls import include, url as path
else:
from django.urls import include, path
from freeze import views
urlpatterns = [
path("download-static-site/", views.download_static_site, name="freeze_download_static_site"),
path("generate-static-site/", views.generate_static_site, name="freeze_generate_static_site"),
]
|
Support for newer versions of django
|
Support for newer versions of django
|
Python
|
mit
|
fabiocaccamo/django-freeze,fabiocaccamo/django-freeze,fabiocaccamo/django-freeze
|
+ if django.VERSION < (2, 0):
- from django.conf.urls import url
+ from django.conf.urls import include, url as path
+ else:
+ from django.urls import include, path
from freeze import views
urlpatterns = [
- url(r'^download-static-site/$', views.download_static_site, name='freeze_download_static_site'),
+ path("download-static-site/", views.download_static_site, name="freeze_download_static_site"),
- url(r'^generate-static-site/$', views.generate_static_site, name='freeze_generate_static_site'),
+ path("generate-static-site/", views.generate_static_site, name="freeze_generate_static_site"),
]
-
|
Support for newer versions of django
|
## Code Before:
from django.conf.urls import url
from freeze import views
urlpatterns = [
url(r'^download-static-site/$', views.download_static_site, name='freeze_download_static_site'),
url(r'^generate-static-site/$', views.generate_static_site, name='freeze_generate_static_site'),
]
## Instruction:
Support for newer versions of django
## Code After:
if django.VERSION < (2, 0):
from django.conf.urls import include, url as path
else:
from django.urls import include, path
from freeze import views
urlpatterns = [
path("download-static-site/", views.download_static_site, name="freeze_download_static_site"),
path("generate-static-site/", views.generate_static_site, name="freeze_generate_static_site"),
]
|
+ if django.VERSION < (2, 0):
- from django.conf.urls import url
+ from django.conf.urls import include, url as path
? ++++ +++++++++ ++++++++
+ else:
+ from django.urls import include, path
from freeze import views
urlpatterns = [
- url(r'^download-static-site/$', views.download_static_site, name='freeze_download_static_site'),
? ^^^ ^^^ ^^ ^ ^
+ path("download-static-site/", views.download_static_site, name="freeze_download_static_site"),
? ^^^^ ^ ^ ^ ^
- url(r'^generate-static-site/$', views.generate_static_site, name='freeze_generate_static_site'),
? ^^^ ^^^ ^^ ^ ^
+ path("generate-static-site/", views.generate_static_site, name="freeze_generate_static_site"),
? ^^^^ ^ ^ ^ ^
]
-
|
23e809db71889cec7b2af03b978ecb339853fe51
|
satchless/cart/views.py
|
satchless/cart/views.py
|
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.views.generic.simple import direct_to_template
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
return direct_to_template(request,
'satchless/cart/view.html',
{'cart': cart, 'formset': formset})
|
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
return render_to_response(
['satchless/cart/%s/view.html' % typ, 'satchless/cart/view.html'],
{'cart': cart, 'formset': formset},
context_instance=RequestContext(request))
|
Allow prefixed templates for different cart types
|
Allow prefixed templates for different cart types
|
Python
|
bsd-3-clause
|
taedori81/satchless,fusionbox/satchless,fusionbox/satchless,fusionbox/satchless
|
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
- from django.views.generic.simple import direct_to_template
+ from django.shortcuts import render_to_response
+ from django.template import RequestContext
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
- return direct_to_template(request,
- 'satchless/cart/view.html',
+ return render_to_response(
+ ['satchless/cart/%s/view.html' % typ, 'satchless/cart/view.html'],
- {'cart': cart, 'formset': formset})
+ {'cart': cart, 'formset': formset},
+ context_instance=RequestContext(request))
|
Allow prefixed templates for different cart types
|
## Code Before:
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.views.generic.simple import direct_to_template
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
return direct_to_template(request,
'satchless/cart/view.html',
{'cart': cart, 'formset': formset})
## Instruction:
Allow prefixed templates for different cart types
## Code After:
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
return render_to_response(
['satchless/cart/%s/view.html' % typ, 'satchless/cart/view.html'],
{'cart': cart, 'formset': formset},
context_instance=RequestContext(request))
|
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
- from django.views.generic.simple import direct_to_template
+ from django.shortcuts import render_to_response
+ from django.template import RequestContext
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
- return direct_to_template(request,
- 'satchless/cart/view.html',
+ return render_to_response(
+ ['satchless/cart/%s/view.html' % typ, 'satchless/cart/view.html'],
- {'cart': cart, 'formset': formset})
? ^
+ {'cart': cart, 'formset': formset},
? ^
+ context_instance=RequestContext(request))
|
00cea9f8e51f53f338e19adf0165031d2f9cad77
|
c2corg_ui/templates/utils/format.py
|
c2corg_ui/templates/utils/format.py
|
import bbcode
import markdown
import html
from c2corg_ui.format.wikilinks import C2CWikiLinkExtension
_markdown_parser = None
_bbcode_parser = None
def _get_markdown_parser():
global _markdown_parser
if not _markdown_parser:
extensions = [
C2CWikiLinkExtension(),
]
_markdown_parser = markdown.Markdown(output_format='xhtml5',
extensions=extensions)
return _markdown_parser
def _get_bbcode_parser():
global _bbcode_parser
if not _bbcode_parser:
_bbcode_parser = bbcode.Parser(escape_html=False, newline='\n')
return _bbcode_parser
def parse_code(text, md=True, bb=True):
if md:
text = _get_markdown_parser().convert(text)
if bb:
text = _get_bbcode_parser().format(text)
return text
def sanitize(text):
return html.escape(text)
|
import bbcode
import markdown
import html
from c2corg_ui.format.wikilinks import C2CWikiLinkExtension
from markdown.extensions.nl2br import Nl2BrExtension
from markdown.extensions.toc import TocExtension
_markdown_parser = None
_bbcode_parser = None
def _get_markdown_parser():
global _markdown_parser
if not _markdown_parser:
extensions = [
C2CWikiLinkExtension(),
Nl2BrExtension(),
TocExtension(marker='[toc]', baselevel=2),
]
_markdown_parser = markdown.Markdown(output_format='xhtml5',
extensions=extensions)
return _markdown_parser
def _get_bbcode_parser():
global _bbcode_parser
if not _bbcode_parser:
_bbcode_parser = bbcode.Parser(escape_html=False, newline='\n')
return _bbcode_parser
def parse_code(text, md=True, bb=True):
if md:
text = _get_markdown_parser().convert(text)
if bb:
text = _get_bbcode_parser().format(text)
return text
def sanitize(text):
return html.escape(text)
|
Enable markdown extensions for TOC and linebreaks
|
Enable markdown extensions for TOC and linebreaks
|
Python
|
agpl-3.0
|
Courgetteandratatouille/v6_ui,Courgetteandratatouille/v6_ui,olaurendeau/v6_ui,c2corg/v6_ui,c2corg/v6_ui,c2corg/v6_ui,Courgetteandratatouille/v6_ui,olaurendeau/v6_ui,olaurendeau/v6_ui,c2corg/v6_ui,Courgetteandratatouille/v6_ui,olaurendeau/v6_ui
|
import bbcode
import markdown
import html
from c2corg_ui.format.wikilinks import C2CWikiLinkExtension
+ from markdown.extensions.nl2br import Nl2BrExtension
+ from markdown.extensions.toc import TocExtension
_markdown_parser = None
_bbcode_parser = None
def _get_markdown_parser():
global _markdown_parser
if not _markdown_parser:
extensions = [
C2CWikiLinkExtension(),
+ Nl2BrExtension(),
+ TocExtension(marker='[toc]', baselevel=2),
]
_markdown_parser = markdown.Markdown(output_format='xhtml5',
extensions=extensions)
return _markdown_parser
def _get_bbcode_parser():
global _bbcode_parser
if not _bbcode_parser:
_bbcode_parser = bbcode.Parser(escape_html=False, newline='\n')
return _bbcode_parser
def parse_code(text, md=True, bb=True):
if md:
text = _get_markdown_parser().convert(text)
if bb:
text = _get_bbcode_parser().format(text)
return text
def sanitize(text):
return html.escape(text)
|
Enable markdown extensions for TOC and linebreaks
|
## Code Before:
import bbcode
import markdown
import html
from c2corg_ui.format.wikilinks import C2CWikiLinkExtension
_markdown_parser = None
_bbcode_parser = None
def _get_markdown_parser():
global _markdown_parser
if not _markdown_parser:
extensions = [
C2CWikiLinkExtension(),
]
_markdown_parser = markdown.Markdown(output_format='xhtml5',
extensions=extensions)
return _markdown_parser
def _get_bbcode_parser():
global _bbcode_parser
if not _bbcode_parser:
_bbcode_parser = bbcode.Parser(escape_html=False, newline='\n')
return _bbcode_parser
def parse_code(text, md=True, bb=True):
if md:
text = _get_markdown_parser().convert(text)
if bb:
text = _get_bbcode_parser().format(text)
return text
def sanitize(text):
return html.escape(text)
## Instruction:
Enable markdown extensions for TOC and linebreaks
## Code After:
import bbcode
import markdown
import html
from c2corg_ui.format.wikilinks import C2CWikiLinkExtension
from markdown.extensions.nl2br import Nl2BrExtension
from markdown.extensions.toc import TocExtension
_markdown_parser = None
_bbcode_parser = None
def _get_markdown_parser():
global _markdown_parser
if not _markdown_parser:
extensions = [
C2CWikiLinkExtension(),
Nl2BrExtension(),
TocExtension(marker='[toc]', baselevel=2),
]
_markdown_parser = markdown.Markdown(output_format='xhtml5',
extensions=extensions)
return _markdown_parser
def _get_bbcode_parser():
global _bbcode_parser
if not _bbcode_parser:
_bbcode_parser = bbcode.Parser(escape_html=False, newline='\n')
return _bbcode_parser
def parse_code(text, md=True, bb=True):
if md:
text = _get_markdown_parser().convert(text)
if bb:
text = _get_bbcode_parser().format(text)
return text
def sanitize(text):
return html.escape(text)
|
import bbcode
import markdown
import html
from c2corg_ui.format.wikilinks import C2CWikiLinkExtension
+ from markdown.extensions.nl2br import Nl2BrExtension
+ from markdown.extensions.toc import TocExtension
_markdown_parser = None
_bbcode_parser = None
def _get_markdown_parser():
global _markdown_parser
if not _markdown_parser:
extensions = [
C2CWikiLinkExtension(),
+ Nl2BrExtension(),
+ TocExtension(marker='[toc]', baselevel=2),
]
_markdown_parser = markdown.Markdown(output_format='xhtml5',
extensions=extensions)
return _markdown_parser
def _get_bbcode_parser():
global _bbcode_parser
if not _bbcode_parser:
_bbcode_parser = bbcode.Parser(escape_html=False, newline='\n')
return _bbcode_parser
def parse_code(text, md=True, bb=True):
if md:
text = _get_markdown_parser().convert(text)
if bb:
text = _get_bbcode_parser().format(text)
return text
def sanitize(text):
return html.escape(text)
|
2fc23ca753ca68d3c0531cf9c58d5864adfc373f
|
tests/test_short_url.py
|
tests/test_short_url.py
|
import unittest
from random import randrange
import short_url
class TestShortUrl(unittest.TestCase):
def test_one(self):
url = short_url.encode_url(12)
self.assertEqual(url, 'jy7yj')
key = short_url.decode_url(url)
self.assertEqual(key, 12)
def test_1000_random(self):
for random_int in range(1000):
random_int = randrange(100000000)
url = short_url.encode_url(random_int)
int_ = short_url.decode_url(url)
self.assertEqual(random_int, int_)
def test_custom_alphabet(self):
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
self.assertEqual(url, 'bbaaaaaaaaaaaaaaaaaaaa')
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
self.assertEqual(key, 12)
def test_short_alphabet(self):
with self.assertRaises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with self.assertRaises(AttributeError):
short_url.UrlEncoder(alphabet='a')
|
from random import randrange
from pytest import raises
import short_url
def test_custom_alphabet():
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
assert key == 12
def test_too_short_alphabet():
with raises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with raises(AttributeError):
short_url.UrlEncoder(alphabet='a')
|
Use simple test functions and remove too special tests
|
Use simple test functions and remove too special tests
|
Python
|
mit
|
Alir3z4/python-short_url
|
- import unittest
+
from random import randrange
+
+ from pytest import raises
import short_url
- class TestShortUrl(unittest.TestCase):
- def test_one(self):
- url = short_url.encode_url(12)
- self.assertEqual(url, 'jy7yj')
- key = short_url.decode_url(url)
- self.assertEqual(key, 12)
- def test_1000_random(self):
- for random_int in range(1000):
- random_int = randrange(100000000)
- url = short_url.encode_url(random_int)
- int_ = short_url.decode_url(url)
- self.assertEqual(random_int, int_)
- def test_custom_alphabet(self):
+ def test_custom_alphabet():
- encoder = short_url.UrlEncoder(alphabet='ab')
+ encoder = short_url.UrlEncoder(alphabet='ab')
- url = encoder.encode_url(12)
+ url = encoder.encode_url(12)
- self.assertEqual(url, 'bbaaaaaaaaaaaaaaaaaaaa')
+ assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
- key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
+ key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
- self.assertEqual(key, 12)
+ assert key == 12
- def test_short_alphabet(self):
- with self.assertRaises(AttributeError):
- short_url.UrlEncoder(alphabet='aa')
- with self.assertRaises(AttributeError):
- short_url.UrlEncoder(alphabet='a')
+ def test_too_short_alphabet():
+ with raises(AttributeError):
+ short_url.UrlEncoder(alphabet='aa')
+ with raises(AttributeError):
+ short_url.UrlEncoder(alphabet='a')
+
|
Use simple test functions and remove too special tests
|
## Code Before:
import unittest
from random import randrange
import short_url
class TestShortUrl(unittest.TestCase):
def test_one(self):
url = short_url.encode_url(12)
self.assertEqual(url, 'jy7yj')
key = short_url.decode_url(url)
self.assertEqual(key, 12)
def test_1000_random(self):
for random_int in range(1000):
random_int = randrange(100000000)
url = short_url.encode_url(random_int)
int_ = short_url.decode_url(url)
self.assertEqual(random_int, int_)
def test_custom_alphabet(self):
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
self.assertEqual(url, 'bbaaaaaaaaaaaaaaaaaaaa')
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
self.assertEqual(key, 12)
def test_short_alphabet(self):
with self.assertRaises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with self.assertRaises(AttributeError):
short_url.UrlEncoder(alphabet='a')
## Instruction:
Use simple test functions and remove too special tests
## Code After:
from random import randrange
from pytest import raises
import short_url
def test_custom_alphabet():
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
assert key == 12
def test_too_short_alphabet():
with raises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with raises(AttributeError):
short_url.UrlEncoder(alphabet='a')
|
- import unittest
+
from random import randrange
+
+ from pytest import raises
import short_url
- class TestShortUrl(unittest.TestCase):
- def test_one(self):
- url = short_url.encode_url(12)
- self.assertEqual(url, 'jy7yj')
- key = short_url.decode_url(url)
- self.assertEqual(key, 12)
- def test_1000_random(self):
- for random_int in range(1000):
- random_int = randrange(100000000)
- url = short_url.encode_url(random_int)
- int_ = short_url.decode_url(url)
- self.assertEqual(random_int, int_)
- def test_custom_alphabet(self):
? ---- ----
+ def test_custom_alphabet():
- encoder = short_url.UrlEncoder(alphabet='ab')
? ----
+ encoder = short_url.UrlEncoder(alphabet='ab')
- url = encoder.encode_url(12)
? ----
+ url = encoder.encode_url(12)
- self.assertEqual(url, 'bbaaaaaaaaaaaaaaaaaaaa')
? --------- ^^^^^^ ^ -
+ assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
? ^ ^^^
- key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
? ----
+ key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
- self.assertEqual(key, 12)
+ assert key == 12
+
- def test_short_alphabet(self):
? ---- ----
+ def test_too_short_alphabet():
? ++++
- with self.assertRaises(AttributeError):
? ---- --------- --
+ with raises(AttributeError):
- short_url.UrlEncoder(alphabet='aa')
? ----
+ short_url.UrlEncoder(alphabet='aa')
- with self.assertRaises(AttributeError):
? ---- --------- --
+ with raises(AttributeError):
- short_url.UrlEncoder(alphabet='a')
? ----
+ short_url.UrlEncoder(alphabet='a')
|
4f9bb7a81f52b5ee46be338e5c699411286f1401
|
tasks.py
|
tasks.py
|
from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration_tests(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration_tests, release, docs)
|
from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration, release, docs)
|
Rename integration tests task for consistency w/ other projs
|
Rename integration tests task for consistency w/ other projs
|
Python
|
bsd-2-clause
|
bitprophet/releases
|
from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
- def integration_tests(pty=True):
+ def integration(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
- ns = Collection(test, integration_tests, release, docs)
+ ns = Collection(test, integration, release, docs)
|
Rename integration tests task for consistency w/ other projs
|
## Code Before:
from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration_tests(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration_tests, release, docs)
## Instruction:
Rename integration tests task for consistency w/ other projs
## Code After:
from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration, release, docs)
|
from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
- def integration_tests(pty=True):
? ------
+ def integration(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
- ns = Collection(test, integration_tests, release, docs)
? ------
+ ns = Collection(test, integration, release, docs)
|
441cccc340afeb205da75762ce6e145215a858b3
|
src/zephyr/delayed_stream.py
|
src/zephyr/delayed_stream.py
|
import threading
import collections
import itertools
import time
import zephyr
class DelayedRealTimeStream(threading.Thread):
def __init__(self, signal_collector, callbacks, delay):
threading.Thread.__init__(self)
self.signal_collector = signal_collector
self.callbacks = callbacks
self.delay = delay
self.stream_output_positions = collections.defaultdict(lambda: 0)
self.terminate_requested = False
def add_callback(self, callback):
self.callbacks.append(callback)
def terminate(self):
self.terminate_requested = True
def run(self):
while not self.terminate_requested:
delayed_current_time = zephyr.time() - self.delay
all_streams = itertools.chain(self.signal_collector.iterate_signal_stream_histories(),
self.signal_collector.iterate_event_streams())
for signal_stream_name, signal_stream_history in all_streams:
from_sample = self.stream_output_positions[signal_stream_name]
for sample in signal_stream_history.iterate_samples(from_sample, delayed_current_time):
self.stream_output_positions[signal_stream_name] += 1
for callback in self.callbacks:
callback(signal_stream_name, sample)
time.sleep(0.01)
|
import threading
import collections
import itertools
import time
import zephyr
class DelayedRealTimeStream(threading.Thread):
def __init__(self, signal_collector, callbacks, default_delay, specific_delays={}):
threading.Thread.__init__(self)
self.signal_collector = signal_collector
self.callbacks = callbacks
self.default_delay = default_delay
self.specific_delays = specific_delays
self.stream_output_positions = collections.defaultdict(lambda: 0)
self.terminate_requested = False
def add_callback(self, callback):
self.callbacks.append(callback)
def terminate(self):
self.terminate_requested = True
def run(self):
while not self.terminate_requested:
now = zephyr.time()
all_streams = itertools.chain(self.signal_collector.iterate_signal_stream_histories(),
self.signal_collector.iterate_event_streams())
for signal_stream_name, signal_stream_history in all_streams:
delay = self.specific_delays.get(signal_stream_name, self.default_delay)
delayed_current_time = now - delay
from_sample = self.stream_output_positions[signal_stream_name]
for sample in signal_stream_history.iterate_samples(from_sample, delayed_current_time):
self.stream_output_positions[signal_stream_name] += 1
for callback in self.callbacks:
callback(signal_stream_name, sample)
time.sleep(0.01)
|
Split delay configuration into default_delay and specific_delays
|
Split delay configuration into default_delay and specific_delays
|
Python
|
bsd-2-clause
|
jpaalasm/zephyr-bt
|
import threading
import collections
import itertools
import time
import zephyr
class DelayedRealTimeStream(threading.Thread):
- def __init__(self, signal_collector, callbacks, delay):
+ def __init__(self, signal_collector, callbacks, default_delay, specific_delays={}):
threading.Thread.__init__(self)
self.signal_collector = signal_collector
self.callbacks = callbacks
- self.delay = delay
+ self.default_delay = default_delay
+ self.specific_delays = specific_delays
self.stream_output_positions = collections.defaultdict(lambda: 0)
self.terminate_requested = False
def add_callback(self, callback):
self.callbacks.append(callback)
def terminate(self):
self.terminate_requested = True
def run(self):
while not self.terminate_requested:
+ now = zephyr.time()
- delayed_current_time = zephyr.time() - self.delay
-
all_streams = itertools.chain(self.signal_collector.iterate_signal_stream_histories(),
self.signal_collector.iterate_event_streams())
for signal_stream_name, signal_stream_history in all_streams:
+ delay = self.specific_delays.get(signal_stream_name, self.default_delay)
+
+ delayed_current_time = now - delay
+
from_sample = self.stream_output_positions[signal_stream_name]
for sample in signal_stream_history.iterate_samples(from_sample, delayed_current_time):
self.stream_output_positions[signal_stream_name] += 1
for callback in self.callbacks:
callback(signal_stream_name, sample)
time.sleep(0.01)
|
Split delay configuration into default_delay and specific_delays
|
## Code Before:
import threading
import collections
import itertools
import time
import zephyr
class DelayedRealTimeStream(threading.Thread):
def __init__(self, signal_collector, callbacks, delay):
threading.Thread.__init__(self)
self.signal_collector = signal_collector
self.callbacks = callbacks
self.delay = delay
self.stream_output_positions = collections.defaultdict(lambda: 0)
self.terminate_requested = False
def add_callback(self, callback):
self.callbacks.append(callback)
def terminate(self):
self.terminate_requested = True
def run(self):
while not self.terminate_requested:
delayed_current_time = zephyr.time() - self.delay
all_streams = itertools.chain(self.signal_collector.iterate_signal_stream_histories(),
self.signal_collector.iterate_event_streams())
for signal_stream_name, signal_stream_history in all_streams:
from_sample = self.stream_output_positions[signal_stream_name]
for sample in signal_stream_history.iterate_samples(from_sample, delayed_current_time):
self.stream_output_positions[signal_stream_name] += 1
for callback in self.callbacks:
callback(signal_stream_name, sample)
time.sleep(0.01)
## Instruction:
Split delay configuration into default_delay and specific_delays
## Code After:
import threading
import collections
import itertools
import time
import zephyr
class DelayedRealTimeStream(threading.Thread):
def __init__(self, signal_collector, callbacks, default_delay, specific_delays={}):
threading.Thread.__init__(self)
self.signal_collector = signal_collector
self.callbacks = callbacks
self.default_delay = default_delay
self.specific_delays = specific_delays
self.stream_output_positions = collections.defaultdict(lambda: 0)
self.terminate_requested = False
def add_callback(self, callback):
self.callbacks.append(callback)
def terminate(self):
self.terminate_requested = True
def run(self):
while not self.terminate_requested:
now = zephyr.time()
all_streams = itertools.chain(self.signal_collector.iterate_signal_stream_histories(),
self.signal_collector.iterate_event_streams())
for signal_stream_name, signal_stream_history in all_streams:
delay = self.specific_delays.get(signal_stream_name, self.default_delay)
delayed_current_time = now - delay
from_sample = self.stream_output_positions[signal_stream_name]
for sample in signal_stream_history.iterate_samples(from_sample, delayed_current_time):
self.stream_output_positions[signal_stream_name] += 1
for callback in self.callbacks:
callback(signal_stream_name, sample)
time.sleep(0.01)
|
import threading
import collections
import itertools
import time
import zephyr
class DelayedRealTimeStream(threading.Thread):
- def __init__(self, signal_collector, callbacks, delay):
+ def __init__(self, signal_collector, callbacks, default_delay, specific_delays={}):
? ++++++++ ++++++++++++++++++++
threading.Thread.__init__(self)
self.signal_collector = signal_collector
self.callbacks = callbacks
- self.delay = delay
+ self.default_delay = default_delay
? ++++++++ ++++++++
+ self.specific_delays = specific_delays
self.stream_output_positions = collections.defaultdict(lambda: 0)
self.terminate_requested = False
def add_callback(self, callback):
self.callbacks.append(callback)
def terminate(self):
self.terminate_requested = True
def run(self):
while not self.terminate_requested:
+ now = zephyr.time()
- delayed_current_time = zephyr.time() - self.delay
-
all_streams = itertools.chain(self.signal_collector.iterate_signal_stream_histories(),
self.signal_collector.iterate_event_streams())
for signal_stream_name, signal_stream_history in all_streams:
+ delay = self.specific_delays.get(signal_stream_name, self.default_delay)
+
+ delayed_current_time = now - delay
+
from_sample = self.stream_output_positions[signal_stream_name]
for sample in signal_stream_history.iterate_samples(from_sample, delayed_current_time):
self.stream_output_positions[signal_stream_name] += 1
for callback in self.callbacks:
callback(signal_stream_name, sample)
time.sleep(0.01)
|
33775cd9e740ac70e9213c37825077516e683e55
|
pyatv/support/device_info.py
|
pyatv/support/device_info.py
|
"""Lookup methods for device data."""
import re
from pyatv.const import DeviceModel
_MODEL_LIST = {
"AppleTV2,1": DeviceModel.Gen2,
"AppleTV3,1": DeviceModel.Gen3,
"AppleTV3,2": DeviceModel.Gen3,
"AppleTV5,3": DeviceModel.Gen4,
"AppleTV6,2": DeviceModel.Gen4K,
}
# Incomplete list here!
_VERSION_LIST = {
"17J586": "13.0",
"17K82": "13.2",
"17K449": "13.3",
"17K795": "13.3.1",
}
def lookup_model(identifier):
"""Lookup device model from identifier."""
return _MODEL_LIST.get(identifier, DeviceModel.Unknown)
def lookup_version(build):
"""Lookup OS version from build."""
if not build:
return None
version = _VERSION_LIST.get(build)
if version:
return version
match = re.match(r"^(\d+)[A-Z]", build)
if match:
base = int(match.groups()[0])
# 17A123 corresponds to tvOS 13.x, 16A123 to tvOS 12.x and so on
return str(base - 4) + ".x"
return None
|
"""Lookup methods for device data."""
import re
from pyatv.const import DeviceModel
_MODEL_LIST = {
"AppleTV2,1": DeviceModel.Gen2,
"AppleTV3,1": DeviceModel.Gen3,
"AppleTV3,2": DeviceModel.Gen3,
"AppleTV5,3": DeviceModel.Gen4,
"AppleTV6,2": DeviceModel.Gen4K,
}
# Incomplete list here!
_VERSION_LIST = {
"17J586": "13.0",
"17K82": "13.2",
"17K449": "13.3",
"17K795": "13.3.1",
"17L256": "13.4",
}
def lookup_model(identifier):
"""Lookup device model from identifier."""
return _MODEL_LIST.get(identifier, DeviceModel.Unknown)
def lookup_version(build):
"""Lookup OS version from build."""
if not build:
return None
version = _VERSION_LIST.get(build)
if version:
return version
match = re.match(r"^(\d+)[A-Z]", build)
if match:
base = int(match.groups()[0])
# 17A123 corresponds to tvOS 13.x, 16A123 to tvOS 12.x and so on
return str(base - 4) + ".x"
return None
|
Add tvOS 13.4 build number
|
mrp: Add tvOS 13.4 build number
|
Python
|
mit
|
postlund/pyatv,postlund/pyatv
|
"""Lookup methods for device data."""
import re
from pyatv.const import DeviceModel
_MODEL_LIST = {
"AppleTV2,1": DeviceModel.Gen2,
"AppleTV3,1": DeviceModel.Gen3,
"AppleTV3,2": DeviceModel.Gen3,
"AppleTV5,3": DeviceModel.Gen4,
"AppleTV6,2": DeviceModel.Gen4K,
}
# Incomplete list here!
_VERSION_LIST = {
"17J586": "13.0",
"17K82": "13.2",
"17K449": "13.3",
"17K795": "13.3.1",
+ "17L256": "13.4",
}
def lookup_model(identifier):
"""Lookup device model from identifier."""
return _MODEL_LIST.get(identifier, DeviceModel.Unknown)
def lookup_version(build):
"""Lookup OS version from build."""
if not build:
return None
version = _VERSION_LIST.get(build)
if version:
return version
match = re.match(r"^(\d+)[A-Z]", build)
if match:
base = int(match.groups()[0])
# 17A123 corresponds to tvOS 13.x, 16A123 to tvOS 12.x and so on
return str(base - 4) + ".x"
return None
|
Add tvOS 13.4 build number
|
## Code Before:
"""Lookup methods for device data."""
import re
from pyatv.const import DeviceModel
_MODEL_LIST = {
"AppleTV2,1": DeviceModel.Gen2,
"AppleTV3,1": DeviceModel.Gen3,
"AppleTV3,2": DeviceModel.Gen3,
"AppleTV5,3": DeviceModel.Gen4,
"AppleTV6,2": DeviceModel.Gen4K,
}
# Incomplete list here!
_VERSION_LIST = {
"17J586": "13.0",
"17K82": "13.2",
"17K449": "13.3",
"17K795": "13.3.1",
}
def lookup_model(identifier):
"""Lookup device model from identifier."""
return _MODEL_LIST.get(identifier, DeviceModel.Unknown)
def lookup_version(build):
"""Lookup OS version from build."""
if not build:
return None
version = _VERSION_LIST.get(build)
if version:
return version
match = re.match(r"^(\d+)[A-Z]", build)
if match:
base = int(match.groups()[0])
# 17A123 corresponds to tvOS 13.x, 16A123 to tvOS 12.x and so on
return str(base - 4) + ".x"
return None
## Instruction:
Add tvOS 13.4 build number
## Code After:
"""Lookup methods for device data."""
import re
from pyatv.const import DeviceModel
_MODEL_LIST = {
"AppleTV2,1": DeviceModel.Gen2,
"AppleTV3,1": DeviceModel.Gen3,
"AppleTV3,2": DeviceModel.Gen3,
"AppleTV5,3": DeviceModel.Gen4,
"AppleTV6,2": DeviceModel.Gen4K,
}
# Incomplete list here!
_VERSION_LIST = {
"17J586": "13.0",
"17K82": "13.2",
"17K449": "13.3",
"17K795": "13.3.1",
"17L256": "13.4",
}
def lookup_model(identifier):
"""Lookup device model from identifier."""
return _MODEL_LIST.get(identifier, DeviceModel.Unknown)
def lookup_version(build):
"""Lookup OS version from build."""
if not build:
return None
version = _VERSION_LIST.get(build)
if version:
return version
match = re.match(r"^(\d+)[A-Z]", build)
if match:
base = int(match.groups()[0])
# 17A123 corresponds to tvOS 13.x, 16A123 to tvOS 12.x and so on
return str(base - 4) + ".x"
return None
|
"""Lookup methods for device data."""
import re
from pyatv.const import DeviceModel
_MODEL_LIST = {
"AppleTV2,1": DeviceModel.Gen2,
"AppleTV3,1": DeviceModel.Gen3,
"AppleTV3,2": DeviceModel.Gen3,
"AppleTV5,3": DeviceModel.Gen4,
"AppleTV6,2": DeviceModel.Gen4K,
}
# Incomplete list here!
_VERSION_LIST = {
"17J586": "13.0",
"17K82": "13.2",
"17K449": "13.3",
"17K795": "13.3.1",
+ "17L256": "13.4",
}
def lookup_model(identifier):
"""Lookup device model from identifier."""
return _MODEL_LIST.get(identifier, DeviceModel.Unknown)
def lookup_version(build):
"""Lookup OS version from build."""
if not build:
return None
version = _VERSION_LIST.get(build)
if version:
return version
match = re.match(r"^(\d+)[A-Z]", build)
if match:
base = int(match.groups()[0])
# 17A123 corresponds to tvOS 13.x, 16A123 to tvOS 12.x and so on
return str(base - 4) + ".x"
return None
|
d60dea7b7b1fb073eef2c350177b3920f32de748
|
6/e6.py
|
6/e6.py
|
def sum_seq_squares(n):
return (n * (n+1) * ((2*n)+1)) / 6
def sum_seq(n):
return (n * (n + 1)) / 2
def main():
sum_seq_sq_100 = sum_seq_squares(100)
sum_seq_100 = sum_seq(100)
sq_sum_seq_100 = sum_seq_100**2
diff = sq_sum_seq_100 - sum_seq_sq_100
print('diff is {0}'.format(diff))
if __name__ == '__main__':
main()
|
def sum_seq_squares(n):
return (n * (n+1) * ((2*n)+1)) / 6
# http://www.regentsprep.org/regents/math/algtrig/ATP2/ArithSeq.htm
def sum_seq(n):
return (n * (n + 1)) / 2
def main():
sum_seq_sq_100 = sum_seq_squares(100)
sum_seq_100 = sum_seq(100)
sq_sum_seq_100 = sum_seq_100**2
diff = sq_sum_seq_100 - sum_seq_sq_100
print('diff is {0}'.format(diff))
if __name__ == '__main__':
main()
|
Add comments indicating source of formulae..
|
Add comments indicating source of formulae..
|
Python
|
mit
|
cveazey/ProjectEuler,cveazey/ProjectEuler
|
-
def sum_seq_squares(n):
return (n * (n+1) * ((2*n)+1)) / 6
+ # http://www.regentsprep.org/regents/math/algtrig/ATP2/ArithSeq.htm
def sum_seq(n):
return (n * (n + 1)) / 2
def main():
sum_seq_sq_100 = sum_seq_squares(100)
sum_seq_100 = sum_seq(100)
sq_sum_seq_100 = sum_seq_100**2
diff = sq_sum_seq_100 - sum_seq_sq_100
print('diff is {0}'.format(diff))
if __name__ == '__main__':
main()
|
Add comments indicating source of formulae..
|
## Code Before:
def sum_seq_squares(n):
return (n * (n+1) * ((2*n)+1)) / 6
def sum_seq(n):
return (n * (n + 1)) / 2
def main():
sum_seq_sq_100 = sum_seq_squares(100)
sum_seq_100 = sum_seq(100)
sq_sum_seq_100 = sum_seq_100**2
diff = sq_sum_seq_100 - sum_seq_sq_100
print('diff is {0}'.format(diff))
if __name__ == '__main__':
main()
## Instruction:
Add comments indicating source of formulae..
## Code After:
def sum_seq_squares(n):
return (n * (n+1) * ((2*n)+1)) / 6
# http://www.regentsprep.org/regents/math/algtrig/ATP2/ArithSeq.htm
def sum_seq(n):
return (n * (n + 1)) / 2
def main():
sum_seq_sq_100 = sum_seq_squares(100)
sum_seq_100 = sum_seq(100)
sq_sum_seq_100 = sum_seq_100**2
diff = sq_sum_seq_100 - sum_seq_sq_100
print('diff is {0}'.format(diff))
if __name__ == '__main__':
main()
|
-
def sum_seq_squares(n):
return (n * (n+1) * ((2*n)+1)) / 6
+ # http://www.regentsprep.org/regents/math/algtrig/ATP2/ArithSeq.htm
def sum_seq(n):
return (n * (n + 1)) / 2
def main():
sum_seq_sq_100 = sum_seq_squares(100)
sum_seq_100 = sum_seq(100)
sq_sum_seq_100 = sum_seq_100**2
diff = sq_sum_seq_100 - sum_seq_sq_100
print('diff is {0}'.format(diff))
if __name__ == '__main__':
main()
|
d45c14c1ee3275212535a98db161a0dbd23ed292
|
src/hue/BridgeScanner.py
|
src/hue/BridgeScanner.py
|
__author__ = 'hira'
|
import requests
import json
def get_bridge_ips():
res = requests.get('http://www.meethue.com/api/nupnp').text
data = json.loads(res)
return [map['internalipaddress'] for map in data]
print(get_bridge_ips())
|
Enable finding Hue bridge on network.
|
Enable finding Hue bridge on network.
|
Python
|
mit
|
almichest/hue_app,almichest/hue_app
|
- __author__ = 'hira'
+ import requests
+ import json
+ def get_bridge_ips():
+ res = requests.get('http://www.meethue.com/api/nupnp').text
+ data = json.loads(res)
+ return [map['internalipaddress'] for map in data]
+
+ print(get_bridge_ips())
+
|
Enable finding Hue bridge on network.
|
## Code Before:
__author__ = 'hira'
## Instruction:
Enable finding Hue bridge on network.
## Code After:
import requests
import json
def get_bridge_ips():
res = requests.get('http://www.meethue.com/api/nupnp').text
data = json.loads(res)
return [map['internalipaddress'] for map in data]
print(get_bridge_ips())
|
- __author__ = 'hira'
+ import requests
+ import json
+
+ def get_bridge_ips():
+ res = requests.get('http://www.meethue.com/api/nupnp').text
+ data = json.loads(res)
+ return [map['internalipaddress'] for map in data]
+
+ print(get_bridge_ips())
|
66e2e3bee9996a0cb55c7b802a638e42bc72ccbe
|
zazu/plugins/astyle_styler.py
|
zazu/plugins/astyle_styler.py
|
"""astyle plugin for zazu"""
import zazu.styler
import zazu.util
__author__ = "Nicholas Wiles"
__copyright__ = "Copyright 2017"
class AstyleStyler(zazu.styler.Styler):
"""Astyle plugin for code styling"""
def style_file(self, file, verbose, dry_run):
"""Run astyle on a file"""
args = ['astyle', '-v'] + self.options
if dry_run:
args.append('--dry-run')
args.append(file)
output = zazu.util.check_output(args)
fix_needed = output.startswith('Formatted ')
return file, fix_needed
@staticmethod
def default_extensions():
return ['*.c',
'*.cc',
'*.cpp',
'*.h',
'*.hpp',
'*.java']
@staticmethod
def type():
return 'astyle'
|
"""astyle plugin for zazu"""
import zazu.styler
import zazu.util
__author__ = "Nicholas Wiles"
__copyright__ = "Copyright 2017"
class AstyleStyler(zazu.styler.Styler):
"""Astyle plugin for code styling"""
def style_file(self, file, verbose, dry_run):
"""Run astyle on a file"""
args = ['astyle', '--formatted'] + self.options
if dry_run:
args.append('--dry-run')
args.append(file)
output = zazu.util.check_output(args)
return file, bool(output)
@staticmethod
def default_extensions():
return ['*.c',
'*.cc',
'*.cpp',
'*.h',
'*.hpp',
'*.java']
@staticmethod
def type():
return 'astyle'
|
Use formatted flag on astyle to simplify code
|
Use formatted flag on astyle to simplify code
|
Python
|
mit
|
stopthatcow/zazu,stopthatcow/zazu
|
"""astyle plugin for zazu"""
import zazu.styler
import zazu.util
__author__ = "Nicholas Wiles"
__copyright__ = "Copyright 2017"
class AstyleStyler(zazu.styler.Styler):
"""Astyle plugin for code styling"""
def style_file(self, file, verbose, dry_run):
"""Run astyle on a file"""
- args = ['astyle', '-v'] + self.options
+ args = ['astyle', '--formatted'] + self.options
if dry_run:
args.append('--dry-run')
args.append(file)
output = zazu.util.check_output(args)
+ return file, bool(output)
- fix_needed = output.startswith('Formatted ')
- return file, fix_needed
@staticmethod
def default_extensions():
return ['*.c',
'*.cc',
'*.cpp',
'*.h',
'*.hpp',
'*.java']
@staticmethod
def type():
return 'astyle'
|
Use formatted flag on astyle to simplify code
|
## Code Before:
"""astyle plugin for zazu"""
import zazu.styler
import zazu.util
__author__ = "Nicholas Wiles"
__copyright__ = "Copyright 2017"
class AstyleStyler(zazu.styler.Styler):
"""Astyle plugin for code styling"""
def style_file(self, file, verbose, dry_run):
"""Run astyle on a file"""
args = ['astyle', '-v'] + self.options
if dry_run:
args.append('--dry-run')
args.append(file)
output = zazu.util.check_output(args)
fix_needed = output.startswith('Formatted ')
return file, fix_needed
@staticmethod
def default_extensions():
return ['*.c',
'*.cc',
'*.cpp',
'*.h',
'*.hpp',
'*.java']
@staticmethod
def type():
return 'astyle'
## Instruction:
Use formatted flag on astyle to simplify code
## Code After:
"""astyle plugin for zazu"""
import zazu.styler
import zazu.util
__author__ = "Nicholas Wiles"
__copyright__ = "Copyright 2017"
class AstyleStyler(zazu.styler.Styler):
"""Astyle plugin for code styling"""
def style_file(self, file, verbose, dry_run):
"""Run astyle on a file"""
args = ['astyle', '--formatted'] + self.options
if dry_run:
args.append('--dry-run')
args.append(file)
output = zazu.util.check_output(args)
return file, bool(output)
@staticmethod
def default_extensions():
return ['*.c',
'*.cc',
'*.cpp',
'*.h',
'*.hpp',
'*.java']
@staticmethod
def type():
return 'astyle'
|
"""astyle plugin for zazu"""
import zazu.styler
import zazu.util
__author__ = "Nicholas Wiles"
__copyright__ = "Copyright 2017"
class AstyleStyler(zazu.styler.Styler):
"""Astyle plugin for code styling"""
def style_file(self, file, verbose, dry_run):
"""Run astyle on a file"""
- args = ['astyle', '-v'] + self.options
? ^
+ args = ['astyle', '--formatted'] + self.options
? ^^^^^^^^^^
if dry_run:
args.append('--dry-run')
args.append(file)
output = zazu.util.check_output(args)
+ return file, bool(output)
- fix_needed = output.startswith('Formatted ')
- return file, fix_needed
@staticmethod
def default_extensions():
return ['*.c',
'*.cc',
'*.cpp',
'*.h',
'*.hpp',
'*.java']
@staticmethod
def type():
return 'astyle'
|
f30a560db83d8a7ac87685c69f5b519faaa929fa
|
project_issue_department/__openerp__.py
|
project_issue_department/__openerp__.py
|
{
'name': 'Project Issue with Department',
'version': '1.1',
"category": "Project Management",
'description': """\
Add Department field to Project Issues.
Selecting a Project for an issue will automatically populate this with the
Project's defined Department.
""",
'author': 'Daniel Reis',
'website': '[email protected]',
'depends': [
'project_issue',
'project_department',
],
'update_xml': [
'project_issue_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False,
'auto_install': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
{
'name': 'Project Issue with Department',
'version': '1.1',
"category": "Project Management",
'description': """\
Add Department field to Project Issues.
Selecting a Project for an issue will automatically populate this with the
Project's defined Department.
""",
'author': 'Daniel Reis',
'website': '[email protected]',
'depends': [
'project_issue',
'project_department',
],
'update_xml': [
'project_issue_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False,
'auto_install': True,
}
|
Fix pep8 to pass super checks
|
Fix pep8 to pass super checks
|
Python
|
agpl-3.0
|
OCA/department,Antiun/department,acsone/department,kmee/department,Endika/department
|
{
'name': 'Project Issue with Department',
'version': '1.1',
"category": "Project Management",
'description': """\
Add Department field to Project Issues.
Selecting a Project for an issue will automatically populate this with the
Project's defined Department.
""",
'author': 'Daniel Reis',
'website': '[email protected]',
'depends': [
'project_issue',
'project_department',
- ],
+ ],
'update_xml': [
'project_issue_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False,
'auto_install': True,
}
- # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Fix pep8 to pass super checks
|
## Code Before:
{
'name': 'Project Issue with Department',
'version': '1.1',
"category": "Project Management",
'description': """\
Add Department field to Project Issues.
Selecting a Project for an issue will automatically populate this with the
Project's defined Department.
""",
'author': 'Daniel Reis',
'website': '[email protected]',
'depends': [
'project_issue',
'project_department',
],
'update_xml': [
'project_issue_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False,
'auto_install': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
## Instruction:
Fix pep8 to pass super checks
## Code After:
{
'name': 'Project Issue with Department',
'version': '1.1',
"category": "Project Management",
'description': """\
Add Department field to Project Issues.
Selecting a Project for an issue will automatically populate this with the
Project's defined Department.
""",
'author': 'Daniel Reis',
'website': '[email protected]',
'depends': [
'project_issue',
'project_department',
],
'update_xml': [
'project_issue_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False,
'auto_install': True,
}
|
{
'name': 'Project Issue with Department',
'version': '1.1',
"category": "Project Management",
'description': """\
Add Department field to Project Issues.
Selecting a Project for an issue will automatically populate this with the
Project's defined Department.
""",
'author': 'Daniel Reis',
'website': '[email protected]',
'depends': [
'project_issue',
'project_department',
- ],
? ----
+ ],
'update_xml': [
'project_issue_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False,
'auto_install': True,
}
- # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
e82474c0281aebe3b623a5be9adc0adf14fa58d5
|
ann_util.py
|
ann_util.py
|
import math
import random
def logistic(x):
return 1.0 / (1 + math.exp(-x))
def deriv_logistic(x):
lgst = logistic(x)
return (1 - lgst) * lgst
def hyperbolic_tangent(x):
return math.tanh(x)
def deriv_hyperbolic_tangent(x):
th = math.tanh(x)
return 1 - th * th
def between(min, max):
"""
Return a real random value between min and max.
"""
return random.random() * (max - min) + min
def make_matrix(N, M):
"""
Make an N rows by M columns matrix.
"""
return [[0 for i in range(M)] for i in range(N)]
|
import math
import pickle
import random
def logistic(x):
return 1.0 / (1 + math.exp(-x))
def deriv_logistic(x):
lgst = logistic(x)
return (1 - lgst) * lgst
def hyperbolic_tangent(x):
return math.tanh(x)
def deriv_hyperbolic_tangent(x):
th = math.tanh(x)
return 1 - th * th
def between(min, max):
"""
Return a real random value between min and max.
"""
return random.random() * (max - min) + min
def make_matrix(N, M):
"""
Make an N rows by M columns matrix.
"""
return [[0 for i in range(M)] for i in range(N)]
def serialize(nn, fname):
with open(fname, 'wb') as f:
pickle.dump(nn, f)
def deserialize(fname):
with open(fname, 'rb') as f:
nn = pickle.load(f)
return nn
|
Add pickle serialize and deserialize
|
Add pickle serialize and deserialize
|
Python
|
apache-2.0
|
Razvy000/ANN_Course
|
import math
+ import pickle
import random
def logistic(x):
return 1.0 / (1 + math.exp(-x))
def deriv_logistic(x):
lgst = logistic(x)
return (1 - lgst) * lgst
def hyperbolic_tangent(x):
return math.tanh(x)
def deriv_hyperbolic_tangent(x):
th = math.tanh(x)
return 1 - th * th
def between(min, max):
"""
Return a real random value between min and max.
"""
return random.random() * (max - min) + min
def make_matrix(N, M):
"""
Make an N rows by M columns matrix.
"""
return [[0 for i in range(M)] for i in range(N)]
+
+ def serialize(nn, fname):
+ with open(fname, 'wb') as f:
+ pickle.dump(nn, f)
+
+
+ def deserialize(fname):
+ with open(fname, 'rb') as f:
+ nn = pickle.load(f)
+ return nn
+
|
Add pickle serialize and deserialize
|
## Code Before:
import math
import random
def logistic(x):
return 1.0 / (1 + math.exp(-x))
def deriv_logistic(x):
lgst = logistic(x)
return (1 - lgst) * lgst
def hyperbolic_tangent(x):
return math.tanh(x)
def deriv_hyperbolic_tangent(x):
th = math.tanh(x)
return 1 - th * th
def between(min, max):
"""
Return a real random value between min and max.
"""
return random.random() * (max - min) + min
def make_matrix(N, M):
"""
Make an N rows by M columns matrix.
"""
return [[0 for i in range(M)] for i in range(N)]
## Instruction:
Add pickle serialize and deserialize
## Code After:
import math
import pickle
import random
def logistic(x):
return 1.0 / (1 + math.exp(-x))
def deriv_logistic(x):
lgst = logistic(x)
return (1 - lgst) * lgst
def hyperbolic_tangent(x):
return math.tanh(x)
def deriv_hyperbolic_tangent(x):
th = math.tanh(x)
return 1 - th * th
def between(min, max):
"""
Return a real random value between min and max.
"""
return random.random() * (max - min) + min
def make_matrix(N, M):
"""
Make an N rows by M columns matrix.
"""
return [[0 for i in range(M)] for i in range(N)]
def serialize(nn, fname):
with open(fname, 'wb') as f:
pickle.dump(nn, f)
def deserialize(fname):
with open(fname, 'rb') as f:
nn = pickle.load(f)
return nn
|
import math
+ import pickle
import random
def logistic(x):
return 1.0 / (1 + math.exp(-x))
def deriv_logistic(x):
lgst = logistic(x)
return (1 - lgst) * lgst
def hyperbolic_tangent(x):
return math.tanh(x)
def deriv_hyperbolic_tangent(x):
th = math.tanh(x)
return 1 - th * th
def between(min, max):
"""
Return a real random value between min and max.
"""
return random.random() * (max - min) + min
def make_matrix(N, M):
"""
Make an N rows by M columns matrix.
"""
return [[0 for i in range(M)] for i in range(N)]
+
+
+ def serialize(nn, fname):
+ with open(fname, 'wb') as f:
+ pickle.dump(nn, f)
+
+
+ def deserialize(fname):
+ with open(fname, 'rb') as f:
+ nn = pickle.load(f)
+ return nn
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.