| column | dtype | lengths / distinct values |
|---|---|---|
| commit | string | lengths 40–40 |
| old_file | string | lengths 4–106 |
| new_file | string | lengths 4–106 |
| old_contents | string | lengths 10–2.94k |
| new_contents | string | lengths 21–2.95k |
| subject | string | lengths 16–444 |
| message | string | lengths 17–2.63k |
| lang | string | 1 distinct value |
| license | string | 13 distinct values |
| repos | string | lengths 7–43k |
| ndiff | string | lengths 52–3.31k |
| instruction | string | lengths 16–444 |
| content | string | lengths 133–4.32k |
| diff | string | lengths 49–3.61k |
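Each record below pairs a file's contents before and after a commit with the commit subject and message, the repositories carrying the file, an `ndiff`, and a prompt-style rendering (`## Code Before` / `## Instruction` / `## Code After`). As a minimal sketch of how a dataset with this schema is typically consumed — the dataset ID `user/commit-instruct` below is a hypothetical placeholder, not a published path:

```python
# Sketch: loading and inspecting a commit dataset with this schema.
# "user/commit-instruct" is a placeholder ID, not a real dataset path.
from datasets import load_dataset

ds = load_dataset("user/commit-instruct", split="train")

for row in ds.select(range(3)):
    print(row["commit"][:8], row["old_file"], "->", row["new_file"])
    print("subject:", row["subject"])
    print(row["ndiff"][:200])  # character-level diff text, truncated for display
```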
d27b2d71a0e5f834d4758c67fa6e8ed342001a88
|
salt/output/__init__.py
|
salt/output/__init__.py
|
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
import salt.loader
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
if opts is None:
opts = {}
outputters = salt.loader.outputters(opts)
if not out in outputters:
outputters['pprint'](data)
outputters[out](data)
|
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
if opts is None:
opts = {}
outputters = salt.loader.outputters(opts)
if not out in outputters:
outputters['pprint'](data)
outputters[out](data)
|
Add some checks to output module
|
Add some checks to output module
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
+ # Import salt utils
import salt.loader
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
if opts is None:
opts = {}
outputters = salt.loader.outputters(opts)
if not out in outputters:
outputters['pprint'](data)
outputters[out](data)
|
Add some checks to output module
|
## Code Before:
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
import salt.loader
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
if opts is None:
opts = {}
outputters = salt.loader.outputters(opts)
if not out in outputters:
outputters['pprint'](data)
outputters[out](data)
## Instruction:
Add some checks to output module
## Code After:
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
if opts is None:
opts = {}
outputters = salt.loader.outputters(opts)
if not out in outputters:
outputters['pprint'](data)
outputters[out](data)
|
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
+ # Import salt utils
import salt.loader
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
if opts is None:
opts = {}
outputters = salt.loader.outputters(opts)
if not out in outputters:
outputters['pprint'](data)
outputters[out](data)
|
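One thing this commit does not change: when `out` is missing from the loaded outputters, both versions fall through to `outputters[out](data)` after the `pprint` fallback, which would raise `KeyError`. A defensive sketch (my variant, not Salt's actual code), with an early return and the idiomatic `not in` test:

```python
# Sketch of a defensive display_output; not Salt's actual implementation.
import salt.loader


def display_output(data, out, opts=None):
    '''
    Print the passed data using the desired output
    '''
    if opts is None:
        opts = {}
    outputters = salt.loader.outputters(opts)
    if out not in outputters:       # idiomatic form of `if not out in ...`
        outputters['pprint'](data)
        return                      # stop instead of falling through to a KeyError
    outputters[out](data)
```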
eebb736bf83c572b797931c571e7416223436461
|
homeassistant/components/light/insteon.py
|
homeassistant/components/light/insteon.py
|
from homeassistant.components.insteon import (INSTEON, InsteonToggleDevice)
def setup_platform(hass, config, add_devices, discovery_info=None):
""" Sets up the Insteon Hub light platform. """
devs = []
for device in INSTEON.devices:
if device.DeviceCategory == "Switched Lighting Control":
devs.append(InsteonToggleDevice(device))
add_devices(devs)
|
from homeassistant.components.insteon import (INSTEON, InsteonToggleDevice)
def setup_platform(hass, config, add_devices, discovery_info=None):
""" Sets up the Insteon Hub light platform. """
devs = []
for device in INSTEON.devices:
if device.DeviceCategory == "Switched Lighting Control":
devs.append(InsteonToggleDevice(device))
if device.DeviceCategory == "Dimmable Lighting Control":
devs.append(InsteonToggleDevice(device))
add_devices(devs)
|
Add ability to control dimmable sources
|
Add ability to control dimmable sources
|
Python
|
mit
|
emilhetty/home-assistant,Duoxilian/home-assistant,rohitranjan1991/home-assistant,toddeye/home-assistant,ct-23/home-assistant,florianholzapfel/home-assistant,kennedyshead/home-assistant,keerts/home-assistant,lukas-hetzenecker/home-assistant,jabesq/home-assistant,JshWright/home-assistant,open-homeautomation/home-assistant,molobrakos/home-assistant,dmeulen/home-assistant,qedi-r/home-assistant,coteyr/home-assistant,molobrakos/home-assistant,leppa/home-assistant,miniconfig/home-assistant,alexmogavero/home-assistant,Duoxilian/home-assistant,DavidLP/home-assistant,instantchow/home-assistant,mKeRix/home-assistant,Smart-Torvy/torvy-home-assistant,hmronline/home-assistant,LinuxChristian/home-assistant,Julian/home-assistant,varunr047/homefile,balloob/home-assistant,kyvinh/home-assistant,rohitranjan1991/home-assistant,aequitas/home-assistant,dmeulen/home-assistant,postlund/home-assistant,luxus/home-assistant,jawilson/home-assistant,auduny/home-assistant,Zac-HD/home-assistant,LinuxChristian/home-assistant,tchellomello/home-assistant,Duoxilian/home-assistant,Julian/home-assistant,mikaelboman/home-assistant,shaftoe/home-assistant,luxus/home-assistant,tboyce1/home-assistant,happyleavesaoc/home-assistant,w1ll1am23/home-assistant,robjohnson189/home-assistant,JshWright/home-assistant,nkgilley/home-assistant,stefan-jonasson/home-assistant,pschmitt/home-assistant,balloob/home-assistant,hmronline/home-assistant,molobrakos/home-assistant,Zac-HD/home-assistant,betrisey/home-assistant,robjohnson189/home-assistant,sffjunkie/home-assistant,mezz64/home-assistant,Smart-Torvy/torvy-home-assistant,varunr047/homefile,jnewland/home-assistant,keerts/home-assistant,tinloaf/home-assistant,sander76/home-assistant,robbiet480/home-assistant,tinloaf/home-assistant,deisi/home-assistant,leoc/home-assistant,ma314smith/home-assistant,mikaelboman/home-assistant,morphis/home-assistant,DavidLP/home-assistant,varunr047/homefile,robjohnson189/home-assistant,ma314smith/home-assistant,open-homeautomation/home-assistant,oandrew/home-assistant,stefan-jonasson/home-assistant,keerts/home-assistant,titilambert/home-assistant,ma314smith/home-assistant,ct-23/home-assistant,tboyce1/home-assistant,toddeye/home-assistant,philipbl/home-assistant,emilhetty/home-assistant,shaftoe/home-assistant,nnic/home-assistant,florianholzapfel/home-assistant,Danielhiversen/home-assistant,xifle/home-assistant,auduny/home-assistant,fbradyirl/home-assistant,philipbl/home-assistant,coteyr/home-assistant,LinuxChristian/home-assistant,jamespcole/home-assistant,mKeRix/home-assistant,alexmogavero/home-assistant,jaharkes/home-assistant,turbokongen/home-assistant,shaftoe/home-assistant,jnewland/home-assistant,betrisey/home-assistant,MungoRae/home-assistant,MungoRae/home-assistant,nugget/home-assistant,oandrew/home-assistant,PetePriority/home-assistant,devdelay/home-assistant,aoakeson/home-assistant,eagleamon/home-assistant,titilambert/home-assistant,nnic/home-assistant,rohitranjan1991/home-assistant,jaharkes/home-assistant,hmronline/home-assistant,tinloaf/home-assistant,devdelay/home-assistant,MungoRae/home-assistant,srcLurker/home-assistant,aronsky/home-assistant,kyvinh/home-assistant,tchellomello/home-assistant,tboyce021/home-assistant,florianholzapfel/home-assistant,Zac-HD/home-assistant,jamespcole/home-assistant,alexmogavero/home-assistant,nugget/home-assistant,eagleamon/home-assistant,alexmogavero/home-assistant,happyleavesaoc/home-assistant,Theb-1/home-assistant,HydrelioxGitHub/home-assistant,luxus/home-assistant,Zac-HD/home-assistant,eagleamon/home-assistant,leoc/home-
assistant,mikaelboman/home-assistant,deisi/home-assistant,postlund/home-assistant,mKeRix/home-assistant,ct-23/home-assistant,jawilson/home-assistant,leppa/home-assistant,auduny/home-assistant,happyleavesaoc/home-assistant,soldag/home-assistant,bdfoster/blumate,stefan-jonasson/home-assistant,leoc/home-assistant,instantchow/home-assistant,sffjunkie/home-assistant,sffjunkie/home-assistant,qedi-r/home-assistant,morphis/home-assistant,varunr047/homefile,jaharkes/home-assistant,betrisey/home-assistant,mKeRix/home-assistant,deisi/home-assistant,morphis/home-assistant,persandstrom/home-assistant,oandrew/home-assistant,MartinHjelmare/home-assistant,adrienbrault/home-assistant,Zyell/home-assistant,emilhetty/home-assistant,turbokongen/home-assistant,jabesq/home-assistant,emilhetty/home-assistant,srcLurker/home-assistant,Julian/home-assistant,HydrelioxGitHub/home-assistant,leoc/home-assistant,MartinHjelmare/home-assistant,Cinntax/home-assistant,hexxter/home-assistant,dmeulen/home-assistant,robjohnson189/home-assistant,aequitas/home-assistant,DavidLP/home-assistant,joopert/home-assistant,kennedyshead/home-assistant,philipbl/home-assistant,justyns/home-assistant,instantchow/home-assistant,w1ll1am23/home-assistant,FreekingDean/home-assistant,hexxter/home-assistant,Danielhiversen/home-assistant,betrisey/home-assistant,nnic/home-assistant,Cinntax/home-assistant,varunr047/homefile,mikaelboman/home-assistant,hmronline/home-assistant,Teagan42/home-assistant,mezz64/home-assistant,jabesq/home-assistant,keerts/home-assistant,sdague/home-assistant,morphis/home-assistant,xifle/home-assistant,devdelay/home-assistant,sander76/home-assistant,LinuxChristian/home-assistant,joopert/home-assistant,hmronline/home-assistant,kyvinh/home-assistant,sdague/home-assistant,eagleamon/home-assistant,Smart-Torvy/torvy-home-assistant,tboyce021/home-assistant,ewandor/home-assistant,bdfoster/blumate,ewandor/home-assistant,PetePriority/home-assistant,bdfoster/blumate,open-homeautomation/home-assistant,dmeulen/home-assistant,shaftoe/home-assistant,MungoRae/home-assistant,aequitas/home-assistant,ct-23/home-assistant,xifle/home-assistant,JshWright/home-assistant,LinuxChristian/home-assistant,kyvinh/home-assistant,partofthething/home-assistant,bdfoster/blumate,ewandor/home-assistant,Zyell/home-assistant,emilhetty/home-assistant,FreekingDean/home-assistant,miniconfig/home-assistant,coteyr/home-assistant,Teagan42/home-assistant,mikaelboman/home-assistant,Julian/home-assistant,philipbl/home-assistant,PetePriority/home-assistant,Theb-1/home-assistant,jnewland/home-assistant,ma314smith/home-assistant,GenericStudent/home-assistant,deisi/home-assistant,Smart-Torvy/torvy-home-assistant,nugget/home-assistant,soldag/home-assistant,home-assistant/home-assistant,MungoRae/home-assistant,deisi/home-assistant,robbiet480/home-assistant,sffjunkie/home-assistant,GenericStudent/home-assistant,aoakeson/home-assistant,justyns/home-assistant,xifle/home-assistant,oandrew/home-assistant,Zyell/home-assistant,justyns/home-assistant,sffjunkie/home-assistant,pschmitt/home-assistant,fbradyirl/home-assistant,miniconfig/home-assistant,hexxter/home-assistant,srcLurker/home-assistant,open-homeautomation/home-assistant,srcLurker/home-assistant,JshWright/home-assistant,aronsky/home-assistant,home-assistant/home-assistant,nkgilley/home-assistant,jamespcole/home-assistant,bdfoster/blumate,miniconfig/home-assistant,fbradyirl/home-assistant,Duoxilian/home-assistant,devdelay/home-assistant,jaharkes/home-assistant,Theb-1/home-assistant,stefan-jonasson/home-assistant,florianholzapf
el/home-assistant,balloob/home-assistant,ct-23/home-assistant,tboyce1/home-assistant,tboyce1/home-assistant,happyleavesaoc/home-assistant,hexxter/home-assistant,lukas-hetzenecker/home-assistant,adrienbrault/home-assistant,HydrelioxGitHub/home-assistant,partofthething/home-assistant,persandstrom/home-assistant,MartinHjelmare/home-assistant,aoakeson/home-assistant,persandstrom/home-assistant
|
from homeassistant.components.insteon import (INSTEON, InsteonToggleDevice)
def setup_platform(hass, config, add_devices, discovery_info=None):
""" Sets up the Insteon Hub light platform. """
devs = []
for device in INSTEON.devices:
if device.DeviceCategory == "Switched Lighting Control":
devs.append(InsteonToggleDevice(device))
+ if device.DeviceCategory == "Dimmable Lighting Control":
+ devs.append(InsteonToggleDevice(device))
add_devices(devs)
|
Add ability to control dimmable sources
|
## Code Before:
from homeassistant.components.insteon import (INSTEON, InsteonToggleDevice)
def setup_platform(hass, config, add_devices, discovery_info=None):
""" Sets up the Insteon Hub light platform. """
devs = []
for device in INSTEON.devices:
if device.DeviceCategory == "Switched Lighting Control":
devs.append(InsteonToggleDevice(device))
add_devices(devs)
## Instruction:
Add ability to control dimmable sources
## Code After:
from homeassistant.components.insteon import (INSTEON, InsteonToggleDevice)
def setup_platform(hass, config, add_devices, discovery_info=None):
""" Sets up the Insteon Hub light platform. """
devs = []
for device in INSTEON.devices:
if device.DeviceCategory == "Switched Lighting Control":
devs.append(InsteonToggleDevice(device))
if device.DeviceCategory == "Dimmable Lighting Control":
devs.append(InsteonToggleDevice(device))
add_devices(devs)
|
from homeassistant.components.insteon import (INSTEON, InsteonToggleDevice)
def setup_platform(hass, config, add_devices, discovery_info=None):
""" Sets up the Insteon Hub light platform. """
devs = []
for device in INSTEON.devices:
if device.DeviceCategory == "Switched Lighting Control":
devs.append(InsteonToggleDevice(device))
+ if device.DeviceCategory == "Dimmable Lighting Control":
+ devs.append(InsteonToggleDevice(device))
add_devices(devs)
|
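Since the new branch appends the same `InsteonToggleDevice` wrapper as the existing one, the two `if` statements can be folded into a single membership test. A sketch equivalent to the "after" code; `LIGHT_CATEGORIES` is my name, not Home Assistant's:

```python
# Sketch: fold the two identical branches into one membership test.
from homeassistant.components.insteon import INSTEON, InsteonToggleDevice

LIGHT_CATEGORIES = ("Switched Lighting Control", "Dimmable Lighting Control")


def setup_platform(hass, config, add_devices, discovery_info=None):
    """ Sets up the Insteon Hub light platform. """
    add_devices([InsteonToggleDevice(device)
                 for device in INSTEON.devices
                 if device.DeviceCategory in LIGHT_CATEGORIES])
```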
c6926dda0a9e6e1515721e54788c29d0ef8b58a4
|
tests/test_sqlcompletion.py
|
tests/test_sqlcompletion.py
|
from pgcli.packages.sqlcompletion import suggest_type
def test_select_suggests_cols_with_table_scope():
suggestion = suggest_type('SELECT FROM tabl', 'SELECT ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_lparen_suggest_cols():
suggestion = suggest_type('SELECT MAX( FROM tbl', 'SELECT MAX(')
assert suggestion == ('columns', ['tbl'])
def test_select_suggest_cols_and_funcs():
suggestion = suggest_type('SELECT ', 'SELECT ')
assert suggestion == ('columns-and-functions', [])
def test_from_suggest_tables():
suggestion = suggest_type('SELECT * FROM ', 'SELECT * FROM ')
assert suggestion == ('tables', [])
def test_distinct_suggest_cols():
suggestion = suggest_type('SELECT DISTINCT ', 'SELECT DISTINCT ')
assert suggestion == ('columns', [])
def test_multiple_cols_suggest_cols():
suggestion = suggest_type('SELECT a, b, FROM tbl', 'SELECT a, b,')
assert suggestion == ('columns-and-functions', ['tbl'])
def test_multiple_tables_suggest_tables():
suggestion = suggest_type('SELECT a, b FROM tbl1, ',
'SELECT a, b FROM tbl1, ')
assert suggestion == ('tables', [])
|
from pgcli.packages.sqlcompletion import suggest_type
def test_select_suggests_cols_with_table_scope():
suggestion = suggest_type('SELECT FROM tabl', 'SELECT ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_where_suggests_columns_functions():
suggestion = suggest_type('SELECT * FROM tabl WHERE ',
'SELECT * FROM tabl WHERE ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_lparen_suggests_cols():
suggestion = suggest_type('SELECT MAX( FROM tbl', 'SELECT MAX(')
assert suggestion == ('columns', ['tbl'])
def test_select_suggests_cols_and_funcs():
suggestion = suggest_type('SELECT ', 'SELECT ')
assert suggestion == ('columns-and-functions', [])
def test_from_suggests_tables():
suggestion = suggest_type('SELECT * FROM ', 'SELECT * FROM ')
assert suggestion == ('tables', [])
def test_distinct_suggests_cols():
suggestion = suggest_type('SELECT DISTINCT ', 'SELECT DISTINCT ')
assert suggestion == ('columns', [])
def test_col_comma_suggests_cols():
suggestion = suggest_type('SELECT a, b, FROM tbl', 'SELECT a, b,')
assert suggestion == ('columns-and-functions', ['tbl'])
def test_table_comma_suggests_tables():
suggestion = suggest_type('SELECT a, b FROM tbl1, ',
'SELECT a, b FROM tbl1, ')
assert suggestion == ('tables', [])
def test_into_suggests_tables():
suggestion = suggest_type('INSERT INTO ',
'INSERT INTO ')
assert suggestion == ('tables', [])
|
Add a test for where clause and rename all tests functions.
|
Add a test for where clause and rename all tests functions.
|
Python
|
bsd-3-clause
|
thedrow/pgcli,d33tah/pgcli,n-someya/pgcli,bitmonk/pgcli,joewalnes/pgcli,yx91490/pgcli,TamasNo1/pgcli,MattOates/pgcli,TamasNo1/pgcli,j-bennet/pgcli,lk1ngaa7/pgcli,zhiyuanshi/pgcli,koljonen/pgcli,dbcli/vcli,dbcli/pgcli,lk1ngaa7/pgcli,dbcli/pgcli,j-bennet/pgcli,suzukaze/pgcli,janusnic/pgcli,darikg/pgcli,johshoff/pgcli,nosun/pgcli,w4ngyi/pgcli,darikg/pgcli,stuartquin/pgcli,dbcli/vcli,bitemyapp/pgcli,d33tah/pgcli,suzukaze/pgcli,bitemyapp/pgcli,koljonen/pgcli,thedrow/pgcli,joewalnes/pgcli,nosun/pgcli,yx91490/pgcli,johshoff/pgcli,janusnic/pgcli,n-someya/pgcli,bitmonk/pgcli,zhiyuanshi/pgcli,w4ngyi/pgcli,MattOates/pgcli
|
from pgcli.packages.sqlcompletion import suggest_type
def test_select_suggests_cols_with_table_scope():
suggestion = suggest_type('SELECT FROM tabl', 'SELECT ')
assert suggestion == ('columns-and-functions', ['tabl'])
+ def test_where_suggests_columns_functions():
+ suggestion = suggest_type('SELECT * FROM tabl WHERE ',
+ 'SELECT * FROM tabl WHERE ')
+ assert suggestion == ('columns-and-functions', ['tabl'])
+
- def test_lparen_suggest_cols():
+ def test_lparen_suggests_cols():
suggestion = suggest_type('SELECT MAX( FROM tbl', 'SELECT MAX(')
assert suggestion == ('columns', ['tbl'])
- def test_select_suggest_cols_and_funcs():
+ def test_select_suggests_cols_and_funcs():
suggestion = suggest_type('SELECT ', 'SELECT ')
assert suggestion == ('columns-and-functions', [])
- def test_from_suggest_tables():
+ def test_from_suggests_tables():
suggestion = suggest_type('SELECT * FROM ', 'SELECT * FROM ')
assert suggestion == ('tables', [])
- def test_distinct_suggest_cols():
+ def test_distinct_suggests_cols():
suggestion = suggest_type('SELECT DISTINCT ', 'SELECT DISTINCT ')
assert suggestion == ('columns', [])
- def test_multiple_cols_suggest_cols():
+ def test_col_comma_suggests_cols():
suggestion = suggest_type('SELECT a, b, FROM tbl', 'SELECT a, b,')
assert suggestion == ('columns-and-functions', ['tbl'])
- def test_multiple_tables_suggest_tables():
+ def test_table_comma_suggests_tables():
suggestion = suggest_type('SELECT a, b FROM tbl1, ',
'SELECT a, b FROM tbl1, ')
assert suggestion == ('tables', [])
+ def test_into_suggests_tables():
+ suggestion = suggest_type('INSERT INTO ',
+ 'INSERT INTO ')
+ assert suggestion == ('tables', [])
+
|
Add a test for where clause and rename all tests functions.
|
## Code Before:
from pgcli.packages.sqlcompletion import suggest_type
def test_select_suggests_cols_with_table_scope():
suggestion = suggest_type('SELECT FROM tabl', 'SELECT ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_lparen_suggest_cols():
suggestion = suggest_type('SELECT MAX( FROM tbl', 'SELECT MAX(')
assert suggestion == ('columns', ['tbl'])
def test_select_suggest_cols_and_funcs():
suggestion = suggest_type('SELECT ', 'SELECT ')
assert suggestion == ('columns-and-functions', [])
def test_from_suggest_tables():
suggestion = suggest_type('SELECT * FROM ', 'SELECT * FROM ')
assert suggestion == ('tables', [])
def test_distinct_suggest_cols():
suggestion = suggest_type('SELECT DISTINCT ', 'SELECT DISTINCT ')
assert suggestion == ('columns', [])
def test_multiple_cols_suggest_cols():
suggestion = suggest_type('SELECT a, b, FROM tbl', 'SELECT a, b,')
assert suggestion == ('columns-and-functions', ['tbl'])
def test_multiple_tables_suggest_tables():
suggestion = suggest_type('SELECT a, b FROM tbl1, ',
'SELECT a, b FROM tbl1, ')
assert suggestion == ('tables', [])
## Instruction:
Add a test for where clause and rename all tests functions.
## Code After:
from pgcli.packages.sqlcompletion import suggest_type
def test_select_suggests_cols_with_table_scope():
suggestion = suggest_type('SELECT FROM tabl', 'SELECT ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_where_suggests_columns_functions():
suggestion = suggest_type('SELECT * FROM tabl WHERE ',
'SELECT * FROM tabl WHERE ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_lparen_suggests_cols():
suggestion = suggest_type('SELECT MAX( FROM tbl', 'SELECT MAX(')
assert suggestion == ('columns', ['tbl'])
def test_select_suggests_cols_and_funcs():
suggestion = suggest_type('SELECT ', 'SELECT ')
assert suggestion == ('columns-and-functions', [])
def test_from_suggests_tables():
suggestion = suggest_type('SELECT * FROM ', 'SELECT * FROM ')
assert suggestion == ('tables', [])
def test_distinct_suggests_cols():
suggestion = suggest_type('SELECT DISTINCT ', 'SELECT DISTINCT ')
assert suggestion == ('columns', [])
def test_col_comma_suggests_cols():
suggestion = suggest_type('SELECT a, b, FROM tbl', 'SELECT a, b,')
assert suggestion == ('columns-and-functions', ['tbl'])
def test_table_comma_suggests_tables():
suggestion = suggest_type('SELECT a, b FROM tbl1, ',
'SELECT a, b FROM tbl1, ')
assert suggestion == ('tables', [])
def test_into_suggests_tables():
suggestion = suggest_type('INSERT INTO ',
'INSERT INTO ')
assert suggestion == ('tables', [])
|
from pgcli.packages.sqlcompletion import suggest_type
def test_select_suggests_cols_with_table_scope():
suggestion = suggest_type('SELECT FROM tabl', 'SELECT ')
assert suggestion == ('columns-and-functions', ['tabl'])
+ def test_where_suggests_columns_functions():
+ suggestion = suggest_type('SELECT * FROM tabl WHERE ',
+ 'SELECT * FROM tabl WHERE ')
+ assert suggestion == ('columns-and-functions', ['tabl'])
+
- def test_lparen_suggest_cols():
+ def test_lparen_suggests_cols():
? +
suggestion = suggest_type('SELECT MAX( FROM tbl', 'SELECT MAX(')
assert suggestion == ('columns', ['tbl'])
- def test_select_suggest_cols_and_funcs():
+ def test_select_suggests_cols_and_funcs():
? +
suggestion = suggest_type('SELECT ', 'SELECT ')
assert suggestion == ('columns-and-functions', [])
- def test_from_suggest_tables():
+ def test_from_suggests_tables():
? +
suggestion = suggest_type('SELECT * FROM ', 'SELECT * FROM ')
assert suggestion == ('tables', [])
- def test_distinct_suggest_cols():
+ def test_distinct_suggests_cols():
? +
suggestion = suggest_type('SELECT DISTINCT ', 'SELECT DISTINCT ')
assert suggestion == ('columns', [])
- def test_multiple_cols_suggest_cols():
? ^^ ----- ^^
+ def test_col_comma_suggests_cols():
? ^^ ^^^ +
suggestion = suggest_type('SELECT a, b, FROM tbl', 'SELECT a, b,')
assert suggestion == ('columns-and-functions', ['tbl'])
- def test_multiple_tables_suggest_tables():
? --------- ^
+ def test_table_comma_suggests_tables():
? ^^^^^^ +
suggestion = suggest_type('SELECT a, b FROM tbl1, ',
'SELECT a, b FROM tbl1, ')
assert suggestion == ('tables', [])
+
+ def test_into_suggests_tables():
+ suggestion = suggest_type('INSERT INTO ',
+ 'INSERT INTO ')
+ assert suggestion == ('tables', [])
|
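The `?` lines in the diff column above (e.g. beneath the renamed test functions) are intraline hints from Python's `difflib.ndiff`: `+`, `-`, and `^` markers aligned under the characters that were inserted, deleted, or changed between a `-`/`+` pair. A small reproduction:

```python
# Reproduce the '-' / '+' / '?' hint lines seen in the ndiff and diff columns.
import difflib

old = ["def test_from_suggest_tables():\n"]
new = ["def test_from_suggests_tables():\n"]

print("".join(difflib.ndiff(old, new)))
# - def test_from_suggest_tables():
# + def test_from_suggests_tables():
# ?                      +
```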
8fa346532068aadf510ebcc1ef795527f7b68597
|
frigg_worker/api.py
|
frigg_worker/api.py
|
import logging
import socket
import requests
logger = logging.getLogger(__name__)
class APIWrapper(object):
def __init__(self, options):
self.token = options['hq_token']
self.url = options['hq_url']
@property
def headers(self):
return {
'content-type': 'application/json',
'FRIGG_WORKER_TOKEN': self.token,
'x-frigg-worker-host': socket.getfqdn()
}
def get(self, url):
return requests.post(url, headers=self.headers)
def post(self, url, data):
return requests.post(url, data=data, headers=self.headers)
def report_run(self, endpoint, build_id, build):
response = self.post(self.url, data=build)
logger.info('Reported build to hq, hq response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
if response.status_code != 200:
logger.error('Report of build failed, response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
with open('build-{0}-hq-response.html'.format(build_id), 'w') as f:
f.write(response.text)
return response
|
import logging
import socket
import requests
logger = logging.getLogger(__name__)
class APIWrapper(object):
def __init__(self, options):
self.token = options['hq_token']
self.url = options['hq_url']
@property
def headers(self):
return {
'content-type': 'application/json',
'FRIGG_WORKER_TOKEN': self.token,
'x-frigg-worker-token': self.token,
'x-frigg-worker-host': socket.getfqdn()
}
def get(self, url):
return requests.post(url, headers=self.headers)
def post(self, url, data):
return requests.post(url, data=data, headers=self.headers)
def report_run(self, endpoint, build_id, build):
response = self.post(self.url, data=build)
logger.info('Reported build to hq, hq response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
if response.status_code != 200:
logger.error('Report of build failed, response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
with open('build-{0}-hq-response.html'.format(build_id), 'w') as f:
f.write(response.text)
return response
|
Add x-frigg-worker-token header to hq requests
|
fix: Add x-frigg-worker-token header to hq requests
This will in time be to remove the FRIGG_WORKER_TOKEN header.
|
Python
|
mit
|
frigg/frigg-worker
|
import logging
import socket
import requests
logger = logging.getLogger(__name__)
class APIWrapper(object):
def __init__(self, options):
self.token = options['hq_token']
self.url = options['hq_url']
@property
def headers(self):
return {
'content-type': 'application/json',
'FRIGG_WORKER_TOKEN': self.token,
+ 'x-frigg-worker-token': self.token,
'x-frigg-worker-host': socket.getfqdn()
}
def get(self, url):
return requests.post(url, headers=self.headers)
def post(self, url, data):
return requests.post(url, data=data, headers=self.headers)
def report_run(self, endpoint, build_id, build):
response = self.post(self.url, data=build)
logger.info('Reported build to hq, hq response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
if response.status_code != 200:
logger.error('Report of build failed, response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
with open('build-{0}-hq-response.html'.format(build_id), 'w') as f:
f.write(response.text)
return response
|
Add x-frigg-worker-token header to hq requests
|
## Code Before:
import logging
import socket
import requests
logger = logging.getLogger(__name__)
class APIWrapper(object):
def __init__(self, options):
self.token = options['hq_token']
self.url = options['hq_url']
@property
def headers(self):
return {
'content-type': 'application/json',
'FRIGG_WORKER_TOKEN': self.token,
'x-frigg-worker-host': socket.getfqdn()
}
def get(self, url):
return requests.post(url, headers=self.headers)
def post(self, url, data):
return requests.post(url, data=data, headers=self.headers)
def report_run(self, endpoint, build_id, build):
response = self.post(self.url, data=build)
logger.info('Reported build to hq, hq response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
if response.status_code != 200:
logger.error('Report of build failed, response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
with open('build-{0}-hq-response.html'.format(build_id), 'w') as f:
f.write(response.text)
return response
## Instruction:
Add x-frigg-worker-token header to hq requests
## Code After:
import logging
import socket
import requests
logger = logging.getLogger(__name__)
class APIWrapper(object):
def __init__(self, options):
self.token = options['hq_token']
self.url = options['hq_url']
@property
def headers(self):
return {
'content-type': 'application/json',
'FRIGG_WORKER_TOKEN': self.token,
'x-frigg-worker-token': self.token,
'x-frigg-worker-host': socket.getfqdn()
}
def get(self, url):
return requests.post(url, headers=self.headers)
def post(self, url, data):
return requests.post(url, data=data, headers=self.headers)
def report_run(self, endpoint, build_id, build):
response = self.post(self.url, data=build)
logger.info('Reported build to hq, hq response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
if response.status_code != 200:
logger.error('Report of build failed, response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
with open('build-{0}-hq-response.html'.format(build_id), 'w') as f:
f.write(response.text)
return response
|
import logging
import socket
import requests
logger = logging.getLogger(__name__)
class APIWrapper(object):
def __init__(self, options):
self.token = options['hq_token']
self.url = options['hq_url']
@property
def headers(self):
return {
'content-type': 'application/json',
'FRIGG_WORKER_TOKEN': self.token,
+ 'x-frigg-worker-token': self.token,
'x-frigg-worker-host': socket.getfqdn()
}
def get(self, url):
return requests.post(url, headers=self.headers)
def post(self, url, data):
return requests.post(url, data=data, headers=self.headers)
def report_run(self, endpoint, build_id, build):
response = self.post(self.url, data=build)
logger.info('Reported build to hq, hq response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
if response.status_code != 200:
logger.error('Report of build failed, response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
with open('build-{0}-hq-response.html'.format(build_id), 'w') as f:
f.write(response.text)
return response
|
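Incidental but worth flagging: in both versions of this file, `get()` issues `requests.post`. The dataset reproduces the source faithfully, so this is presumably a latent bug upstream. A sketch of the two helpers with `get()` actually performing a GET (an assumption about intent, not the project's fix):

```python
# Sketch with get() issuing a real GET; the snippet above posts from get().
import socket

import requests


class APIWrapper(object):
    def __init__(self, options):
        self.token = options['hq_token']
        self.url = options['hq_url']

    @property
    def headers(self):
        return {
            'content-type': 'application/json',
            'x-frigg-worker-token': self.token,
            'x-frigg-worker-host': socket.getfqdn(),
        }

    def get(self, url):
        return requests.get(url, headers=self.headers)    # was requests.post

    def post(self, url, data):
        return requests.post(url, data=data, headers=self.headers)
```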
974160117e2f36b12b52df13d4a35726a4ff0907
|
boxsdk/object/api_json_object.py
|
boxsdk/object/api_json_object.py
|
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
def __iter__(self):
return iter(self._response_object)
|
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
|
Remove redundant __iter__ from APIJsonObject base class
|
Remove redundant __iter__ from APIJsonObject base class
|
Python
|
apache-2.0
|
box/box-python-sdk
|
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
- def __iter__(self):
- return iter(self._response_object)
-
|
Remove redundant __iter__ from APIJsonObject base class
|
## Code Before:
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
def __iter__(self):
return iter(self._response_object)
## Instruction:
Remove redundant __iter__ from APIJsonObject base class
## Code After:
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
|
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
-
- def __iter__(self):
- return iter(self._response_object)
|
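`Mapping` declares `__getitem__`, `__len__`, and `__iter__` as abstract, so dropping `__iter__` here is only safe because a base class — presumably `BaseAPIJSONObject` — already provides it; otherwise the class could no longer be instantiated. A standalone illustration of the mixin contract (my example, not the SDK's classes):

```python
# Minimal Mapping implementation: supply the three abstract methods and the
# mixin derives __contains__, keys, items, values, get, __eq__ and __ne__.
try:
    from collections.abc import Mapping  # Python 3.3+
except ImportError:                      # fallback for older interpreters
    from collections import Mapping


class ResponseView(Mapping):
    def __init__(self, response_object):
        self._response_object = response_object

    def __getitem__(self, key):
        return self._response_object[key]

    def __len__(self):
        return len(self._response_object)

    def __iter__(self):
        return iter(self._response_object)


view = ResponseView({'type': 'file', 'id': '42'})
print('type' in view, dict(view.items()))  # True {'type': 'file', 'id': '42'}
```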
35fab0222543a2f32ef395bf6b622bad29533ceb
|
tests.py
|
tests.py
|
import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def setUp(self):
self.options = MockOptions()
def test_lazy_init(self):
launcher = Launcher(self.options, lazy=True)
self.assertIsNone(launcher.project)
def test_no_cwd(self):
project = {
'tabs': [],
}
launcher = Launcher(self.options, lazy=True)
args = launcher.build_args(project)
self.assertNotIn('--working-directory', args)
def test_cwd(self):
project = {
'cwd': '/home/test',
'tabs': [],
}
launcher = Launcher(self.options, lazy=True)
args = launcher.build_args(project)
idx = args.index('--working-directory')
self.assertEqual(args[idx + 1], project['cwd'])
def test_args_maximize(self):
project = {
'cwd': '~',
'tabs': [],
}
launcher = Launcher(self.options, lazy=True)
args = launcher.build_args(project)
self.assertIn('--maximize', args)
if __name__ == '__main__':
unittest.main()
|
import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def setUp(self):
self.options = MockOptions()
self.launcher = Launcher(self.options, lazy=True)
def test_lazy_init(self):
self.assertIsNone(self.launcher.project)
def test_no_cwd(self):
project = {
'tabs': [],
}
args = self.launcher.build_args(project)
self.assertNotIn('--working-directory', args)
def test_cwd(self):
project = {
'cwd': '/home/test',
'tabs': [],
}
args = self.launcher.build_args(project)
idx = args.index('--working-directory')
self.assertEqual(args[idx + 1], project['cwd'])
def test_args_maximize(self):
project = {
'cwd': '~',
'tabs': [],
}
args = self.launcher.build_args(project)
self.assertIn('--maximize', args)
if __name__ == '__main__':
unittest.main()
|
Create lazy launcher in setUp.
|
Create lazy launcher in setUp.
|
Python
|
mit
|
GoldenLine/gtlaunch
|
import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def setUp(self):
self.options = MockOptions()
+ self.launcher = Launcher(self.options, lazy=True)
def test_lazy_init(self):
- launcher = Launcher(self.options, lazy=True)
- self.assertIsNone(launcher.project)
+ self.assertIsNone(self.launcher.project)
def test_no_cwd(self):
project = {
'tabs': [],
}
- launcher = Launcher(self.options, lazy=True)
- args = launcher.build_args(project)
+ args = self.launcher.build_args(project)
self.assertNotIn('--working-directory', args)
def test_cwd(self):
project = {
'cwd': '/home/test',
'tabs': [],
}
- launcher = Launcher(self.options, lazy=True)
- args = launcher.build_args(project)
+ args = self.launcher.build_args(project)
idx = args.index('--working-directory')
self.assertEqual(args[idx + 1], project['cwd'])
def test_args_maximize(self):
project = {
'cwd': '~',
'tabs': [],
}
- launcher = Launcher(self.options, lazy=True)
- args = launcher.build_args(project)
+ args = self.launcher.build_args(project)
self.assertIn('--maximize', args)
if __name__ == '__main__':
unittest.main()
|
Create lazy launcher in setUp.
|
## Code Before:
import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def setUp(self):
self.options = MockOptions()
def test_lazy_init(self):
launcher = Launcher(self.options, lazy=True)
self.assertIsNone(launcher.project)
def test_no_cwd(self):
project = {
'tabs': [],
}
launcher = Launcher(self.options, lazy=True)
args = launcher.build_args(project)
self.assertNotIn('--working-directory', args)
def test_cwd(self):
project = {
'cwd': '/home/test',
'tabs': [],
}
launcher = Launcher(self.options, lazy=True)
args = launcher.build_args(project)
idx = args.index('--working-directory')
self.assertEqual(args[idx + 1], project['cwd'])
def test_args_maximize(self):
project = {
'cwd': '~',
'tabs': [],
}
launcher = Launcher(self.options, lazy=True)
args = launcher.build_args(project)
self.assertIn('--maximize', args)
if __name__ == '__main__':
unittest.main()
## Instruction:
Create lazy launcher in setUp.
## Code After:
import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def setUp(self):
self.options = MockOptions()
self.launcher = Launcher(self.options, lazy=True)
def test_lazy_init(self):
self.assertIsNone(self.launcher.project)
def test_no_cwd(self):
project = {
'tabs': [],
}
args = self.launcher.build_args(project)
self.assertNotIn('--working-directory', args)
def test_cwd(self):
project = {
'cwd': '/home/test',
'tabs': [],
}
args = self.launcher.build_args(project)
idx = args.index('--working-directory')
self.assertEqual(args[idx + 1], project['cwd'])
def test_args_maximize(self):
project = {
'cwd': '~',
'tabs': [],
}
args = self.launcher.build_args(project)
self.assertIn('--maximize', args)
if __name__ == '__main__':
unittest.main()
|
import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def setUp(self):
self.options = MockOptions()
+ self.launcher = Launcher(self.options, lazy=True)
def test_lazy_init(self):
- launcher = Launcher(self.options, lazy=True)
- self.assertIsNone(launcher.project)
+ self.assertIsNone(self.launcher.project)
? +++++
def test_no_cwd(self):
project = {
'tabs': [],
}
- launcher = Launcher(self.options, lazy=True)
- args = launcher.build_args(project)
+ args = self.launcher.build_args(project)
? +++++
self.assertNotIn('--working-directory', args)
def test_cwd(self):
project = {
'cwd': '/home/test',
'tabs': [],
}
- launcher = Launcher(self.options, lazy=True)
- args = launcher.build_args(project)
+ args = self.launcher.build_args(project)
? +++++
idx = args.index('--working-directory')
self.assertEqual(args[idx + 1], project['cwd'])
def test_args_maximize(self):
project = {
'cwd': '~',
'tabs': [],
}
- launcher = Launcher(self.options, lazy=True)
- args = launcher.build_args(project)
+ args = self.launcher.build_args(project)
? +++++
self.assertIn('--maximize', args)
if __name__ == '__main__':
unittest.main()
|
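The same shared-fixture refactor expressed in pytest style, for comparison (a sketch; the project itself stays on `unittest`):

```python
# Sketch: the setUp fixture from the commit above as a pytest fixture.
import pytest

from gtlaunch import Launcher


class MockOptions(object):
    def __init__(self):
        self.verbose = False
        self.config = ''
        self.project = ''


@pytest.fixture
def launcher():
    return Launcher(MockOptions(), lazy=True)


def test_lazy_init(launcher):
    assert launcher.project is None


def test_cwd(launcher):
    project = {'cwd': '/home/test', 'tabs': []}
    args = launcher.build_args(project)
    idx = args.index('--working-directory')
    assert args[idx + 1] == project['cwd']
```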
83bf2da38eb67abab9005495289eb97b58c3856a
|
ec2_instance_change_type.py
|
ec2_instance_change_type.py
|
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
print('Instance type changed to %s successfully' % instance.instance_type)
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli()
|
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
print('Instance type changed to %s successfully' % instance.instance_type)
else:
print('Current instance type is the same as new type. No need to do anything.')
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli()
|
Change message that detects instance type
|
Change message that detects instance type
|
Python
|
mit
|
thinhpham/aws-tools
|
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
- print('Instance type changed to %s successfully' % instance.instance_type)
+ print('Instance type changed to %s successfully' % instance.instance_type)
+ else:
+ print('Current instance type is the same as new type. No need to do anything.')
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli()
|
Change message that detects instance type
|
## Code Before:
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
print('Instance type changed to %s successfully' % instance.instance_type)
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli()
## Instruction:
Change message that detects instance type
## Code After:
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
print('Instance type changed to %s successfully' % instance.instance_type)
else:
print('Current instance type is the same as new type. No need to do anything.')
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli()
|
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
- print('Instance type changed to %s successfully' % instance.instance_type)
+ print('Instance type changed to %s successfully' % instance.instance_type)
? ++++
+ else:
+ print('Current instance type is the same as new type. No need to do anything.')
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli()
|
a27d3f76f194a9767022e37c83d5d18861552cfd
|
all-domains/tutorials/cracking-the-coding-interview/linked-lists-detect-a-cycle/solution.py
|
all-domains/tutorials/cracking-the-coding-interview/linked-lists-detect-a-cycle/solution.py
|
def has_cycle(node):
if hasattr(node, 'visited'):
return True
node.visited = True
if node.next is None:
return False
return has_cycle(node.next)
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
# print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
# print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
|
def has_cycle(node):
c = node
n = node.next
while n is not None:
if hasattr(c, 'visited'):
return True
c.visited = True
c = n.next
n = c.next
return False
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
|
Solve problem to detect linked list cycle
|
Solve problem to detect linked list cycle
https://www.hackerrank.com/challenges/ctci-linked-list-cycle
|
Python
|
mit
|
arvinsim/hackerrank-solutions
|
def has_cycle(node):
+ c = node
+ n = node.next
- if hasattr(node, 'visited'):
- return True
- node.visited = True
- if node.next is None:
+ while n is not None:
+ if hasattr(c, 'visited'):
+ return True
+ c.visited = True
+ c = n.next
+ n = c.next
- return False
+ return False
- return has_cycle(node.next)
+
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
- # print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
+ print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
- # print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
+ print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
|
Solve problem to detect linked list cycle
|
## Code Before:
def has_cycle(node):
if hasattr(node, 'visited'):
return True
node.visited = True
if node.next is None:
return False
return has_cycle(node.next)
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
# print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
# print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
## Instruction:
Solve problem to detect linked list cycle
## Code After:
def has_cycle(node):
c = node
n = node.next
while n is not None:
if hasattr(c, 'visited'):
return True
c.visited = True
c = n.next
n = c.next
return False
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
|
def has_cycle(node):
+ c = node
+ n = node.next
- if hasattr(node, 'visited'):
- return True
- node.visited = True
- if node.next is None:
+ while n is not None:
+ if hasattr(c, 'visited'):
+ return True
+ c.visited = True
+ c = n.next
+ n = c.next
- return False
? ----
+ return False
- return has_cycle(node.next)
+
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
- # print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
? --
+ print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
- # print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
? --
+ print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
|
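The accepted solution above marks nodes by attaching a `visited` attribute and then advances two nodes per iteration (`c = n.next; n = c.next`), which mutates the list and can raise `AttributeError` on a short acyclic list (for 1→2→None, `c` becomes `None` before `c.next` is read). The standard constant-space, non-mutating alternative is Floyd's tortoise and hare; a sketch that drops in for the same `Node` class:

```python
# Floyd's tortoise-and-hare: O(1) extra space, no node mutation.
def has_cycle(node):
    slow = fast = node
    while fast is not None and fast.next is not None:
        slow = slow.next           # one step
        fast = fast.next.next      # two steps
        if slow is fast:           # the pointers can only meet inside a cycle
            return True
    return False
```

Called exactly like the version above: `has_cycle(second_case)` still returns `True`, and the single-node `first_case` returns `False`.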
1f10a9c4cf5e00a8290adfe6ee34542e35ffab9b
|
OpenPNM/Algorithms/__init__.py
|
OpenPNM/Algorithms/__init__.py
|
from .__GenericAlgorithm__ import GenericAlgorithm
from .__GenericLinearTransport__ import GenericLinearTransport
from .__FickianDiffusion__ import FickianDiffusion
from .__FourierConduction__ import FourierConduction
from .__OhmicConduction__ import OhmicConduction
from .__StokesFlow__ import StokesFlow
from .__OrdinaryPercolation__ import OrdinaryPercolation
from .__InvasionPercolation__ import InvasionPercolation
from .__Drainage__ import Drainage
|
from .__GenericAlgorithm__ import GenericAlgorithm
from .__GenericLinearTransport__ import GenericLinearTransport
from .__FickianDiffusion__ import FickianDiffusion
from .__FourierConduction__ import FourierConduction
from .__OhmicConduction__ import OhmicConduction
from .__StokesFlow__ import StokesFlow
from .__OrdinaryPercolation__ import OrdinaryPercolation
from .__InvasionPercolation__ import InvasionPercolation
from .__Drainage__ import Drainage
from .__InvasionPercolationTT__ import InvasionPercolationTT
|
Add new percolation alg to init - to be renamed later
|
Add new percolation alg to init - to be renamed later
|
Python
|
mit
|
TomTranter/OpenPNM,PMEAL/OpenPNM
|
from .__GenericAlgorithm__ import GenericAlgorithm
from .__GenericLinearTransport__ import GenericLinearTransport
from .__FickianDiffusion__ import FickianDiffusion
from .__FourierConduction__ import FourierConduction
from .__OhmicConduction__ import OhmicConduction
from .__StokesFlow__ import StokesFlow
from .__OrdinaryPercolation__ import OrdinaryPercolation
from .__InvasionPercolation__ import InvasionPercolation
from .__Drainage__ import Drainage
+ from .__InvasionPercolationTT__ import InvasionPercolationTT
|
Add new percolation alg to init - to be renamed later
|
## Code Before:
from .__GenericAlgorithm__ import GenericAlgorithm
from .__GenericLinearTransport__ import GenericLinearTransport
from .__FickianDiffusion__ import FickianDiffusion
from .__FourierConduction__ import FourierConduction
from .__OhmicConduction__ import OhmicConduction
from .__StokesFlow__ import StokesFlow
from .__OrdinaryPercolation__ import OrdinaryPercolation
from .__InvasionPercolation__ import InvasionPercolation
from .__Drainage__ import Drainage
## Instruction:
Add new percolation alg to init - to be renamed later
## Code After:
from .__GenericAlgorithm__ import GenericAlgorithm
from .__GenericLinearTransport__ import GenericLinearTransport
from .__FickianDiffusion__ import FickianDiffusion
from .__FourierConduction__ import FourierConduction
from .__OhmicConduction__ import OhmicConduction
from .__StokesFlow__ import StokesFlow
from .__OrdinaryPercolation__ import OrdinaryPercolation
from .__InvasionPercolation__ import InvasionPercolation
from .__Drainage__ import Drainage
from .__InvasionPercolationTT__ import InvasionPercolationTT
|
from .__GenericAlgorithm__ import GenericAlgorithm
from .__GenericLinearTransport__ import GenericLinearTransport
from .__FickianDiffusion__ import FickianDiffusion
from .__FourierConduction__ import FourierConduction
from .__OhmicConduction__ import OhmicConduction
from .__StokesFlow__ import StokesFlow
from .__OrdinaryPercolation__ import OrdinaryPercolation
from .__InvasionPercolation__ import InvasionPercolation
from .__Drainage__ import Drainage
+ from .__InvasionPercolationTT__ import InvasionPercolationTT
|
53a93d4d1c0029e5d616e225b1b86672b1e0f7c8
|
falafel/mappers/hostname.py
|
falafel/mappers/hostname.py
|
from .. import Mapper, mapper
@mapper("facts")
@mapper("hostname")
class Hostname(Mapper):
def parse_content(self, content):
fqdn = None
if len(content) == 1:
fqdn = content[0].strip()
elif len(content) > 1:
for line in content:
if line.startswith('fqdn'):
fqdn = line.split()[-1]
self.fqdn = fqdn
self.hostname = fqdn.split(".")[0] if fqdn else None
self.domain = ".".join(fqdn.split(".")[1:]) if fqdn else None
|
from .. import Mapper, mapper
@mapper("hostname")
class Hostname(Mapper):
"""Class for parsing ``hostname`` command output.
Attributes:
fqdn: The fully qualified domain name of the host. The same to
``hostname`` when domain part is not set.
hostname: The hostname.
domain: The domain get from the fqdn.
"""
def parse_content(self, content):
raw = None
if len(content) == 1:
raw = content[0].strip()
self.fqdn = raw
self.hostname = raw.split(".")[0] if raw else None
self.domain = ".".join(raw.split(".")[1:]) if raw else None
|
Remove the decorate `facts` from mapper `Hostname`
|
Remove the decorate `facts` from mapper `Hostname`
- And update the class comment
|
Python
|
apache-2.0
|
RedHatInsights/insights-core,RedHatInsights/insights-core
|
from .. import Mapper, mapper
- @mapper("facts")
@mapper("hostname")
class Hostname(Mapper):
+ """Class for parsing ``hostname`` command output.
+
+ Attributes:
+ fqdn: The fully qualified domain name of the host. The same to
+ ``hostname`` when domain part is not set.
+ hostname: The hostname.
+ domain: The domain get from the fqdn.
+ """
def parse_content(self, content):
- fqdn = None
+ raw = None
if len(content) == 1:
- fqdn = content[0].strip()
+ raw = content[0].strip()
- elif len(content) > 1:
- for line in content:
- if line.startswith('fqdn'):
- fqdn = line.split()[-1]
- self.fqdn = fqdn
+ self.fqdn = raw
- self.hostname = fqdn.split(".")[0] if fqdn else None
+ self.hostname = raw.split(".")[0] if raw else None
- self.domain = ".".join(fqdn.split(".")[1:]) if fqdn else None
+ self.domain = ".".join(raw.split(".")[1:]) if raw else None
|
Remove the decorate `facts` from mapper `Hostname`
|
## Code Before:
from .. import Mapper, mapper
@mapper("facts")
@mapper("hostname")
class Hostname(Mapper):
def parse_content(self, content):
fqdn = None
if len(content) == 1:
fqdn = content[0].strip()
elif len(content) > 1:
for line in content:
if line.startswith('fqdn'):
fqdn = line.split()[-1]
self.fqdn = fqdn
self.hostname = fqdn.split(".")[0] if fqdn else None
self.domain = ".".join(fqdn.split(".")[1:]) if fqdn else None
## Instruction:
Remove the decorator `facts` from mapper `Hostname`
## Code After:
from .. import Mapper, mapper
@mapper("hostname")
class Hostname(Mapper):
"""Class for parsing ``hostname`` command output.
Attributes:
fqdn: The fully qualified domain name of the host. The same as
``hostname`` when the domain part is not set.
hostname: The hostname.
domain: The domain derived from the fqdn.
"""
def parse_content(self, content):
raw = None
if len(content) == 1:
raw = content[0].strip()
self.fqdn = raw
self.hostname = raw.split(".")[0] if raw else None
self.domain = ".".join(raw.split(".")[1:]) if raw else None
|
from .. import Mapper, mapper
- @mapper("facts")
@mapper("hostname")
class Hostname(Mapper):
+ """Class for parsing ``hostname`` command output.
+
+ Attributes:
+ fqdn: The fully qualified domain name of the host. The same as
+ ``hostname`` when the domain part is not set.
+ hostname: The hostname.
+ domain: The domain derived from the fqdn.
+ """
def parse_content(self, content):
- fqdn = None
? ^^^^
+ raw = None
? ^^^
if len(content) == 1:
- fqdn = content[0].strip()
? ^^^^
+ raw = content[0].strip()
? ^^^
- elif len(content) > 1:
- for line in content:
- if line.startswith('fqdn'):
- fqdn = line.split()[-1]
- self.fqdn = fqdn
? ^^^^
+ self.fqdn = raw
? ^^^
- self.hostname = fqdn.split(".")[0] if fqdn else None
? ^^^^ ^^^^
+ self.hostname = raw.split(".")[0] if raw else None
? ^^^ ^^^
- self.domain = ".".join(fqdn.split(".")[1:]) if fqdn else None
? ^^^^ ^^^^
+ self.domain = ".".join(raw.split(".")[1:]) if raw else None
? ^^^ ^^^
|
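
For illustration, a minimal, self-contained sketch of the splitting logic the reworked parse_content applies to the single-line `hostname` output; split_fqdn is a hypothetical helper written for this note, not part of the falafel mapper:

def split_fqdn(raw):
    # Mirrors the mapper: hostname is the first label, domain is the rest.
    fqdn = raw.strip() if raw else None
    hostname = fqdn.split(".")[0] if fqdn else None
    domain = ".".join(fqdn.split(".")[1:]) if fqdn else None
    return fqdn, hostname, domain

assert split_fqdn("www.example.com") == ("www.example.com", "www", "example.com")
assert split_fqdn("localhost") == ("localhost", "localhost", "")
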
554cbefe43ce94af4f1858c534cdb0d1e5ba965c
|
floyd/cli/auth.py
|
floyd/cli/auth.py
|
import click
import webbrowser
import floyd
from floyd.client.auth import AuthClient
from floyd.manager.auth_config import AuthConfigManager
from floyd.model.access_token import AccessToken
from floyd.log import logger as floyd_logger
@click.command()
def login():
"""
Log into Floyd via Auth0.
"""
cli_info_url = "{}/welcome".format(floyd.floyd_web_host)
click.confirm('Authentication token page will now open in your browser. Continue?', abort=True, default=True)
webbrowser.open(cli_info_url)
access_code = click.prompt('Please copy and paste the token here', type=str, hide_input=True)
user = AuthClient().get_user(access_code)
access_token = AccessToken(username=user.username,
token=access_code)
AuthConfigManager.set_access_token(access_token)
floyd_logger.info("Login Successful")
@click.command()
def logout():
"""
Logout of Floyd.
"""
AuthConfigManager.purge_access_token()
|
import click
import webbrowser
import floyd
from floyd.client.auth import AuthClient
from floyd.manager.auth_config import AuthConfigManager
from floyd.model.access_token import AccessToken
from floyd.log import logger as floyd_logger
@click.command()
@click.option('--token', is_flag=True, default=False, help='Just enter token')
def login(token):
"""
Log into Floyd via Auth0.
"""
if not token:
cli_info_url = "{}/welcome".format(floyd.floyd_web_host)
click.confirm('Authentication token page will now open in your browser. Continue?', abort=True, default=True)
webbrowser.open(cli_info_url)
access_code = click.prompt('Please copy and paste the token here', type=str, hide_input=True)
user = AuthClient().get_user(access_code)
access_token = AccessToken(username=user.username,
token=access_code)
AuthConfigManager.set_access_token(access_token)
floyd_logger.info("Login Successful")
@click.command()
def logout():
"""
Logout of Floyd.
"""
AuthConfigManager.purge_access_token()
|
Add support for --token in login command
|
Add support for --token in login command
This can be used when you already have the token and do not
want to open the browser.
|
Python
|
apache-2.0
|
mckayward/floyd-cli,mckayward/floyd-cli,houqp/floyd-cli,houqp/floyd-cli
|
import click
import webbrowser
import floyd
from floyd.client.auth import AuthClient
from floyd.manager.auth_config import AuthConfigManager
from floyd.model.access_token import AccessToken
from floyd.log import logger as floyd_logger
@click.command()
+ @click.option('--token', is_flag=True, default=False, help='Just enter token')
- def login():
+ def login(token):
"""
Log into Floyd via Auth0.
"""
+ if not token:
- cli_info_url = "{}/welcome".format(floyd.floyd_web_host)
+ cli_info_url = "{}/welcome".format(floyd.floyd_web_host)
- click.confirm('Authentication token page will now open in your browser. Continue?', abort=True, default=True)
+ click.confirm('Authentication token page will now open in your browser. Continue?', abort=True, default=True)
- webbrowser.open(cli_info_url)
+ webbrowser.open(cli_info_url)
+
access_code = click.prompt('Please copy and paste the token here', type=str, hide_input=True)
user = AuthClient().get_user(access_code)
access_token = AccessToken(username=user.username,
token=access_code)
AuthConfigManager.set_access_token(access_token)
floyd_logger.info("Login Successful")
@click.command()
def logout():
"""
Logout of Floyd.
"""
AuthConfigManager.purge_access_token()
|
Add support for --token in login command
|
## Code Before:
import click
import webbrowser
import floyd
from floyd.client.auth import AuthClient
from floyd.manager.auth_config import AuthConfigManager
from floyd.model.access_token import AccessToken
from floyd.log import logger as floyd_logger
@click.command()
def login():
"""
Log into Floyd via Auth0.
"""
cli_info_url = "{}/welcome".format(floyd.floyd_web_host)
click.confirm('Authentication token page will now open in your browser. Continue?', abort=True, default=True)
webbrowser.open(cli_info_url)
access_code = click.prompt('Please copy and paste the token here', type=str, hide_input=True)
user = AuthClient().get_user(access_code)
access_token = AccessToken(username=user.username,
token=access_code)
AuthConfigManager.set_access_token(access_token)
floyd_logger.info("Login Successful")
@click.command()
def logout():
"""
Logout of Floyd.
"""
AuthConfigManager.purge_access_token()
## Instruction:
Add support for --token in login command
## Code After:
import click
import webbrowser
import floyd
from floyd.client.auth import AuthClient
from floyd.manager.auth_config import AuthConfigManager
from floyd.model.access_token import AccessToken
from floyd.log import logger as floyd_logger
@click.command()
@click.option('--token', is_flag=True, default=False, help='Just enter token')
def login(token):
"""
Log into Floyd via Auth0.
"""
if not token:
cli_info_url = "{}/welcome".format(floyd.floyd_web_host)
click.confirm('Authentication token page will now open in your browser. Continue?', abort=True, default=True)
webbrowser.open(cli_info_url)
access_code = click.prompt('Please copy and paste the token here', type=str, hide_input=True)
user = AuthClient().get_user(access_code)
access_token = AccessToken(username=user.username,
token=access_code)
AuthConfigManager.set_access_token(access_token)
floyd_logger.info("Login Successful")
@click.command()
def logout():
"""
Logout of Floyd.
"""
AuthConfigManager.purge_access_token()
|
import click
import webbrowser
import floyd
from floyd.client.auth import AuthClient
from floyd.manager.auth_config import AuthConfigManager
from floyd.model.access_token import AccessToken
from floyd.log import logger as floyd_logger
@click.command()
+ @click.option('--token', is_flag=True, default=False, help='Just enter token')
- def login():
+ def login(token):
? +++++
"""
Log into Floyd via Auth0.
"""
+ if not token:
- cli_info_url = "{}/welcome".format(floyd.floyd_web_host)
+ cli_info_url = "{}/welcome".format(floyd.floyd_web_host)
? ++++
- click.confirm('Authentication token page will now open in your browser. Continue?', abort=True, default=True)
+ click.confirm('Authentication token page will now open in your browser. Continue?', abort=True, default=True)
? ++++
- webbrowser.open(cli_info_url)
+ webbrowser.open(cli_info_url)
? ++++
+
access_code = click.prompt('Please copy and paste the token here', type=str, hide_input=True)
user = AuthClient().get_user(access_code)
access_token = AccessToken(username=user.username,
token=access_code)
AuthConfigManager.set_access_token(access_token)
floyd_logger.info("Login Successful")
@click.command()
def logout():
"""
Logout of Floyd.
"""
AuthConfigManager.purge_access_token()
|
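
The change wraps only the browser-opening steps in `if not token:`, so the prompt still runs either way. A minimal, runnable sketch of that click pattern (assuming only that click is installed; demo_login is a stand-in, not the real floyd command):

import click

@click.command()
@click.option('--token', is_flag=True, default=False, help='Just enter token')
def demo_login(token):
    if not token:
        click.echo('would open the token page in a browser here')
    code = click.prompt('Please copy and paste the token here', hide_input=True)
    click.echo('received a token of length {}'.format(len(code)))

if __name__ == '__main__':
    demo_login()
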
11feab5b49bf818e8dde90497d90dafc7ceb5183
|
src/locations/models.py
|
src/locations/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
def __unicode__(self):
return self.name
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
ordering = ['name']
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
ordering = ['name']
def __unicode__(self):
return self.name
|
Order locations and districts by name
|
Order locations and districts by name
|
Python
|
mit
|
mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
+ ordering = ['name']
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
+ ordering = ['name']
def __unicode__(self):
return self.name
|
Order locations and districts by name
|
## Code Before:
from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
def __unicode__(self):
return self.name
## Instruction:
Order locations and districts by name
## Code After:
from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
ordering = ['name']
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
ordering = ['name']
def __unicode__(self):
return self.name
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
+ ordering = ['name']
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
+ ordering = ['name']
def __unicode__(self):
return self.name
|
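
Assuming a configured Django project with these models migrated, `Meta.ordering` changes what an unqualified queryset returns; the snippet below is illustrative rather than standalone:

# With ordering = ['name'] in Meta, both lines yield rows alphabetically;
# without it, all() returns rows in an unspecified, backend-dependent order.
list(District.objects.all())
list(District.objects.order_by('name'))  # the explicit equivalent
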
6032fb8eb10a2f6be28142c7473e03b4bc349c7c
|
partitions/registry.py
|
partitions/registry.py
|
from django.conf import settings
class Registry(object):
def __init__(self):
self._partitions = {}
def register(self, key, app_model, expression):
if not isinstance(app_model, basestring):
app_model = "%s.%s" % (
app_model._meta.app_label,
app_model._meta.object_name
)
if key in self._partitions and app_model in self._partitions[key]:
raise Exception("'%s' is already registered." % key)
if app_model.split(".")[0] not in settings.INSTALLED_APPS:
raise Exception("'%s' is not in INSTALLED_APPS" % app_model.split(".")[0])
self._partitions[key].update({app_model: expression})
def expression_for(self, key, app_model):
return self._partitions.get(key, {}).get(app_model)
registry = Registry()
def register(key, app_model, expression):
registry.register(key, app_model, expression)
|
from django.conf import settings
class Registry(object):
def __init__(self):
self._partitions = {}
def register(self, key, app_model, expression):
if not isinstance(app_model, basestring):
app_model = "%s.%s" % (
app_model._meta.app_label,
app_model._meta.object_name
)
if key in self._partitions and app_model in self._partitions[key]:
raise Exception("'%s' is already registered." % key)
if app_model.split(".")[0] not in settings.INSTALLED_APPS:
raise Exception("'%s' is not in INSTALLED_APPS" % app_model.split(".")[0])
self._partitions.update({
key: {
app_model: expression
}
})
def expression_for(self, key, app_model):
return self._partitions.get(key, {}).get(app_model)
registry = Registry()
def register(key, app_model, expression):
registry.register(key, app_model, expression)
|
Use update instead of setting key directly
|
Use update instead of setting key directly
|
Python
|
bsd-3-clause
|
eldarion/django-partitions
|
from django.conf import settings
class Registry(object):
def __init__(self):
self._partitions = {}
def register(self, key, app_model, expression):
if not isinstance(app_model, basestring):
app_model = "%s.%s" % (
app_model._meta.app_label,
app_model._meta.object_name
)
if key in self._partitions and app_model in self._partitions[key]:
raise Exception("'%s' is already registered." % key)
if app_model.split(".")[0] not in settings.INSTALLED_APPS:
raise Exception("'%s' is not in INSTALLED_APPS" % app_model.split(".")[0])
- self._partitions[key].update({app_model: expression})
+ self._partitions.update({
+ key: {
+ app_model: expression
+ }
+ })
def expression_for(self, key, app_model):
return self._partitions.get(key, {}).get(app_model)
registry = Registry()
def register(key, app_model, expression):
registry.register(key, app_model, expression)
|
Use update instead of setting key directly
|
## Code Before:
from django.conf import settings
class Registry(object):
def __init__(self):
self._partitions = {}
def register(self, key, app_model, expression):
if not isinstance(app_model, basestring):
app_model = "%s.%s" % (
app_model._meta.app_label,
app_model._meta.object_name
)
if key in self._partitions and app_model in self._partitions[key]:
raise Exception("'%s' is already registered." % key)
if app_model.split(".")[0] not in settings.INSTALLED_APPS:
raise Exception("'%s' is not in INSTALLED_APPS" % app_model.split(".")[0])
self._partitions[key].update({app_model: expression})
def expression_for(self, key, app_model):
return self._partitions.get(key, {}).get(app_model)
registry = Registry()
def register(key, app_model, expression):
registry.register(key, app_model, expression)
## Instruction:
Use update instead of setting key directly
## Code After:
from django.conf import settings
class Registry(object):
def __init__(self):
self._partitions = {}
def register(self, key, app_model, expression):
if not isinstance(app_model, basestring):
app_model = "%s.%s" % (
app_model._meta.app_label,
app_model._meta.object_name
)
if key in self._partitions and app_model in self._partitions[key]:
raise Exception("'%s' is already registered." % key)
if app_model.split(".")[0] not in settings.INSTALLED_APPS:
raise Exception("'%s' is not in INSTALLED_APPS" % app_model.split(".")[0])
self._partitions.update({
key: {
app_model: expression
}
})
def expression_for(self, key, app_model):
return self._partitions.get(key, {}).get(app_model)
registry = Registry()
def register(key, app_model, expression):
registry.register(key, app_model, expression)
|
from django.conf import settings
class Registry(object):
def __init__(self):
self._partitions = {}
def register(self, key, app_model, expression):
if not isinstance(app_model, basestring):
app_model = "%s.%s" % (
app_model._meta.app_label,
app_model._meta.object_name
)
if key in self._partitions and app_model in self._partitions[key]:
raise Exception("'%s' is already registered." % key)
if app_model.split(".")[0] not in settings.INSTALLED_APPS:
raise Exception("'%s' is not in INSTALLED_APPS" % app_model.split(".")[0])
- self._partitions[key].update({app_model: expression})
+ self._partitions.update({
+ key: {
+ app_model: expression
+ }
+ })
def expression_for(self, key, app_model):
return self._partitions.get(key, {}).get(app_model)
registry = Registry()
def register(key, app_model, expression):
registry.register(key, app_model, expression)
|
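
A small, runnable illustration (plain dicts, hypothetical key names) of why the replaced line failed and what the new call does, including the detail that update() swaps the inner dict wholesale for an existing key:

partitions = {}

# Old approach: indexing an absent key raises before update() ever runs.
try:
    partitions['recency'].update({'app.Model': 'expr'})
except KeyError:
    pass  # 'recency' was never initialized

# New approach: update() on the outer dict creates the entry.
partitions.update({'recency': {'app.Model': 'expr'}})
assert partitions == {'recency': {'app.Model': 'expr'}}

# Registering again under the same key replaces the inner dict wholesale.
partitions.update({'recency': {'other.Model': 'expr2'}})
assert 'app.Model' not in partitions['recency']
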
67637039b95f4030a462edb35d614bb678426dd3
|
conversion_check.py
|
conversion_check.py
|
def check_iso_mcp(input_file):
"""
Checks if MCP conversion is allowed for the given file.
MCP files are only created if the DIF has an ISO Topic Category of
"OCEANS".
"""
allowed = False
# Cannot use the following check:
# oceans_tag = '<ISO_Topic_Category>OCEANS</ISO_Topic_Category>'
# This is because some of these tags include a "uuid" attribute, so they
# will not be marked OK for conversion.
oceans_tag = '>OCEANS</ISO_Topic_Category>'
with open(input_file) as r:
content = r.read()
if 0 <= content.find(oceans_tag):
allowed = True
return allowed
|
def check_iso_mcp(input_file):
"""
Checks if MCP conversion is allowed for the given file.
MCP files are only created if the DIF has an ISO Topic Category of
"OCEANS".
"""
allowed = False
# Cannot use the following check:
# oceans_tag = '<ISO_Topic_Category>OCEANS</ISO_Topic_Category>'
# This is because some of these tags include a "uuid" attribute, so they
# will not be marked OK for conversion.
oceans_tag = '>OCEANS</ISO_Topic_Category>'
with open(input_file) as r:
content = r.read()
if 0 <= content.find(oceans_tag):
allowed = True
return allowed
def check_ands_rif_cs(file_path):
folder_path, file_name = os.path.split(file_path)
base_name, ext_name = os.path.splitext(file_name)
return not base_name.endswith('AAD_RIFCS_ISO')
|
Add ANDS RIF-CS conversion check function
|
Add ANDS RIF-CS conversion check function
|
Python
|
mit
|
AustralianAntarcticDataCentre/metadata_xml_convert,AustralianAntarcticDataCentre/metadata_xml_convert
|
def check_iso_mcp(input_file):
"""
Checks if MCP conversion is allowed for the given file.
MCP files are only created if the DIF has an ISO Topic Category of
"OCEANS".
"""
allowed = False
# Cannot use the following check:
# oceans_tag = '<ISO_Topic_Category>OCEANS</ISO_Topic_Category>'
# This is because some of these tags include a "uuid" attribute, so they
# will not be marked OK for conversion.
oceans_tag = '>OCEANS</ISO_Topic_Category>'
with open(input_file) as r:
content = r.read()
if 0 <= content.find(oceans_tag):
allowed = True
return allowed
+
+ def check_ands_rif_cs(file_path):
+ folder_path, file_name = os.path.split(file_path)
+ base_name, ext_name = os.path.splitext(file_name)
+ return not base_name.endswith('AAD_RIFCS_ISO')
+
|
Add ANDS RIF-CS conversion check function
|
## Code Before:
def check_iso_mcp(input_file):
"""
Checks if MCP conversion is allowed for the given file.
MCP files are only created if the DIF has an ISO Topic Category of
"OCEANS".
"""
allowed = False
# Cannot use the following check:
# oceans_tag = '<ISO_Topic_Category>OCEANS</ISO_Topic_Category>'
# This is because some of these tags include a "uuid" attribute, so they
# will not be marked OK for conversion.
oceans_tag = '>OCEANS</ISO_Topic_Category>'
with open(input_file) as r:
content = r.read()
if 0 <= content.find(oceans_tag):
allowed = True
return allowed
## Instruction:
Add ANDS RIF-CS conversion check function
## Code After:
def check_iso_mcp(input_file):
"""
Checks if MCP conversion is allowed for the given file.
MCP files are only created if the DIF has an ISO Topic Category of
"OCEANS".
"""
allowed = False
# Cannot use the following check:
# oceans_tag = '<ISO_Topic_Category>OCEANS</ISO_Topic_Category>'
# This is because some of these tags include a "uuid" attribute, so they
# will not be marked OK for conversion.
oceans_tag = '>OCEANS</ISO_Topic_Category>'
with open(input_file) as r:
content = r.read()
if 0 <= content.find(oceans_tag):
allowed = True
return allowed
def check_ands_rif_cs(file_path):
folder_path, file_name = os.path.split(file_path)
base_name, ext_name = os.path.splitext(file_name)
return not base_name.endswith('AAD_RIFCS_ISO')
|
def check_iso_mcp(input_file):
"""
Checks if MCP conversion is allowed for the given file.
MCP files are only created if the DIF has an ISO Topic Category of
"OCEANS".
"""
allowed = False
# Cannot use the following check:
# oceans_tag = '<ISO_Topic_Category>OCEANS</ISO_Topic_Category>'
# This is because some of these tags include a "uuid" attribute, so they
# will not be marked OK for conversion.
oceans_tag = '>OCEANS</ISO_Topic_Category>'
with open(input_file) as r:
content = r.read()
if 0 <= content.find(oceans_tag):
allowed = True
return allowed
+
+
+ def check_ands_rif_cs(file_path):
+ folder_path, file_name = os.path.split(file_path)
+ base_name, ext_name = os.path.splitext(file_name)
+ return not base_name.endswith('AAD_RIFCS_ISO')
|
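
Worth noting: the added check_ands_rif_cs relies on `os`, which the file shown never imports, so calling it as-is would raise NameError. A self-contained version (with hypothetical sample paths) reads:

import os  # needed by the new check; missing from the module above

def check_ands_rif_cs(file_path):
    folder_path, file_name = os.path.split(file_path)
    base_name, ext_name = os.path.splitext(file_name)
    return not base_name.endswith('AAD_RIFCS_ISO')

assert check_ands_rif_cs('/data/metadata_123.xml') is True
assert check_ands_rif_cs('/data/record_AAD_RIFCS_ISO.xml') is False
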
b3a144e9dfba915d186fd1243515172780611689
|
models/waifu_model.py
|
models/waifu_model.py
|
from models.base_model import BaseModel
from datetime import datetime
from models.user_model import UserModel
from peewee import CharField, TextField, DateTimeField, IntegerField, ForeignKeyField
WAIFU_SHARING_STATUS_PRIVATE = 1
WAIFU_SHARING_STATUS_PUBLIC_MODERATION = 2
WAIFU_SHARING_STATUS_PUBLIC = 3
class WaifuModel(BaseModel):
class Meta:
db_table = 'waifus'
name = CharField(max_length=128, null=False)
description = TextField(null=False)
pic = CharField(max_length=128, null=False)
created_at = DateTimeField(null=False, default=datetime.now)
updated_at = DateTimeField(null=False, default=datetime.now)
rating = IntegerField(null=False, default=0)
sharing_status = IntegerField(null=False, default=WAIFU_SHARING_STATUS_PRIVATE)
owner = ForeignKeyField(UserModel, related_name='waifus_created_by_me')
|
from models.base_model import BaseModel
from datetime import datetime
from models.user_model import UserModel
from peewee import CharField, TextField, DateTimeField, IntegerField, ForeignKeyField
WAIFU_SHARING_STATUS_PRIVATE = 1
WAIFU_SHARING_STATUS_PUBLIC_MODERATION = 2
WAIFU_SHARING_STATUS_PUBLIC = 3
class WaifuModel(BaseModel):
class Meta:
db_table = 'waifus'
name = CharField(max_length=128, null=False)
description = TextField(null=False)
pic = CharField(max_length=128, null=False)
created_at = DateTimeField(null=False, default=datetime.now)
updated_at = DateTimeField(null=False, default=datetime.now)
rating = IntegerField(null=False, default=0)
sharing_status = IntegerField(null=False, default=WAIFU_SHARING_STATUS_PRIVATE)
owner = ForeignKeyField(UserModel, related_name='waifus_created_by_me')
def to_json(self):
json = super(WaifuModel, self).to_json()
json['users_count'] = self.users.count()
return json
|
Add users count to json representation.
|
Add users count to json representation.
|
Python
|
cc0-1.0
|
sketchturnerr/WaifuSim-backend,sketchturnerr/WaifuSim-backend
|
from models.base_model import BaseModel
from datetime import datetime
from models.user_model import UserModel
from peewee import CharField, TextField, DateTimeField, IntegerField, ForeignKeyField
WAIFU_SHARING_STATUS_PRIVATE = 1
WAIFU_SHARING_STATUS_PUBLIC_MODERATION = 2
WAIFU_SHARING_STATUS_PUBLIC = 3
class WaifuModel(BaseModel):
class Meta:
db_table = 'waifus'
name = CharField(max_length=128, null=False)
description = TextField(null=False)
pic = CharField(max_length=128, null=False)
created_at = DateTimeField(null=False, default=datetime.now)
updated_at = DateTimeField(null=False, default=datetime.now)
rating = IntegerField(null=False, default=0)
sharing_status = IntegerField(null=False, default=WAIFU_SHARING_STATUS_PRIVATE)
owner = ForeignKeyField(UserModel, related_name='waifus_created_by_me')
+ def to_json(self):
+ json = super(WaifuModel, self).to_json()
+ json['users_count'] = self.users.count()
+ return json
+
|
Add users count to json representation.
|
## Code Before:
from models.base_model import BaseModel
from datetime import datetime
from models.user_model import UserModel
from peewee import CharField, TextField, DateTimeField, IntegerField, ForeignKeyField
WAIFU_SHARING_STATUS_PRIVATE = 1
WAIFU_SHARING_STATUS_PUBLIC_MODERATION = 2
WAIFU_SHARING_STATUS_PUBLIC = 3
class WaifuModel(BaseModel):
class Meta:
db_table = 'waifus'
name = CharField(max_length=128, null=False)
description = TextField(null=False)
pic = CharField(max_length=128, null=False)
created_at = DateTimeField(null=False, default=datetime.now)
updated_at = DateTimeField(null=False, default=datetime.now)
rating = IntegerField(null=False, default=0)
sharing_status = IntegerField(null=False, default=WAIFU_SHARING_STATUS_PRIVATE)
owner = ForeignKeyField(UserModel, related_name='waifus_created_by_me')
## Instruction:
Add users count to json representation.
## Code After:
from models.base_model import BaseModel
from datetime import datetime
from models.user_model import UserModel
from peewee import CharField, TextField, DateTimeField, IntegerField, ForeignKeyField
WAIFU_SHARING_STATUS_PRIVATE = 1
WAIFU_SHARING_STATUS_PUBLIC_MODERATION = 2
WAIFU_SHARING_STATUS_PUBLIC = 3
class WaifuModel(BaseModel):
class Meta:
db_table = 'waifus'
name = CharField(max_length=128, null=False)
description = TextField(null=False)
pic = CharField(max_length=128, null=False)
created_at = DateTimeField(null=False, default=datetime.now)
updated_at = DateTimeField(null=False, default=datetime.now)
rating = IntegerField(null=False, default=0)
sharing_status = IntegerField(null=False, default=WAIFU_SHARING_STATUS_PRIVATE)
owner = ForeignKeyField(UserModel, related_name='waifus_created_by_me')
def to_json(self):
json = super(WaifuModel, self).to_json()
json['users_count'] = self.users.count()
return json
|
from models.base_model import BaseModel
from datetime import datetime
from models.user_model import UserModel
from peewee import CharField, TextField, DateTimeField, IntegerField, ForeignKeyField
WAIFU_SHARING_STATUS_PRIVATE = 1
WAIFU_SHARING_STATUS_PUBLIC_MODERATION = 2
WAIFU_SHARING_STATUS_PUBLIC = 3
class WaifuModel(BaseModel):
class Meta:
db_table = 'waifus'
name = CharField(max_length=128, null=False)
description = TextField(null=False)
pic = CharField(max_length=128, null=False)
created_at = DateTimeField(null=False, default=datetime.now)
updated_at = DateTimeField(null=False, default=datetime.now)
rating = IntegerField(null=False, default=0)
sharing_status = IntegerField(null=False, default=WAIFU_SHARING_STATUS_PRIVATE)
owner = ForeignKeyField(UserModel, related_name='waifus_created_by_me')
+
+ def to_json(self):
+ json = super(WaifuModel, self).to_json()
+ json['users_count'] = self.users.count()
+ return json
|
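
The override follows the common extend-the-parent-payload pattern. A minimal stand-in (no peewee involved, with a constant in place of self.users.count()):

class Base(object):
    def to_json(self):
        return {'id': 1}

class Waifu(Base):
    def to_json(self):
        json = super(Waifu, self).to_json()  # start from the parent's payload
        json['users_count'] = 3              # stands in for self.users.count()
        return json

assert Waifu().to_json() == {'id': 1, 'users_count': 3}
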
dd0cef83edbd3849484b7fc0ec5cb6372f99bb3a
|
batchflow/models/utils.py
|
batchflow/models/utils.py
|
""" Auxiliary functions for models """
def unpack_args(args, layer_no, layers_max):
""" Return layer parameters """
new_args = {}
for arg in args:
if isinstance(args[arg], list) and layers_max > 1:
if len(args[arg]) >= layers_max:
arg_value = args[arg][layer_no]
else:
arg_value = args[arg]
else:
arg_value = args[arg]
new_args.update({arg: arg_value})
return new_args
def unpack_fn_from_config(param, config=None):
""" Return params from config """
par = config.get(param)
if par is None:
return None, {}
if isinstance(par, (tuple, list)):
if len(par) == 0:
par_name = None
elif len(par) == 1:
par_name, par_args = par[0], {}
elif len(par) == 2:
par_name, par_args = par
else:
par_name, par_args = par[0], par[1:]
elif isinstance(par, dict):
par = par.copy()
par_name, par_args = par.pop('name', None), par
else:
par_name, par_args = par, {}
return par_name, par_args
|
""" Auxiliary functions for models """
def unpack_args(args, layer_no, layers_max):
""" Return layer parameters """
new_args = {}
for arg in args:
if isinstance(args[arg], list):
if len(args[arg]) >= layers_max:
arg_value = args[arg][layer_no]
else:
arg_value = args[arg]
else:
arg_value = args[arg]
new_args.update({arg: arg_value})
return new_args
def unpack_fn_from_config(param, config=None):
""" Return params from config """
par = config.get(param)
if par is None:
return None, {}
if isinstance(par, (tuple, list)):
if len(par) == 0:
par_name = None
elif len(par) == 1:
par_name, par_args = par[0], {}
elif len(par) == 2:
par_name, par_args = par
else:
par_name, par_args = par[0], par[1:]
elif isinstance(par, dict):
par = par.copy()
par_name, par_args = par.pop('name', None), par
else:
par_name, par_args = par, {}
return par_name, par_args
|
Allow for 1 arg in a list
|
Allow for 1 arg in a list
|
Python
|
apache-2.0
|
analysiscenter/dataset
|
""" Auxiliary functions for models """
def unpack_args(args, layer_no, layers_max):
""" Return layer parameters """
new_args = {}
for arg in args:
- if isinstance(args[arg], list) and layers_max > 1:
+ if isinstance(args[arg], list):
if len(args[arg]) >= layers_max:
arg_value = args[arg][layer_no]
else:
arg_value = args[arg]
else:
arg_value = args[arg]
new_args.update({arg: arg_value})
return new_args
def unpack_fn_from_config(param, config=None):
""" Return params from config """
par = config.get(param)
if par is None:
return None, {}
if isinstance(par, (tuple, list)):
if len(par) == 0:
par_name = None
elif len(par) == 1:
par_name, par_args = par[0], {}
elif len(par) == 2:
par_name, par_args = par
else:
par_name, par_args = par[0], par[1:]
elif isinstance(par, dict):
par = par.copy()
par_name, par_args = par.pop('name', None), par
else:
par_name, par_args = par, {}
return par_name, par_args
|
Allow for 1 arg in a list
|
## Code Before:
""" Auxiliary functions for models """
def unpack_args(args, layer_no, layers_max):
""" Return layer parameters """
new_args = {}
for arg in args:
if isinstance(args[arg], list) and layers_max > 1:
if len(args[arg]) >= layers_max:
arg_value = args[arg][layer_no]
else:
arg_value = args[arg]
else:
arg_value = args[arg]
new_args.update({arg: arg_value})
return new_args
def unpack_fn_from_config(param, config=None):
""" Return params from config """
par = config.get(param)
if par is None:
return None, {}
if isinstance(par, (tuple, list)):
if len(par) == 0:
par_name = None
elif len(par) == 1:
par_name, par_args = par[0], {}
elif len(par) == 2:
par_name, par_args = par
else:
par_name, par_args = par[0], par[1:]
elif isinstance(par, dict):
par = par.copy()
par_name, par_args = par.pop('name', None), par
else:
par_name, par_args = par, {}
return par_name, par_args
## Instruction:
Allow for 1 arg in a list
## Code After:
""" Auxiliary functions for models """
def unpack_args(args, layer_no, layers_max):
""" Return layer parameters """
new_args = {}
for arg in args:
if isinstance(args[arg], list):
if len(args[arg]) >= layers_max:
arg_value = args[arg][layer_no]
else:
arg_value = args[arg]
else:
arg_value = args[arg]
new_args.update({arg: arg_value})
return new_args
def unpack_fn_from_config(param, config=None):
""" Return params from config """
par = config.get(param)
if par is None:
return None, {}
if isinstance(par, (tuple, list)):
if len(par) == 0:
par_name = None
elif len(par) == 1:
par_name, par_args = par[0], {}
elif len(par) == 2:
par_name, par_args = par
else:
par_name, par_args = par[0], par[1:]
elif isinstance(par, dict):
par = par.copy()
par_name, par_args = par.pop('name', None), par
else:
par_name, par_args = par, {}
return par_name, par_args
|
""" Auxiliary functions for models """
def unpack_args(args, layer_no, layers_max):
""" Return layer parameters """
new_args = {}
for arg in args:
- if isinstance(args[arg], list) and layers_max > 1:
? -------------------
+ if isinstance(args[arg], list):
if len(args[arg]) >= layers_max:
arg_value = args[arg][layer_no]
else:
arg_value = args[arg]
else:
arg_value = args[arg]
new_args.update({arg: arg_value})
return new_args
def unpack_fn_from_config(param, config=None):
""" Return params from config """
par = config.get(param)
if par is None:
return None, {}
if isinstance(par, (tuple, list)):
if len(par) == 0:
par_name = None
elif len(par) == 1:
par_name, par_args = par[0], {}
elif len(par) == 2:
par_name, par_args = par
else:
par_name, par_args = par[0], par[1:]
elif isinstance(par, dict):
par = par.copy()
par_name, par_args = par.pop('name', None), par
else:
par_name, par_args = par, {}
return par_name, par_args
|
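
A runnable demonstration of the behavioral change: with layers_max == 1, a one-element list is now unpacked per layer, where the old `and layers_max > 1` guard passed the list through untouched. The function body is copied from the record:

def unpack_args(args, layer_no, layers_max):
    new_args = {}
    for arg in args:
        if isinstance(args[arg], list):
            if len(args[arg]) >= layers_max:
                arg_value = args[arg][layer_no]
            else:
                arg_value = args[arg]
        else:
            arg_value = args[arg]
        new_args.update({arg: arg_value})
    return new_args

assert unpack_args({'filters': [16]}, 0, 1) == {'filters': 16}         # new behavior
assert unpack_args({'filters': [16, 32, 64]}, 1, 3) == {'filters': 32}
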
2823b35d3bf3d521ae3c9769e2696455bbed8318
|
scriptorium/config.py
|
scriptorium/config.py
|
"""Configuration related functionality for scriptorium."""
import os
import yaml
import scriptorium
_DEFAULT_DIR = os.path.join(os.path.expanduser("~"), '.scriptorium')
_CFG_FILE = os.path.join(_DEFAULT_DIR, 'config')
_DEFAULT_CFG = {
'TEMPLATE_DIR': os.path.join(_DEFAULT_DIR, 'templates'),
'LATEX_CMD': 'xelatex'
}
def read_config():
"""Read configuration values for scriptorium."""
try:
with open(_CFG_FILE, 'Ur') as cfg_fp:
cfg = yaml.load(cfg_fp)
scriptorium.CONFIG.update(cfg)
except EnvironmentError:
if not os.path.exists(scriptorium.CONFIG['TEMPLATE_DIR']):
os.makedirs(scriptorium.CONFIG['TEMPLATE_DIR'])
#Save configuration from first time
save_config()
def save_config():
"""Save configuration values for scriptorium."""
with open(_CFG_FILE, 'w') as cfg_fp:
yaml.dump(scriptorium.CONFIG, cfg_fp)
|
"""Configuration related functionality for scriptorium."""
import os
import yaml
import scriptorium
_DEFAULT_DIR = os.path.join(os.path.expanduser("~"), '.scriptorium')
_CFG_FILE = os.path.join(_DEFAULT_DIR, 'config')
_DEFAULT_CFG = {
'TEMPLATE_DIR': os.path.join(_DEFAULT_DIR, 'templates'),
'LATEX_CMD': 'xelatex'
}
def _sanitize_paths(cfg):
"""Ensure that paths in configuration options have ~ symbols expanded."""
cfg['TEMPLATE_DIR'] = os.path.expanduser(cfg['TEMPLATE_DIR'])
def read_config():
"""Read configuration values for scriptorium."""
try:
with open(_CFG_FILE, 'Ur') as cfg_fp:
cfg = yaml.load(cfg_fp)
scriptorium.CONFIG.update(cfg)
_sanitize_paths(scriptorium.CONFIG)
except EnvironmentError:
if not os.path.exists(scriptorium.CONFIG['TEMPLATE_DIR']):
os.makedirs(scriptorium.CONFIG['TEMPLATE_DIR'])
#Save configuration from first time
save_config()
def save_config():
"""Save configuration values for scriptorium."""
_sanitize_paths(scriptorium.CONFIG)
with open(_CFG_FILE, 'w') as cfg_fp:
yaml.dump(scriptorium.CONFIG, cfg_fp)
|
Expand home directory wildcards to ensure path is valid
|
Expand home directory wildcards to ensure path is valid
|
Python
|
mit
|
jasedit/scriptorium,jasedit/papers_base
|
"""Configuration related functionality for scriptorium."""
import os
import yaml
import scriptorium
_DEFAULT_DIR = os.path.join(os.path.expanduser("~"), '.scriptorium')
_CFG_FILE = os.path.join(_DEFAULT_DIR, 'config')
_DEFAULT_CFG = {
'TEMPLATE_DIR': os.path.join(_DEFAULT_DIR, 'templates'),
'LATEX_CMD': 'xelatex'
}
+ def _sanitize_paths(cfg):
+ """Ensure that paths in configuration options have ~ symbols expanded."""
+ cfg['TEMPLATE_DIR'] = os.path.expanduser(cfg['TEMPLATE_DIR'])
+
def read_config():
"""Read configuration values for scriptorium."""
try:
with open(_CFG_FILE, 'Ur') as cfg_fp:
cfg = yaml.load(cfg_fp)
scriptorium.CONFIG.update(cfg)
+ _sanitize_paths(scriptorium.CONFIG)
except EnvironmentError:
if not os.path.exists(scriptorium.CONFIG['TEMPLATE_DIR']):
os.makedirs(scriptorium.CONFIG['TEMPLATE_DIR'])
#Save configuration from first time
save_config()
def save_config():
"""Save configuration values for scriptorium."""
+ _sanitize_paths(scriptorium.CONFIG)
with open(_CFG_FILE, 'w') as cfg_fp:
yaml.dump(scriptorium.CONFIG, cfg_fp)
|
Expand home directory wildcards to ensure path is valid
|
## Code Before:
"""Configuration related functionality for scriptorium."""
import os
import yaml
import scriptorium
_DEFAULT_DIR = os.path.join(os.path.expanduser("~"), '.scriptorium')
_CFG_FILE = os.path.join(_DEFAULT_DIR, 'config')
_DEFAULT_CFG = {
'TEMPLATE_DIR': os.path.join(_DEFAULT_DIR, 'templates'),
'LATEX_CMD': 'xelatex'
}
def read_config():
"""Read configuration values for scriptorium."""
try:
with open(_CFG_FILE, 'Ur') as cfg_fp:
cfg = yaml.load(cfg_fp)
scriptorium.CONFIG.update(cfg)
except EnvironmentError:
if not os.path.exists(scriptorium.CONFIG['TEMPLATE_DIR']):
os.makedirs(scriptorium.CONFIG['TEMPLATE_DIR'])
#Save configuration from first time
save_config()
def save_config():
"""Save configuration values for scriptorium."""
with open(_CFG_FILE, 'w') as cfg_fp:
yaml.dump(scriptorium.CONFIG, cfg_fp)
## Instruction:
Expand home directory wildcards to ensure path is valid
## Code After:
"""Configuration related functionality for scriptorium."""
import os
import yaml
import scriptorium
_DEFAULT_DIR = os.path.join(os.path.expanduser("~"), '.scriptorium')
_CFG_FILE = os.path.join(_DEFAULT_DIR, 'config')
_DEFAULT_CFG = {
'TEMPLATE_DIR': os.path.join(_DEFAULT_DIR, 'templates'),
'LATEX_CMD': 'xelatex'
}
def _sanitize_paths(cfg):
"""Ensure that paths in configuration options have ~ symbols expanded."""
cfg['TEMPLATE_DIR'] = os.path.expanduser(cfg['TEMPLATE_DIR'])
def read_config():
"""Read configuration values for scriptorium."""
try:
with open(_CFG_FILE, 'Ur') as cfg_fp:
cfg = yaml.load(cfg_fp)
scriptorium.CONFIG.update(cfg)
_sanitize_paths(scriptorium.CONFIG)
except EnvironmentError:
if not os.path.exists(scriptorium.CONFIG['TEMPLATE_DIR']):
os.makedirs(scriptorium.CONFIG['TEMPLATE_DIR'])
#Save configuration from first time
save_config()
def save_config():
"""Save configuration values for scriptorium."""
_sanitize_paths(scriptorium.CONFIG)
with open(_CFG_FILE, 'w') as cfg_fp:
yaml.dump(scriptorium.CONFIG, cfg_fp)
|
"""Configuration related functionality for scriptorium."""
import os
import yaml
import scriptorium
_DEFAULT_DIR = os.path.join(os.path.expanduser("~"), '.scriptorium')
_CFG_FILE = os.path.join(_DEFAULT_DIR, 'config')
_DEFAULT_CFG = {
'TEMPLATE_DIR': os.path.join(_DEFAULT_DIR, 'templates'),
'LATEX_CMD': 'xelatex'
}
+ def _sanitize_paths(cfg):
+ """Ensure that paths in configuration options have ~ symbols expanded."""
+ cfg['TEMPLATE_DIR'] = os.path.expanduser(cfg['TEMPLATE_DIR'])
+
def read_config():
"""Read configuration values for scriptorium."""
try:
with open(_CFG_FILE, 'Ur') as cfg_fp:
cfg = yaml.load(cfg_fp)
scriptorium.CONFIG.update(cfg)
+ _sanitize_paths(scriptorium.CONFIG)
except EnvironmentError:
if not os.path.exists(scriptorium.CONFIG['TEMPLATE_DIR']):
os.makedirs(scriptorium.CONFIG['TEMPLATE_DIR'])
#Save configuration from first time
save_config()
def save_config():
"""Save configuration values for scriptorium."""
+ _sanitize_paths(scriptorium.CONFIG)
with open(_CFG_FILE, 'w') as cfg_fp:
yaml.dump(scriptorium.CONFIG, cfg_fp)
|
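
A tiny, runnable illustration of what _sanitize_paths accomplishes: os.path.expanduser replaces a leading '~' with the running user's home directory, so later os.makedirs() and open() calls receive a real path. The sample value is hypothetical:

import os

cfg = {'TEMPLATE_DIR': '~/.scriptorium/templates'}
cfg['TEMPLATE_DIR'] = os.path.expanduser(cfg['TEMPLATE_DIR'])
print(cfg['TEMPLATE_DIR'])  # e.g. /home/alice/.scriptorium/templates
assert not cfg['TEMPLATE_DIR'].startswith('~')
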
1ed6c3f6d79aca5d647e8ff8332096c4fc111548
|
neuroimaging/utils/tests/test_utils.py
|
neuroimaging/utils/tests/test_utils.py
|
from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
def setUp(self): pass
#print "TestCase initialization..."
def test_foo(self): pass
#print "testing foo"
def test_bar(self): pass
#print "testing bar"
if __name__ == '__main__':
NumpyTest.main()
|
from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
def setUp(self):
pass
#print "TestCase initialization..."
def test_foo(self):
self.fail('neuroimaging.utils, odict, path, etc... have _NO_ tests!')
if __name__ == '__main__':
NumpyTest().run()
|
Fix test example so it runs.
|
BUG: Fix test example so it runs.
|
Python
|
bsd-3-clause
|
yarikoptic/NiPy-OLD,yarikoptic/NiPy-OLD
|
from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
- def setUp(self): pass
+ def setUp(self):
+ pass
#print "TestCase initialization..."
- def test_foo(self): pass
+ def test_foo(self):
+ self.fail('neuroimaging.utils, odict, path, etc... have _NO_ tests!')
- #print "testing foo"
-
- def test_bar(self): pass
- #print "testing bar"
-
if __name__ == '__main__':
- NumpyTest.main()
+ NumpyTest().run()
|
Fix test example so it runs.
|
## Code Before:
from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
def setUp(self): pass
#print "TestCase initialization..."
def test_foo(self): pass
#print "testing foo"
def test_bar(self): pass
#print "testing bar"
if __name__ == '__main__':
NumpyTest.main()
## Instruction:
Fix test example so it runs.
## Code After:
from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
def setUp(self):
pass
#print "TestCase initialization..."
def test_foo(self):
self.fail('neuroimaging.utils, odict, path, etc... have _NO_ tests!')
if __name__ == '__main__':
NumpyTest().run()
|
from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
- def setUp(self): pass
? -----
+ def setUp(self):
+ pass
#print "TestCase initialization..."
- def test_foo(self): pass
? -----
+ def test_foo(self):
+ self.fail('neuroimaging.utils, odict, path, etc... have _NO_ tests!')
- #print "testing foo"
-
- def test_bar(self): pass
- #print "testing bar"
-
if __name__ == '__main__':
- NumpyTest.main()
? ^^^
+ NumpyTest().run()
? ++ ^^
|
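
The core of the fix is calling run() on an instance instead of invoking a method on the bare class. The failure mode, in miniature and independent of numpy (Runner is a hypothetical class for this sketch):

class Runner(object):
    def run(self):
        return 'ran'

try:
    Runner.run()  # analogous to NumpyTest.main(): no instance supplied
except TypeError:
    pass
assert Runner().run() == 'ran'  # analogous to NumpyTest().run()
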
5a5ba8bbd484f427260f699101e5e754e4a6c5d1
|
phy/utils/tests/test_color.py
|
phy/utils/tests/test_color.py
|
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
from .._color import _random_color, _is_bright, _random_bright_color
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
assert _is_bright(_random_bright_color())
|
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
from .._color import (_random_color, _is_bright, _random_bright_color,
_selected_clusters_colors,
)
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
for _ in range(10):
assert _is_bright(_random_bright_color())
def test_selected_clusters_colors():
assert _selected_clusters_colors().ndim == 2
assert len(_selected_clusters_colors(3)) == 3
assert len(_selected_clusters_colors(10)) == 10
|
Increase coverage in color module
|
Increase coverage in color module
|
Python
|
bsd-3-clause
|
rossant/phy,rossant/phy,rossant/phy,kwikteam/phy,kwikteam/phy,kwikteam/phy
|
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
- from .._color import _random_color, _is_bright, _random_bright_color
+ from .._color import (_random_color, _is_bright, _random_bright_color,
+ _selected_clusters_colors,
+ )
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
+ for _ in range(10):
- assert _is_bright(_random_bright_color())
+ assert _is_bright(_random_bright_color())
+
+ def test_selected_clusters_colors():
+ assert _selected_clusters_colors().ndim == 2
+ assert len(_selected_clusters_colors(3)) == 3
+ assert len(_selected_clusters_colors(10)) == 10
+
|
Increase coverage in color module
|
## Code Before:
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
from .._color import _random_color, _is_bright, _random_bright_color
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
assert _is_bright(_random_bright_color())
## Instruction:
Increase coverage in color module
## Code After:
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
from .._color import (_random_color, _is_bright, _random_bright_color,
_selected_clusters_colors,
)
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
for _ in range(10):
assert _is_bright(_random_bright_color())
def test_selected_clusters_colors():
assert _selected_clusters_colors().ndim == 2
assert len(_selected_clusters_colors(3)) == 3
assert len(_selected_clusters_colors(10)) == 10
|
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
- from .._color import _random_color, _is_bright, _random_bright_color
+ from .._color import (_random_color, _is_bright, _random_bright_color,
? + +
+ _selected_clusters_colors,
+ )
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
+ for _ in range(10):
- assert _is_bright(_random_bright_color())
+ assert _is_bright(_random_bright_color())
? ++++
+
+
+ def test_selected_clusters_colors():
+ assert _selected_clusters_colors().ndim == 2
+ assert len(_selected_clusters_colors(3)) == 3
+ assert len(_selected_clusters_colors(10)) == 10
|
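
_selected_clusters_colors itself is not shown in this record; the sketch below is only a hypothetical stand-in satisfying the new test's contract (a 2-D array with one row per requested cluster), not phy's implementation:

import numpy as np

def selected_clusters_colors(n=10):
    base = np.array([[0.9, 0.3, 0.3],
                     [0.3, 0.9, 0.3],
                     [0.3, 0.3, 0.9]])
    return np.resize(base, (n, 3))  # tile the small palette out to n rows

assert selected_clusters_colors().ndim == 2
assert len(selected_clusters_colors(3)) == 3
assert len(selected_clusters_colors(10)) == 10
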
5f27e570a369fbb408a48a567064a96f1ceac277
|
tests/commands/project/utils.py
|
tests/commands/project/utils.py
|
from uuid import uuid4
import requests_mock
from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string
def get_project_mock(create_project_name=None, existing_projects=None):
username = get_random_string()
m = requests_mock.mock()
if isinstance(existing_projects, int):
existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
if existing_projects is not None:
m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
if create_project_name:
m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
'id': str(uuid4()),
'name': create_project_name,
'owner': {
'id': 8,
'username': username,
}
})
m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
return m
|
from uuid import uuid4
import requests_mock
from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string
def get_project_mock(create_project_name=None, existing_projects=None):
username = get_random_string()
project_id = uuid4()
m = requests_mock.mock()
if isinstance(existing_projects, int):
existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
if existing_projects is not None:
m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
if create_project_name:
m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
'id': str(project_id),
'name': create_project_name,
'owner': {
'id': 8,
'username': username,
}
})
m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
m.get(f'https://app.valohai.com/api/v0/projects/{project_id}/', json={
'id': str(project_id),
'yaml_path': 'valohai.yaml',
})
return m
|
Add a mock API path for project details, used in e.g. test_init
|
Add a mock API path for project details, used in e.g. test_init
|
Python
|
mit
|
valohai/valohai-cli
|
from uuid import uuid4
import requests_mock
from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string
def get_project_mock(create_project_name=None, existing_projects=None):
username = get_random_string()
+ project_id = uuid4()
m = requests_mock.mock()
if isinstance(existing_projects, int):
existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
if existing_projects is not None:
m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
if create_project_name:
m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
- 'id': str(uuid4()),
+ 'id': str(project_id),
'name': create_project_name,
'owner': {
'id': 8,
'username': username,
}
})
m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
+ m.get(f'https://app.valohai.com/api/v0/projects/{project_id}/', json={
+ 'id': str(project_id),
+ 'yaml_path': 'valohai.yaml',
+ })
return m
|
Add a mock API path for project details, used in e.g. test_init
|
## Code Before:
from uuid import uuid4
import requests_mock
from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string
def get_project_mock(create_project_name=None, existing_projects=None):
username = get_random_string()
m = requests_mock.mock()
if isinstance(existing_projects, int):
existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
if existing_projects is not None:
m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
if create_project_name:
m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
'id': str(uuid4()),
'name': create_project_name,
'owner': {
'id': 8,
'username': username,
}
})
m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
return m
## Instruction:
Add a mock API path for project details, used in e.g. test_init
## Code After:
from uuid import uuid4
import requests_mock
from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string
def get_project_mock(create_project_name=None, existing_projects=None):
username = get_random_string()
project_id = uuid4()
m = requests_mock.mock()
if isinstance(existing_projects, int):
existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
if existing_projects is not None:
m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
if create_project_name:
m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
'id': str(project_id),
'name': create_project_name,
'owner': {
'id': 8,
'username': username,
}
})
m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
m.get(f'https://app.valohai.com/api/v0/projects/{project_id}/', json={
'id': str(project_id),
'yaml_path': 'valohai.yaml',
})
return m
|
from uuid import uuid4
import requests_mock
from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string
def get_project_mock(create_project_name=None, existing_projects=None):
username = get_random_string()
+ project_id = uuid4()
m = requests_mock.mock()
if isinstance(existing_projects, int):
existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
if existing_projects is not None:
m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
if create_project_name:
m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
- 'id': str(uuid4()),
? ^^ ---
+ 'id': str(project_id),
? ^^^^^^^^
'name': create_project_name,
'owner': {
'id': 8,
'username': username,
}
})
m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
+ m.get(f'https://app.valohai.com/api/v0/projects/{project_id}/', json={
+ 'id': str(project_id),
+ 'yaml_path': 'valohai.yaml',
+ })
return m
|
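
A compact, runnable demonstration (assuming requests and requests_mock are installed) of how the added f-string route answers a details request; the UUID is arbitrary. The f-string syntax also pins this code to Python 3.6+:

import requests
import requests_mock

project_id = '123e4567-e89b-12d3-a456-426655440000'
with requests_mock.mock() as m:
    m.get(f'https://app.valohai.com/api/v0/projects/{project_id}/',
          json={'id': project_id, 'yaml_path': 'valohai.yaml'})
    resp = requests.get(f'https://app.valohai.com/api/v0/projects/{project_id}/')
    assert resp.json()['yaml_path'] == 'valohai.yaml'
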
ea62a1cd9642dbff69cbfae3f8b540604a8a8fca
|
mine/__init__.py
|
mine/__init__.py
|
"""Package for mine."""
import sys
__project__ = 'mine'
__version__ = '0.1'
CLI = 'mine'
VERSION = __project__ + '-' + __version__
DESCRIPTION = "Manages running applications across multiple computers."
PYTHON_VERSION = 3, 3
if not sys.version_info >= PYTHON_VERSION: # pragma: no cover (manual test)
exit("Python {}.{}+ is required.".format(*PYTHON_VERSION))
|
"""Package for mine."""
import sys
__project__ = 'mine'
__version__ = '0.1'
CLI = 'mine'
VERSION = __project__ + '-' + __version__
DESCRIPTION = "For applications that haven't learned to share."
PYTHON_VERSION = 3, 3
if not sys.version_info >= PYTHON_VERSION: # pragma: no cover (manual test)
exit("Python {}.{}+ is required.".format(*PYTHON_VERSION))
|
Update headline to match GitHub
|
Update headline to match GitHub
|
Python
|
mit
|
jacebrowning/mine
|
"""Package for mine."""
import sys
__project__ = 'mine'
__version__ = '0.1'
CLI = 'mine'
VERSION = __project__ + '-' + __version__
- DESCRIPTION = "Manages running applications across multiple computers."
+ DESCRIPTION = "For applications that haven't learned to share."
PYTHON_VERSION = 3, 3
if not sys.version_info >= PYTHON_VERSION: # pragma: no cover (manual test)
exit("Python {}.{}+ is required.".format(*PYTHON_VERSION))
|
Update headline to match GitHub
|
## Code Before:
"""Package for mine."""
import sys
__project__ = 'mine'
__version__ = '0.1'
CLI = 'mine'
VERSION = __project__ + '-' + __version__
DESCRIPTION = "Manages running applications across multiple computers."
PYTHON_VERSION = 3, 3
if not sys.version_info >= PYTHON_VERSION: # pragma: no cover (manual test)
exit("Python {}.{}+ is required.".format(*PYTHON_VERSION))
## Instruction:
Update headline to match GitHub
## Code After:
"""Package for mine."""
import sys
__project__ = 'mine'
__version__ = '0.1'
CLI = 'mine'
VERSION = __project__ + '-' + __version__
DESCRIPTION = "For applications that haven't learned to share."
PYTHON_VERSION = 3, 3
if not sys.version_info >= PYTHON_VERSION: # pragma: no cover (manual test)
exit("Python {}.{}+ is required.".format(*PYTHON_VERSION))
|
"""Package for mine."""
import sys
__project__ = 'mine'
__version__ = '0.1'
CLI = 'mine'
VERSION = __project__ + '-' + __version__
- DESCRIPTION = "Manages running applications across multiple computers."
+ DESCRIPTION = "For applications that haven't learned to share."
PYTHON_VERSION = 3, 3
if not sys.version_info >= PYTHON_VERSION: # pragma: no cover (manual test)
exit("Python {}.{}+ is required.".format(*PYTHON_VERSION))
|
4b6c27e02667fe6f5208b5b5dfa1f5dafe112c30
|
luigi/tasks/export/search/chunk.py
|
luigi/tasks/export/search/chunk.py
|
import os
import luigi
from luigi import LocalTarget
from tasks.config import db
from tasks.config import output
from rnacentral.db import cursor
from rnacentral.search import exporter
class SearchChunkTask(luigi.Task): # pylint: disable=R0904
"""
This is a task that will create an xml export for the given range of UPI's.
"""
min = luigi.IntParameter()
max = luigi.IntParameter()
def output(self):
config = output()
filepattern = 'xml_export_{min}_{max}.xml'.format(
min=self.min,
max=self.max,
)
filename = os.path.join(config.search_files, filepattern)
return LocalTarget(filename)
def run(self):
with self.output().open('w') as raw:
with cursor(db()) as cur:
results = exporter.range(cur, self.min, self.max)
exporter.write(raw, results)
|
import os
import luigi
from luigi import LocalTarget
from tasks.config import db
from tasks.config import output
from rnacentral.search import exporter
class SearchChunkTask(luigi.Task): # pylint: disable=R0904
"""
This is a task that will create an xml export for the given range of UPI's.
"""
min = luigi.IntParameter()
max = luigi.IntParameter()
def output(self):
config = output()
filepattern = 'xml_export_{min}_{max}.xml'.format(
min=self.min,
max=self.max,
)
filename = os.path.join(config.search_files, filepattern)
return LocalTarget(filename)
def run(self):
with self.output().open('w') as raw:
results = exporter.range(db(), self.min, self.max)
exporter.write(raw, results)
|
Use new psql based export
|
Use new psql based export
This changes the function arguments to use the database configuration directly instead of a cursor.
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
|
import os
import luigi
from luigi import LocalTarget
from tasks.config import db
from tasks.config import output
- from rnacentral.db import cursor
from rnacentral.search import exporter
class SearchChunkTask(luigi.Task): # pylint: disable=R0904
"""
This is a task that will create an xml export for the given range of UPI's.
"""
min = luigi.IntParameter()
max = luigi.IntParameter()
def output(self):
config = output()
filepattern = 'xml_export_{min}_{max}.xml'.format(
min=self.min,
max=self.max,
)
filename = os.path.join(config.search_files, filepattern)
return LocalTarget(filename)
def run(self):
with self.output().open('w') as raw:
- with cursor(db()) as cur:
- results = exporter.range(cur, self.min, self.max)
+ results = exporter.range(db(), self.min, self.max)
- exporter.write(raw, results)
+ exporter.write(raw, results)
|
Use new psql based export
|
## Code Before:
import os
import luigi
from luigi import LocalTarget
from tasks.config import db
from tasks.config import output
from rnacentral.db import cursor
from rnacentral.search import exporter
class SearchChunkTask(luigi.Task): # pylint: disable=R0904
"""
This is a task that will create an xml export for the given range of UPI's.
"""
min = luigi.IntParameter()
max = luigi.IntParameter()
def output(self):
config = output()
filepattern = 'xml_export_{min}_{max}.xml'.format(
min=self.min,
max=self.max,
)
filename = os.path.join(config.search_files, filepattern)
return LocalTarget(filename)
def run(self):
with self.output().open('w') as raw:
with cursor(db()) as cur:
results = exporter.range(cur, self.min, self.max)
exporter.write(raw, results)
## Instruction:
Use new psql based export
## Code After:
import os
import luigi
from luigi import LocalTarget
from tasks.config import db
from tasks.config import output
from rnacentral.search import exporter
class SearchChunkTask(luigi.Task): # pylint: disable=R0904
"""
This is a task that will create an xml export for the given range of UPI's.
"""
min = luigi.IntParameter()
max = luigi.IntParameter()
def output(self):
config = output()
filepattern = 'xml_export_{min}_{max}.xml'.format(
min=self.min,
max=self.max,
)
filename = os.path.join(config.search_files, filepattern)
return LocalTarget(filename)
def run(self):
with self.output().open('w') as raw:
results = exporter.range(db(), self.min, self.max)
exporter.write(raw, results)
|
import os
import luigi
from luigi import LocalTarget
from tasks.config import db
from tasks.config import output
- from rnacentral.db import cursor
from rnacentral.search import exporter
class SearchChunkTask(luigi.Task): # pylint: disable=R0904
"""
This is a task that will create an xml export for the given range of UPI's.
"""
min = luigi.IntParameter()
max = luigi.IntParameter()
def output(self):
config = output()
filepattern = 'xml_export_{min}_{max}.xml'.format(
min=self.min,
max=self.max,
)
filename = os.path.join(config.search_files, filepattern)
return LocalTarget(filename)
def run(self):
with self.output().open('w') as raw:
- with cursor(db()) as cur:
- results = exporter.range(cur, self.min, self.max)
? ---- ^^^
+ results = exporter.range(db(), self.min, self.max)
? ^^^^
- exporter.write(raw, results)
? ----
+ exporter.write(raw, results)
|
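A sketch of driving the task from Python; the UPI range values are invented, and luigi.build with a local scheduler is the usual entry point (the project's db/output config must still be in place):

import luigi

# Run one export chunk locally (range values assumed):
luigi.build([SearchChunkTask(min=0, max=100000)], local_scheduler=True)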
a6f291a3beb7ecb7d67b81fe92e7cca6db2139dc
|
example_scraper.py
|
example_scraper.py
|
import json
import requests
API = 'http://localhost:8000/api/1.0'
AUTH_PARAMS = {
'email': '[email protected]',
'api_key': 'edfe6c5ffd1be4d3bf22f69188ac6bc0fc04c84b'
}
# Create dataset
dataset = {
'name': 'Test Dataset from API',
'schema': [{
'column': 'A',
'type': 'unicode'
}, {
'column': 'B',
'type': 'unicode'
}, {
'column': 'C',
'type': 'unicode'
}]
}
response = requests.post(API + '/dataset/', json.dumps(dataset), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
dataset = json.loads(response.content)
# Write data
data = { 'objects': [{
'data': ['The', 'PANDA', 'lives.']
}, {
'data': ['More', 'data', 'here.']
}]}
response = requests.put(API + '/dataset/%s/data/' % dataset['slug'], json.dumps(data), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
print response.content
|
import json
import requests
API = 'http://localhost:8000/api/1.0'
AUTH_PARAMS = {
'email': '[email protected]',
'api_key': 'edfe6c5ffd1be4d3bf22f69188ac6bc0fc04c84b'
}
DATASET_SLUG = 'test-dataset'
# Check if dataset exists
response = requests.get(API + '/dataset/%s/' % DATASET_SLUG, params=AUTH_PARAMS)
# Create dataset if necessary
if response.status_code == 404:
dataset = {
'name': 'Test Dataset from API',
'schema': [{
'column': 'A',
'type': 'unicode'
}, {
'column': 'B',
'type': 'unicode'
}, {
'column': 'C',
'type': 'unicode'
}]
}
response = requests.put(API + '/dataset/%s/' % DATASET_SLUG, json.dumps(dataset), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
# Write data
data = { 'objects': [{
'data': ['The', 'PANDA', 'lives.']
}, {
'data': ['More', 'data', 'here.']
}]}
response = requests.put(API + '/dataset/%s/data/' % DATASET_SLUG, json.dumps(data), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
|
Update example scraper to use known slug.
|
Update example scraper to use known slug.
|
Python
|
mit
|
PalmBeachPost/panda,pandaproject/panda,PalmBeachPost/panda,newsapps/panda,ibrahimcesar/panda,pandaproject/panda,NUKnightLab/panda,ibrahimcesar/panda,NUKnightLab/panda,datadesk/panda,PalmBeachPost/panda,PalmBeachPost/panda,NUKnightLab/panda,ibrahimcesar/panda,datadesk/panda,ibrahimcesar/panda,pandaproject/panda,newsapps/panda,NUKnightLab/panda,datadesk/panda,pandaproject/panda,ibrahimcesar/panda,pandaproject/panda,datadesk/panda,newsapps/panda,PalmBeachPost/panda,newsapps/panda,datadesk/panda
|
import json
import requests
API = 'http://localhost:8000/api/1.0'
AUTH_PARAMS = {
'email': '[email protected]',
'api_key': 'edfe6c5ffd1be4d3bf22f69188ac6bc0fc04c84b'
}
+ DATASET_SLUG = 'test-dataset'
+ # Check if dataset exists
+ response = requests.get(API + '/dataset/%s/' % DATASET_SLUG, params=AUTH_PARAMS)
- # Create dataset
- dataset = {
- 'name': 'Test Dataset from API',
- 'schema': [{
- 'column': 'A',
- 'type': 'unicode'
- }, {
- 'column': 'B',
- 'type': 'unicode'
- }, {
- 'column': 'C',
- 'type': 'unicode'
- }]
- }
- response = requests.post(API + '/dataset/', json.dumps(dataset), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
+ # Create dataset if necessary
+ if response.status_code == 404:
+ dataset = {
+ 'name': 'Test Dataset from API',
+ 'schema': [{
+ 'column': 'A',
+ 'type': 'unicode'
+ }, {
+ 'column': 'B',
+ 'type': 'unicode'
+ }, {
+ 'column': 'C',
+ 'type': 'unicode'
+ }]
+ }
- dataset = json.loads(response.content)
+ response = requests.put(API + '/dataset/%s/' % DATASET_SLUG, json.dumps(dataset), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
# Write data
data = { 'objects': [{
'data': ['The', 'PANDA', 'lives.']
}, {
'data': ['More', 'data', 'here.']
}]}
- response = requests.put(API + '/dataset/%s/data/' % dataset['slug'], json.dumps(data), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
+ response = requests.put(API + '/dataset/%s/data/' % DATASET_SLUG, json.dumps(data), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
- print response.content
|
Update example scraper to use known slug.
|
## Code Before:
import json
import requests
API = 'http://localhost:8000/api/1.0'
AUTH_PARAMS = {
'email': '[email protected]',
'api_key': 'edfe6c5ffd1be4d3bf22f69188ac6bc0fc04c84b'
}
# Create dataset
dataset = {
'name': 'Test Dataset from API',
'schema': [{
'column': 'A',
'type': 'unicode'
}, {
'column': 'B',
'type': 'unicode'
}, {
'column': 'C',
'type': 'unicode'
}]
}
response = requests.post(API + '/dataset/', json.dumps(dataset), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
dataset = json.loads(response.content)
# Write data
data = { 'objects': [{
'data': ['The', 'PANDA', 'lives.']
}, {
'data': ['More', 'data', 'here.']
}]}
response = requests.put(API + '/dataset/%s/data/' % dataset['slug'], json.dumps(data), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
print response.content
## Instruction:
Update example scraper to use known slug.
## Code After:
import json
import requests
API = 'http://localhost:8000/api/1.0'
AUTH_PARAMS = {
'email': '[email protected]',
'api_key': 'edfe6c5ffd1be4d3bf22f69188ac6bc0fc04c84b'
}
DATASET_SLUG = 'test-dataset'
# Check if dataset exists
response = requests.get(API + '/dataset/%s/' % DATASET_SLUG, params=AUTH_PARAMS)
# Create dataset if necessary
if response.status_code == 404:
dataset = {
'name': 'Test Dataset from API',
'schema': [{
'column': 'A',
'type': 'unicode'
}, {
'column': 'B',
'type': 'unicode'
}, {
'column': 'C',
'type': 'unicode'
}]
}
response = requests.put(API + '/dataset/%s/' % DATASET_SLUG, json.dumps(dataset), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
# Write data
data = { 'objects': [{
'data': ['The', 'PANDA', 'lives.']
}, {
'data': ['More', 'data', 'here.']
}]}
response = requests.put(API + '/dataset/%s/data/' % DATASET_SLUG, json.dumps(data), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
|
import json
import requests
API = 'http://localhost:8000/api/1.0'
AUTH_PARAMS = {
'email': '[email protected]',
'api_key': 'edfe6c5ffd1be4d3bf22f69188ac6bc0fc04c84b'
}
+ DATASET_SLUG = 'test-dataset'
+ # Check if dataset exists
+ response = requests.get(API + '/dataset/%s/' % DATASET_SLUG, params=AUTH_PARAMS)
- # Create dataset
- dataset = {
- 'name': 'Test Dataset from API',
- 'schema': [{
- 'column': 'A',
- 'type': 'unicode'
- }, {
- 'column': 'B',
- 'type': 'unicode'
- }, {
- 'column': 'C',
- 'type': 'unicode'
- }]
- }
- response = requests.post(API + '/dataset/', json.dumps(dataset), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
+ # Create dataset if necessary
+ if response.status_code == 404:
+ dataset = {
+ 'name': 'Test Dataset from API',
+ 'schema': [{
+ 'column': 'A',
+ 'type': 'unicode'
+ }, {
+ 'column': 'B',
+ 'type': 'unicode'
+ }, {
+ 'column': 'C',
+ 'type': 'unicode'
+ }]
+ }
- dataset = json.loads(response.content)
+ response = requests.put(API + '/dataset/%s/' % DATASET_SLUG, json.dumps(dataset), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
# Write data
data = { 'objects': [{
'data': ['The', 'PANDA', 'lives.']
}, {
'data': ['More', 'data', 'here.']
}]}
- response = requests.put(API + '/dataset/%s/data/' % dataset['slug'], json.dumps(data), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
? ^^^^^^^^^^^^^^^
+ response = requests.put(API + '/dataset/%s/data/' % DATASET_SLUG, json.dumps(data), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
? ^^^^^^^^^^^^
- print response.content
|
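The revised scraper keys both PUTs on a fixed slug, which makes the calls idempotent. A hedged follow-up check, not part of the original script, could guard the final write:

# Acceptable status codes are assumed here:
if response.status_code not in (200, 201, 202):
    raise RuntimeError('PANDA write failed: %s' % response.status_code)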
10ddda3e230aa72889c81cd69792122b265010fe
|
rental/views/rental_state_view.py
|
rental/views/rental_state_view.py
|
from django.http import HttpResponseForbidden
from django.shortcuts import redirect, get_object_or_404
from django.views import View
from rental.state_transitions import allowed_transitions
from rental.models import Rental
class RentalStateView(View):
"""
Change the state of a given rental
If given an invalid state, this shows a 403 Forbidden response.
:author: Florian Stamer
"""
def post(self, request, rental_uuid):
rental = get_object_or_404(Rental, pk=rental_uuid)
managed_by_user = rental.depot.managed_by(request.user)
data = request.POST
state = data.get('state')
old_state = data.get('old_state')
# message = data.get('message')
if old_state != rental.state:
return HttpResponseForbidden('The state of the rental request has changed')
if state not in allowed_transitions(managed_by_user, rental.state):
return HttpResponseForbidden('Invalid state transition')
rental.state = state
rental.save()
return redirect('rental:detail', rental_uuid=rental.uuid)
|
from django.http import HttpResponseForbidden
from django.shortcuts import redirect, get_object_or_404
from django.views import View
from rental.availability import Availability
from rental.state_transitions import allowed_transitions
from rental.models import Rental
class RentalStateView(View):
"""
Change the state of a given rental
If given an invalid state, this shows a 403 Forbidden response.
:author: Florian Stamer
"""
def check_availability(self, rental):
availability = Availability(rental.start_date, rental.return_date, rental.depot_id)
for item_rental in rental.itemrental_set:
intervals = availability.get_availability_intervals(item_rental.item)
available = availability.get_minimum_availability(intervals)
if item_rental.quantity > available:
raise ValidationError({
'quantity': 'The quantity must not exceed the availability '
'of the item in the requested time frame.'
})
def post(self, request, rental_uuid):
rental = get_object_or_404(Rental, pk=rental_uuid)
managed_by_user = rental.depot.managed_by(request.user)
data = request.POST
state = data.get('state')
old_state = data.get('old_state')
# message = data.get('message')
if old_state != rental.state:
return HttpResponseForbidden('The state of the rental request has changed')
if state not in allowed_transitions(managed_by_user, rental.state):
return HttpResponseForbidden('Invalid state transition')
if state == Rental.STATE_APPROVED:
self.check_availability(rental)
rental.state = state
rental.save()
return redirect('rental:detail', rental_uuid=rental.uuid)
|
Check availability when approving rental request
|
Check availability when approving rental request
|
Python
|
agpl-3.0
|
verleihtool/verleihtool,verleihtool/verleihtool,verleihtool/verleihtool,verleihtool/verleihtool
|
from django.http import HttpResponseForbidden
from django.shortcuts import redirect, get_object_or_404
from django.views import View
+ from rental.availability import Availability
from rental.state_transitions import allowed_transitions
from rental.models import Rental
class RentalStateView(View):
"""
Change the state of a given rental
If given an invalid state, this shows a 403 Forbidden response.
:author: Florian Stamer
"""
+
+ def check_availability(self, rental):
+ availability = Availability(rental.start_date, rental.return_date, rental.depot_id)
+
+ for item_rental in rental.itemrental_set:
+ intervals = availability.get_availability_intervals(item_rental.item)
+ available = availability.get_minimum_availability(intervals)
+
+ if item_rental.quantity > available:
+ raise ValidationError({
+ 'quantity': 'The quantity must not exceed the availability '
+ 'of the item in the requested time frame.'
+ })
def post(self, request, rental_uuid):
rental = get_object_or_404(Rental, pk=rental_uuid)
managed_by_user = rental.depot.managed_by(request.user)
data = request.POST
state = data.get('state')
old_state = data.get('old_state')
# message = data.get('message')
if old_state != rental.state:
return HttpResponseForbidden('The state of the rental request has changed')
if state not in allowed_transitions(managed_by_user, rental.state):
return HttpResponseForbidden('Invalid state transition')
+ if state == Rental.STATE_APPROVED:
+ self.check_availability(rental)
+
rental.state = state
rental.save()
return redirect('rental:detail', rental_uuid=rental.uuid)
|
Check availability when approving rental request
|
## Code Before:
from django.http import HttpResponseForbidden
from django.shortcuts import redirect, get_object_or_404
from django.views import View
from rental.state_transitions import allowed_transitions
from rental.models import Rental
class RentalStateView(View):
"""
Change the state of a given rental
If given an invalid state, this shows a 403 Forbidden response.
:author: Florian Stamer
"""
def post(self, request, rental_uuid):
rental = get_object_or_404(Rental, pk=rental_uuid)
managed_by_user = rental.depot.managed_by(request.user)
data = request.POST
state = data.get('state')
old_state = data.get('old_state')
# message = data.get('message')
if old_state != rental.state:
return HttpResponseForbidden('The state of the rental request has changed')
if state not in allowed_transitions(managed_by_user, rental.state):
return HttpResponseForbidden('Invalid state transition')
rental.state = state
rental.save()
return redirect('rental:detail', rental_uuid=rental.uuid)
## Instruction:
Check availability when approving rental request
## Code After:
from django.http import HttpResponseForbidden
from django.shortcuts import redirect, get_object_or_404
from django.views import View
from rental.availability import Availability
from rental.state_transitions import allowed_transitions
from rental.models import Rental
class RentalStateView(View):
"""
Change the state of a given rental
If given an invalid state, this shows a 403 Forbidden response.
:author: Florian Stamer
"""
def check_availability(self, rental):
availability = Availability(rental.start_date, rental.return_date, rental.depot_id)
for item_rental in rental.itemrental_set:
intervals = availability.get_availability_intervals(item_rental.item)
available = availability.get_minimum_availability(intervals)
if item_rental.quantity > available:
raise ValidationError({
'quantity': 'The quantity must not exceed the availability '
'of the item in the requested time frame.'
})
def post(self, request, rental_uuid):
rental = get_object_or_404(Rental, pk=rental_uuid)
managed_by_user = rental.depot.managed_by(request.user)
data = request.POST
state = data.get('state')
old_state = data.get('old_state')
# message = data.get('message')
if old_state != rental.state:
return HttpResponseForbidden('The state of the rental request has changed')
if state not in allowed_transitions(managed_by_user, rental.state):
return HttpResponseForbidden('Invalid state transition')
if state == Rental.STATE_APPROVED:
self.check_availability(rental)
rental.state = state
rental.save()
return redirect('rental:detail', rental_uuid=rental.uuid)
|
from django.http import HttpResponseForbidden
from django.shortcuts import redirect, get_object_or_404
from django.views import View
+ from rental.availability import Availability
from rental.state_transitions import allowed_transitions
from rental.models import Rental
class RentalStateView(View):
"""
Change the state of a given rental
If given an invalid state, this shows a 403 Forbidden response.
:author: Florian Stamer
"""
+
+ def check_availability(self, rental):
+ availability = Availability(rental.start_date, rental.return_date, rental.depot_id)
+
+ for item_rental in rental.itemrental_set:
+ intervals = availability.get_availability_intervals(item_rental.item)
+ available = availability.get_minimum_availability(intervals)
+
+ if item_rental.quantity > available:
+ raise ValidationError({
+ 'quantity': 'The quantity must not exceed the availability '
+ 'of the item in the requested time frame.'
+ })
def post(self, request, rental_uuid):
rental = get_object_or_404(Rental, pk=rental_uuid)
managed_by_user = rental.depot.managed_by(request.user)
data = request.POST
state = data.get('state')
old_state = data.get('old_state')
# message = data.get('message')
if old_state != rental.state:
return HttpResponseForbidden('The state of the rental request has changed')
if state not in allowed_transitions(managed_by_user, rental.state):
return HttpResponseForbidden('Invalid state transition')
+ if state == Rental.STATE_APPROVED:
+ self.check_availability(rental)
+
rental.state = state
rental.save()
return redirect('rental:detail', rental_uuid=rental.uuid)
|
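Two apparent gaps in the new check_availability: ValidationError is raised but never imported, and a Django reverse relation manager is not directly iterable. A hedged corrected sketch (the import path and the .all() call are assumptions):

from django.core.exceptions import ValidationError  # missing from the diff

def check_availability(self, rental):
    availability = Availability(rental.start_date, rental.return_date, rental.depot_id)
    for item_rental in rental.itemrental_set.all():  # .all() makes the manager iterable
        intervals = availability.get_availability_intervals(item_rental.item)
        available = availability.get_minimum_availability(intervals)
        if item_rental.quantity > available:
            raise ValidationError({
                'quantity': 'The quantity must not exceed the availability '
                            'of the item in the requested time frame.'
            })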
030d425bb2b9b552516957277aebb22806bfc699
|
bills/redis_queue.py
|
bills/redis_queue.py
|
import redis
class RedisQueue(object):
"""Simple Queue with Redis Backend"""
def __init__(self, name, namespace='queue', **redis_kwargs):
"""The default connection parameters are: host='localhost', port=6379, db=0"""
self.db = redis.Redis(**redis_kwargs)
self.key = '%s:%s' %(namespace, name)
def qsize(self):
"""Return the approximate size of the queue."""
return self.db.llen(self.key)
def empty(self):
"""Return True if the queue is empty, False otherwise."""
return self.qsize() == 0
def put(self, item):
"""Put item into the queue."""
self.db.rpush(self.key, item)
def get(self, block=True, timeout=None):
"""Remove and return an item from the queue.
If optional args block is true and timeout is None (the default), block
if necessary until an item is available."""
if block:
item = self.db.blpop(self.key, timeout=timeout)
else:
item = self.db.lpop(self.key)
if item:
item = item[1]
return item
def get_nowait(self):
"""Equivalent to get(False)."""
return self.get(False)
def __iter__(self):
return self
def next(self):
item = self.get(False)
if item is None:
raise StopIteration
return item
|
import redis
class RedisQueue(object):
"""Simple Queue with Redis Backend"""
def __init__(self, name, namespace='queue', **redis_kwargs):
"""The default connection parameters are: host='localhost', port=6379, db=0"""
self.db = redis.Redis(**redis_kwargs)
self.key = '%s:%s' %(namespace, name)
def qsize(self):
"""Return the approximate size of the queue."""
return self.db.llen(self.key)
def empty(self):
"""Return True if the queue is empty, False otherwise."""
return self.qsize() == 0
def put(self, item):
"""Put item into the queue."""
self.db.rpush(self.key, item)
def get(self, block=True, timeout=None):
"""Remove and return an item from the queue.
If optional args block is true and timeout is None (the default), block
if necessary until an item is available."""
if block:
item = self.db.blpop(self.key, timeout=timeout)
if item:
item = item[1]
else:
item = self.db.lpop(self.key)
return item
def get_nowait(self):
"""Equivalent to get(False)."""
return self.get(False)
def __iter__(self):
return self
def next(self):
item = self.get(False)
if item is None:
raise StopIteration
return item
|
Fix a bug in redis queue
|
Fix a bug in redis queue
|
Python
|
agpl-3.0
|
teampopong/crawlers,majorika/crawlers,majorika/crawlers,lexifdev/crawlers,lexifdev/crawlers,teampopong/crawlers
|
import redis
class RedisQueue(object):
"""Simple Queue with Redis Backend"""
def __init__(self, name, namespace='queue', **redis_kwargs):
"""The default connection parameters are: host='localhost', port=6379, db=0"""
self.db = redis.Redis(**redis_kwargs)
self.key = '%s:%s' %(namespace, name)
def qsize(self):
"""Return the approximate size of the queue."""
return self.db.llen(self.key)
def empty(self):
"""Return True if the queue is empty, False otherwise."""
return self.qsize() == 0
def put(self, item):
"""Put item into the queue."""
self.db.rpush(self.key, item)
def get(self, block=True, timeout=None):
"""Remove and return an item from the queue.
If optional args block is true and timeout is None (the default), block
if necessary until an item is available."""
if block:
item = self.db.blpop(self.key, timeout=timeout)
+ if item:
+ item = item[1]
else:
item = self.db.lpop(self.key)
- if item:
- item = item[1]
return item
def get_nowait(self):
"""Equivalent to get(False)."""
return self.get(False)
def __iter__(self):
return self
def next(self):
item = self.get(False)
if item is None:
raise StopIteration
return item
|
Fix a bug in redis queue
|
## Code Before:
import redis
class RedisQueue(object):
"""Simple Queue with Redis Backend"""
def __init__(self, name, namespace='queue', **redis_kwargs):
"""The default connection parameters are: host='localhost', port=6379, db=0"""
self.db = redis.Redis(**redis_kwargs)
self.key = '%s:%s' %(namespace, name)
def qsize(self):
"""Return the approximate size of the queue."""
return self.db.llen(self.key)
def empty(self):
"""Return True if the queue is empty, False otherwise."""
return self.qsize() == 0
def put(self, item):
"""Put item into the queue."""
self.db.rpush(self.key, item)
def get(self, block=True, timeout=None):
"""Remove and return an item from the queue.
If optional args block is true and timeout is None (the default), block
if necessary until an item is available."""
if block:
item = self.db.blpop(self.key, timeout=timeout)
else:
item = self.db.lpop(self.key)
if item:
item = item[1]
return item
def get_nowait(self):
"""Equivalent to get(False)."""
return self.get(False)
def __iter__(self):
return self
def next(self):
item = self.get(False)
if item is None:
raise StopIteration
return item
## Instruction:
Fix a bug in redis queue
## Code After:
import redis
class RedisQueue(object):
"""Simple Queue with Redis Backend"""
def __init__(self, name, namespace='queue', **redis_kwargs):
"""The default connection parameters are: host='localhost', port=6379, db=0"""
self.db = redis.Redis(**redis_kwargs)
self.key = '%s:%s' %(namespace, name)
def qsize(self):
"""Return the approximate size of the queue."""
return self.db.llen(self.key)
def empty(self):
"""Return True if the queue is empty, False otherwise."""
return self.qsize() == 0
def put(self, item):
"""Put item into the queue."""
self.db.rpush(self.key, item)
def get(self, block=True, timeout=None):
"""Remove and return an item from the queue.
If optional args block is true and timeout is None (the default), block
if necessary until an item is available."""
if block:
item = self.db.blpop(self.key, timeout=timeout)
if item:
item = item[1]
else:
item = self.db.lpop(self.key)
return item
def get_nowait(self):
"""Equivalent to get(False)."""
return self.get(False)
def __iter__(self):
return self
def next(self):
item = self.get(False)
if item is None:
raise StopIteration
return item
|
import redis
class RedisQueue(object):
"""Simple Queue with Redis Backend"""
def __init__(self, name, namespace='queue', **redis_kwargs):
"""The default connection parameters are: host='localhost', port=6379, db=0"""
self.db = redis.Redis(**redis_kwargs)
self.key = '%s:%s' %(namespace, name)
def qsize(self):
"""Return the approximate size of the queue."""
return self.db.llen(self.key)
def empty(self):
"""Return True if the queue is empty, False otherwise."""
return self.qsize() == 0
def put(self, item):
"""Put item into the queue."""
self.db.rpush(self.key, item)
def get(self, block=True, timeout=None):
"""Remove and return an item from the queue.
If optional args block is true and timeout is None (the default), block
if necessary until an item is available."""
if block:
item = self.db.blpop(self.key, timeout=timeout)
+ if item:
+ item = item[1]
else:
item = self.db.lpop(self.key)
- if item:
- item = item[1]
return item
def get_nowait(self):
"""Equivalent to get(False)."""
return self.get(False)
def __iter__(self):
return self
def next(self):
item = self.get(False)
if item is None:
raise StopIteration
return item
|
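The fix matters because redis-py's blpop returns a (key, value) tuple while lpop returns the bare value, so the tuple unpacking belongs only in the blocking branch. A usage sketch with the default connection parameters:

q = RedisQueue('bills')      # host='localhost', port=6379, db=0 by default
q.put('test-item')
print(q.get(timeout=1))      # blpop returned (key, value); only the value comes back
# Note: the next() method follows the Python 2 iterator protocol;
# Python 3 would need it spelled __next__.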
d3e2a11f72f6de811f39ac10aa0abde74b99d269
|
hcibench/pipeline/__init__.py
|
hcibench/pipeline/__init__.py
|
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
|
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor, Estimator
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
|
Make Estimator importable from pipeline.
|
Make Estimator importable from pipeline.
|
Python
|
mit
|
ucdrascal/axopy,ucdrascal/hcibench
|
from .base import PipelineBlock, Pipeline, PassthroughPipeline
- from .common import Windower, Filter, FeatureExtractor
+ from .common import Windower, Filter, FeatureExtractor, Estimator
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
|
Make Estimator importable from pipeline.
|
## Code Before:
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
## Instruction:
Make Estimator importable from pipeline.
## Code After:
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor, Estimator
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
|
from .base import PipelineBlock, Pipeline, PassthroughPipeline
- from .common import Windower, Filter, FeatureExtractor
+ from .common import Windower, Filter, FeatureExtractor, Estimator
? +++++++++++
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
|
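With the re-export in place, downstream code can pull the block from the package itself; a one-line sketch (package path taken from the file location):

from hcibench.pipeline import Estimator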
08331a081713f880d5eca4fb7b18f4c61e360132
|
tests/skipif_markers.py
|
tests/skipif_markers.py
|
import pytest
import os
try:
os.environ[u'TRAVIS']
except KeyError:
travis = False
else:
travis = True
try:
os.environ[u'DISABLE_NETWORK_TESTS']
except KeyError:
no_network = False
else:
no_network = True
# For some reason pytest incorrectly uses the first reason text regardless of
# which condition matches. Using a unified message for now
# travis_reason = 'Works locally with tox but fails on Travis.'
# no_network_reason = 'Needs a network connection to GitHub.'
reason = (
'Fails on Travis or else there is no network connection to '
'GitHub/Bitbucket.'
)
skipif_travis = pytest.mark.skipif(travis, reason=reason)
skipif_no_network = pytest.mark.skipif(no_network, reason=reason)
|
import pytest
import os
try:
os.environ[u'TRAVIS']
except KeyError:
travis = False
else:
travis = True
try:
os.environ[u'DISABLE_NETWORK_TESTS']
except KeyError:
no_network = False
else:
no_network = True
skipif_travis = pytest.mark.skipif(
travis, reason='Works locally with tox but fails on Travis.'
)
skipif_no_network = pytest.mark.skipif(
no_network, reason='Needs a network connection to GitHub/Bitbucket.'
)
|
Revert skipif markers to use correct reasons (bug fixed in pytest)
|
Revert skipif markers to use correct reasons (bug fixed in pytest)
|
Python
|
bsd-3-clause
|
hackebrot/cookiecutter,michaeljoseph/cookiecutter,willingc/cookiecutter,stevepiercy/cookiecutter,pjbull/cookiecutter,stevepiercy/cookiecutter,audreyr/cookiecutter,terryjbates/cookiecutter,luzfcb/cookiecutter,dajose/cookiecutter,dajose/cookiecutter,michaeljoseph/cookiecutter,Springerle/cookiecutter,terryjbates/cookiecutter,willingc/cookiecutter,pjbull/cookiecutter,luzfcb/cookiecutter,audreyr/cookiecutter,Springerle/cookiecutter,hackebrot/cookiecutter
|
import pytest
import os
try:
os.environ[u'TRAVIS']
except KeyError:
travis = False
else:
travis = True
try:
os.environ[u'DISABLE_NETWORK_TESTS']
except KeyError:
no_network = False
else:
no_network = True
+ skipif_travis = pytest.mark.skipif(
- # For some reason pytest incorrectly uses the first reason text regardless of
- # which condition matches. Using a unified message for now
- # travis_reason = 'Works locally with tox but fails on Travis.'
+ travis, reason='Works locally with tox but fails on Travis.'
- # no_network_reason = 'Needs a network connection to GitHub.'
- reason = (
- 'Fails on Travis or else there is no network connection to '
- 'GitHub/Bitbucket.'
)
- skipif_travis = pytest.mark.skipif(travis, reason=reason)
- skipif_no_network = pytest.mark.skipif(no_network, reason=reason)
+ skipif_no_network = pytest.mark.skipif(
+ no_network, reason='Needs a network connection to GitHub/Bitbucket.'
+ )
|
Revert skipif markers to use correct reasons (bug fixed in pytest)
|
## Code Before:
import pytest
import os
try:
os.environ[u'TRAVIS']
except KeyError:
travis = False
else:
travis = True
try:
os.environ[u'DISABLE_NETWORK_TESTS']
except KeyError:
no_network = False
else:
no_network = True
# For some reason pytest incorrectly uses the first reason text regardless of
# which condition matches. Using a unified message for now
# travis_reason = 'Works locally with tox but fails on Travis.'
# no_network_reason = 'Needs a network connection to GitHub.'
reason = (
'Fails on Travis or else there is no network connection to '
'GitHub/Bitbucket.'
)
skipif_travis = pytest.mark.skipif(travis, reason=reason)
skipif_no_network = pytest.mark.skipif(no_network, reason=reason)
## Instruction:
Revert skipif markers to use correct reasons (bug fixed in pytest)
## Code After:
import pytest
import os
try:
os.environ[u'TRAVIS']
except KeyError:
travis = False
else:
travis = True
try:
os.environ[u'DISABLE_NETWORK_TESTS']
except KeyError:
no_network = False
else:
no_network = True
skipif_travis = pytest.mark.skipif(
travis, reason='Works locally with tox but fails on Travis.'
)
skipif_no_network = pytest.mark.skipif(
no_network, reason='Needs a network connection to GitHub/Bitbucket.'
)
|
import pytest
import os
try:
os.environ[u'TRAVIS']
except KeyError:
travis = False
else:
travis = True
try:
os.environ[u'DISABLE_NETWORK_TESTS']
except KeyError:
no_network = False
else:
no_network = True
+ skipif_travis = pytest.mark.skipif(
- # For some reason pytest incorrectly uses the first reason text regardless of
- # which condition matches. Using a unified message for now
- # travis_reason = 'Works locally with tox but fails on Travis.'
? ^ ^ - -
+ travis, reason='Works locally with tox but fails on Travis.'
? ^^^ ^^
- # no_network_reason = 'Needs a network connection to GitHub.'
- reason = (
- 'Fails on Travis or else there is no network connection to '
- 'GitHub/Bitbucket.'
)
- skipif_travis = pytest.mark.skipif(travis, reason=reason)
- skipif_no_network = pytest.mark.skipif(no_network, reason=reason)
? --------------------------
+ skipif_no_network = pytest.mark.skipif(
+ no_network, reason='Needs a network connection to GitHub/Bitbucket.'
+ )
|
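A sketch of how the markers are applied in a test module; the test bodies are invented:

from tests.skipif_markers import skipif_travis, skipif_no_network

@skipif_no_network
def test_clone_from_github():
    ...  # runs only when DISABLE_NETWORK_TESTS is unset

@skipif_travis
def test_flaky_on_ci():
    ...  # skipped whenever TRAVIS is set in the environment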
c823a476b265b46d27b221831be952a811fe3468
|
ANN.py
|
ANN.py
|
class Neuron:
pass
class NeuronNetwork:
neurons = []
|
class Neuron:
pass
class NeuronNetwork:
neurons = []
def __init__(self, rows, columns):
self.neurons = []
for row in xrange(rows):
self.neurons.append([])
for column in xrange(columns):
self.neurons[row].append(Neuron())
|
Create 2D list of Neurons in NeuronNetwork's init
|
Create 2D list of Neurons in NeuronNetwork's init
|
Python
|
mit
|
tysonzero/py-ann
|
class Neuron:
pass
class NeuronNetwork:
neurons = []
+ def __init__(self, rows, columns):
+ self.neurons = []
+ for row in xrange(rows):
+ self.neurons.append([])
+ for column in xrange(columns):
+ self.neurons[row].append(Neuron())
+
|
Create 2D list of Neurons in NeuronNetwork's init
|
## Code Before:
class Neuron:
pass
class NeuronNetwork:
neurons = []
## Instruction:
Create 2D list of Neurons in NeuronNetwork's init
## Code After:
class Neuron:
pass
class NeuronNetwork:
neurons = []
def __init__(self, rows, columns):
self.neurons = []
for row in xrange(rows):
self.neurons.append([])
for column in xrange(columns):
self.neurons[row].append(Neuron())
|
class Neuron:
pass
class NeuronNetwork:
neurons = []
+
+ def __init__(self, rows, columns):
+ self.neurons = []
+ for row in xrange(rows):
+ self.neurons.append([])
+ for column in xrange(columns):
+ self.neurons[row].append(Neuron())
|
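The new __init__ relies on Python 2's xrange. A Python 3 rendering of the same construction, sketched as a list comprehension:

class NeuronNetwork:
    def __init__(self, rows, columns):
        # range() replaces xrange(); builds a rows x columns grid of Neurons
        self.neurons = [[Neuron() for _ in range(columns)] for _ in range(rows)]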
c1ed5eb96b04ca0af2ad8f26023d8cbaa4a75eda
|
rx/concurrency/threadpoolscheduler.py
|
rx/concurrency/threadpoolscheduler.py
|
import logging
from concurrent.futures import ThreadPoolExecutor
from rx.core import Scheduler, Disposable
from rx.disposables import SingleAssignmentDisposable, CompositeDisposable
from .timeoutscheduler import TimeoutScheduler
log = logging.getLogger("Rx")
class ThreadPoolScheduler(TimeoutScheduler):
"""A scheduler that schedules work via the thread pool and threading
timers."""
def __init__(self, max_workers=None):
self.executor = ThreadPoolExecutor(max_workers=max_workers)
def schedule(self, action, state=None):
"""Schedules an action to be executed."""
disposable = SingleAssignmentDisposable()
def run():
disposable.disposable = self.invoke_action(action, state)
future = self.executor.submit(run)
def dispose():
future.cancel()
return CompositeDisposable(disposable, Disposable.create(dispose))
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
|
from concurrent.futures import ThreadPoolExecutor
from rx.core import Scheduler
from .newthreadscheduler import NewThreadScheduler
class ThreadPoolScheduler(NewThreadScheduler):
"""A scheduler that schedules work via the thread pool."""
class ThreadPoolThread:
"""Wraps a concurrent future as a thread."""
def __init__(self, executor, run):
self.run = run
self.future = None
self.executor = executor
def start(self):
self.future = self.executor.submit(self.run)
def cancel(self):
self.future.cancel()
def __init__(self, max_workers=None):
super(ThreadPoolScheduler, self).__init__(self.thread_factory)
self.executor = ThreadPoolExecutor(max_workers=max_workers)
def thread_factory(self, target, *args):
return self.ThreadPoolThread(self.executor, target)
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
|
Make thread pool scheduler behave as a pooled new thread scheduler
|
Make thread pool scheduler behave as a pooled new thread scheduler
|
Python
|
mit
|
ReactiveX/RxPY,ReactiveX/RxPY
|
- import logging
from concurrent.futures import ThreadPoolExecutor
- from rx.core import Scheduler, Disposable
+ from rx.core import Scheduler
- from rx.disposables import SingleAssignmentDisposable, CompositeDisposable
- from .timeoutscheduler import TimeoutScheduler
+ from .newthreadscheduler import NewThreadScheduler
-
- log = logging.getLogger("Rx")
- class ThreadPoolScheduler(TimeoutScheduler):
+ class ThreadPoolScheduler(NewThreadScheduler):
- """A scheduler that schedules work via the thread pool and threading
+ """A scheduler that schedules work via the thread pool."""
- timers."""
+
+ class ThreadPoolThread:
+ """Wraps a concurrent future as a thread."""
+
+ def __init__(self, executor, run):
+ self.run = run
+ self.future = None
+ self.executor = executor
+
+ def start(self):
+ self.future = self.executor.submit(self.run)
+
+ def cancel(self):
+ self.future.cancel()
def __init__(self, max_workers=None):
+ super(ThreadPoolScheduler, self).__init__(self.thread_factory)
self.executor = ThreadPoolExecutor(max_workers=max_workers)
+ def thread_factory(self, target, *args):
+ return self.ThreadPoolThread(self.executor, target)
- def schedule(self, action, state=None):
- """Schedules an action to be executed."""
-
- disposable = SingleAssignmentDisposable()
-
- def run():
- disposable.disposable = self.invoke_action(action, state)
- future = self.executor.submit(run)
-
- def dispose():
- future.cancel()
- return CompositeDisposable(disposable, Disposable.create(dispose))
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
|
Make thread pool scheduler behave as a pooled new thread scheduler
|
## Code Before:
import logging
from concurrent.futures import ThreadPoolExecutor
from rx.core import Scheduler, Disposable
from rx.disposables import SingleAssignmentDisposable, CompositeDisposable
from .timeoutscheduler import TimeoutScheduler
log = logging.getLogger("Rx")
class ThreadPoolScheduler(TimeoutScheduler):
"""A scheduler that schedules work via the thread pool and threading
timers."""
def __init__(self, max_workers=None):
self.executor = ThreadPoolExecutor(max_workers=max_workers)
def schedule(self, action, state=None):
"""Schedules an action to be executed."""
disposable = SingleAssignmentDisposable()
def run():
disposable.disposable = self.invoke_action(action, state)
future = self.executor.submit(run)
def dispose():
future.cancel()
return CompositeDisposable(disposable, Disposable.create(dispose))
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
## Instruction:
Make thread pool scheduler behave as a pooled new thread scheduler
## Code After:
from concurrent.futures import ThreadPoolExecutor
from rx.core import Scheduler
from .newthreadscheduler import NewThreadScheduler
class ThreadPoolScheduler(NewThreadScheduler):
"""A scheduler that schedules work via the thread pool."""
class ThreadPoolThread:
"""Wraps a concurrent future as a thread."""
def __init__(self, executor, run):
self.run = run
self.future = None
self.executor = executor
def start(self):
self.future = self.executor.submit(self.run)
def cancel(self):
self.future.cancel()
def __init__(self, max_workers=None):
super(ThreadPoolScheduler, self).__init__(self.thread_factory)
self.executor = ThreadPoolExecutor(max_workers=max_workers)
def thread_factory(self, target, *args):
return self.ThreadPoolThread(self.executor, target)
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
|
- import logging
from concurrent.futures import ThreadPoolExecutor
- from rx.core import Scheduler, Disposable
? ------------
+ from rx.core import Scheduler
- from rx.disposables import SingleAssignmentDisposable, CompositeDisposable
- from .timeoutscheduler import TimeoutScheduler
? ^^ ^^^ ^^ ^^^
+ from .newthreadscheduler import NewThreadScheduler
? +++ ^^ ^^ +++ ^^ ^^
-
- log = logging.getLogger("Rx")
- class ThreadPoolScheduler(TimeoutScheduler):
? ^^ ^^^
+ class ThreadPoolScheduler(NewThreadScheduler):
? +++ ^^ ^^
- """A scheduler that schedules work via the thread pool and threading
? ^^^^^^^^^^^^^^
+ """A scheduler that schedules work via the thread pool."""
? ^^^^
- timers."""
+
+ class ThreadPoolThread:
+ """Wraps a concurrent future as a thread."""
+
+ def __init__(self, executor, run):
+ self.run = run
+ self.future = None
+ self.executor = executor
+
+ def start(self):
+ self.future = self.executor.submit(self.run)
+
+ def cancel(self):
+ self.future.cancel()
def __init__(self, max_workers=None):
+ super(ThreadPoolScheduler, self).__init__(self.thread_factory)
self.executor = ThreadPoolExecutor(max_workers=max_workers)
+ def thread_factory(self, target, *args):
+ return self.ThreadPoolThread(self.executor, target)
- def schedule(self, action, state=None):
- """Schedules an action to be executed."""
-
- disposable = SingleAssignmentDisposable()
-
- def run():
- disposable.disposable = self.invoke_action(action, state)
- future = self.executor.submit(run)
-
- def dispose():
- future.cancel()
- return CompositeDisposable(disposable, Disposable.create(dispose))
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
|
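In RxPY 1.x an action is invoked as action(scheduler, state); a hedged sketch of submitting work to the pooled scheduler defined above:

def action(scheduler, state):
    print('executed on a pool thread with state:', state)

disposable = thread_pool_scheduler.schedule(action, state=42)
# disposable.dispose() would cancel the underlying future if still pending.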
8a28ae1c319f80e56146f1a5029222cb144d4650
|
mempoke.py
|
mempoke.py
|
import gdb
import struct
class DeviceMemory:
def __init__(self):
self.inferior = gdb.selected_inferior()
def __del__(self):
del self.inferior
def read(self, address):
return struct.unpack('I', self.inferior.read_memory(address, 4))[0]
def write(self, address, value):
value_bytes = struct.pack('I', value)
self.inferior.write_memory(address, value_bytes)
|
import gdb
import struct
class DeviceMemory:
def __init__(self):
self.inferior = gdb.selected_inferior()
def __del__(self):
del self.inferior
def read(self, address):
return struct.unpack('I', self.inferior.read_memory(address, 4))[0]
def write(self, address, value):
value_bytes = struct.pack('I', value)
self.inferior.write_memory(address, value_bytes)
class MMPeripheral(object):
"""Memory Mapped MCU Peripheral"""
def __init__(self, address, device_memory):
self.device_memory = device_memory
self.address = address
def __getattr__(self, name):
if name in self.fields:
return self.device.read(self.address + self.fields[name])
else:
raise ValueError('This peripheral does not contain register ' + name)
def __setattr__(self, name, value):
if name in self.fields:
self.device.write(self.address + self.fields[name], value)
else:
super(MMPeripheral, self).__setattr__(name, value)
|
Add mechanism for defining registers in memory mapped peripherals of MCU
|
Add mechanism for defining registers in memory mapped peripherals of MCU
|
Python
|
mit
|
fmfi-svt-deadlock/hw-testing,fmfi-svt-deadlock/hw-testing
|
import gdb
import struct
class DeviceMemory:
def __init__(self):
self.inferior = gdb.selected_inferior()
def __del__(self):
del self.inferior
def read(self, address):
return struct.unpack('I', self.inferior.read_memory(address, 4))[0]
def write(self, address, value):
value_bytes = struct.pack('I', value)
self.inferior.write_memory(address, value_bytes)
+
+ class MMPeripheral(object):
+ """Memory Mapped MCU Peripheral"""
+
+ def __init__(self, address, device_memory):
+ self.device_memory = device_memory
+ self.address = address
+
+ def __getattr__(self, name):
+ if name in self.fields:
+ return self.device.read(self.address + self.fields[name])
+ else:
+ raise ValueError('This peripheral does not contain register ' + name)
+
+ def __setattr__(self, name, value):
+ if name in self.fields:
+ self.device.write(self.address + self.fields[name], value)
+ else:
+ super(MMPeripheral, self).__setattr__(name, value)
+
|
Add mechanism for defining registers in memory mapped peripherals of MCU
|
## Code Before:
import gdb
import struct
class DeviceMemory:
def __init__(self):
self.inferior = gdb.selected_inferior()
def __del__(self):
del self.inferior
def read(self, address):
return struct.unpack('I', self.inferior.read_memory(address, 4))[0]
def write(self, address, value):
value_bytes = struct.pack('I', value)
self.inferior.write_memory(address, value_bytes)
## Instruction:
Add mechanism for defining registers in memory mapped peripherals of MCU
## Code After:
import gdb
import struct
class DeviceMemory:
def __init__(self):
self.inferior = gdb.selected_inferior()
def __del__(self):
del self.inferior
def read(self, address):
return struct.unpack('I', self.inferior.read_memory(address, 4))[0]
def write(self, address, value):
value_bytes = struct.pack('I', value)
self.inferior.write_memory(address, value_bytes)
class MMPeripheral(object):
"""Memory Mapped MCU Peripheral"""
def __init__(self, address, device_memory):
self.device_memory = device_memory
self.address = address
def __getattr__(self, name):
if name in self.fields:
return self.device.read(self.address + self.fields[name])
else:
raise ValueError('This peripheral does not contain register ' + name)
def __setattr__(self, name, value):
if name in self.fields:
self.device.write(self.address + self.fields[name], value)
else:
super(MMPeripheral, self).__setattr__(name, value)
|
import gdb
import struct
class DeviceMemory:
def __init__(self):
self.inferior = gdb.selected_inferior()
def __del__(self):
del self.inferior
def read(self, address):
return struct.unpack('I', self.inferior.read_memory(address, 4))[0]
def write(self, address, value):
value_bytes = struct.pack('I', value)
self.inferior.write_memory(address, value_bytes)
+
+
+ class MMPeripheral(object):
+ """Memory Mapped MCU Peripheral"""
+
+ def __init__(self, address, device_memory):
+ self.device_memory = device_memory
+ self.address = address
+
+ def __getattr__(self, name):
+ if name in self.fields:
+ return self.device.read(self.address + self.fields[name])
+ else:
+ raise ValueError('This peripheral does not contain register ' + name)
+
+ def __setattr__(self, name, value):
+ if name in self.fields:
+ self.device.write(self.address + self.fields[name], value)
+ else:
+ super(MMPeripheral, self).__setattr__(name, value)
|
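As written, __getattr__ and __setattr__ dereference self.device while __init__ stores self.device_memory, so a register access falls back into __getattr__ and raises ValueError; fields is also expected to be supplied by a subclass. A hedged sketch with invented register offsets:

class UART(MMPeripheral):
    # Offsets are illustrative only:
    fields = {'DR': 0x00, 'SR': 0x04}

# The accessors presumably intend:
#     return self.device_memory.read(self.address + self.fields[name])
#     self.device_memory.write(self.address + self.fields[name], value)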
4aab1eb2d2d3a0c9b9c4ab6df23b043e6822ff84
|
examples/delta/delta.py
|
examples/delta/delta.py
|
import sys
from SALib.analyze import delta
from SALib.util import read_param_file
import numpy as np
sys.path.append('../..')
# Read the parameter range file and generate samples
# Since this is "given data", the bounds in the parameter file will not be used
# but the columns are still expected
problem = read_param_file('../../src/SALib/test_functions/params/Ishigami.txt')
X = np.loadtxt('model_input.txt')
Y = np.loadtxt('model_output.txt')
# Perform the sensitivity analysis using the model output
# Specify which column of the output file to analyze (zero-indexed)
Si = delta.analyze(problem, X, Y, num_resamples=10, conf_level=0.95, print_to_console=False)
# Returns a dictionary with keys 'delta', 'delta_conf', 'S1', 'S1_conf'
print(str(Si['delta']))
|
import sys
from SALib.analyze import delta
from SALib.util import read_param_file
import numpy as np
sys.path.append('../..')
# Read the parameter range file and generate samples
# Since this is "given data", the bounds in the parameter file will not be used
# but the columns are still expected
problem = read_param_file('../../src/SALib/test_functions/params/Ishigami.txt')
X = np.loadtxt('../data/model_input.txt')
Y = np.loadtxt('../data/model_output.txt')
# Perform the sensitivity analysis using the model output
# Specify which column of the output file to analyze (zero-indexed)
Si = delta.analyze(problem, X, Y, num_resamples=10, conf_level=0.95, print_to_console=False)
# Returns a dictionary with keys 'delta', 'delta_conf', 'S1', 'S1_conf'
print(str(Si['delta']))
|
Fix up example with corrected path
|
Fix up example with corrected path
|
Python
|
mit
|
jdherman/SALib,SALib/SALib,jdherman/SALib
|
import sys
from SALib.analyze import delta
from SALib.util import read_param_file
import numpy as np
sys.path.append('../..')
# Read the parameter range file and generate samples
# Since this is "given data", the bounds in the parameter file will not be used
# but the columns are still expected
problem = read_param_file('../../src/SALib/test_functions/params/Ishigami.txt')
- X = np.loadtxt('model_input.txt')
+ X = np.loadtxt('../data/model_input.txt')
- Y = np.loadtxt('model_output.txt')
+ Y = np.loadtxt('../data/model_output.txt')
# Perform the sensitivity analysis using the model output
# Specify which column of the output file to analyze (zero-indexed)
Si = delta.analyze(problem, X, Y, num_resamples=10, conf_level=0.95, print_to_console=False)
# Returns a dictionary with keys 'delta', 'delta_conf', 'S1', 'S1_conf'
print(str(Si['delta']))
|
Fix up example with corrected path
|
## Code Before:
import sys
from SALib.analyze import delta
from SALib.util import read_param_file
import numpy as np
sys.path.append('../..')
# Read the parameter range file and generate samples
# Since this is "given data", the bounds in the parameter file will not be used
# but the columns are still expected
problem = read_param_file('../../src/SALib/test_functions/params/Ishigami.txt')
X = np.loadtxt('model_input.txt')
Y = np.loadtxt('model_output.txt')
# Perform the sensitivity analysis using the model output
# Specify which column of the output file to analyze (zero-indexed)
Si = delta.analyze(problem, X, Y, num_resamples=10, conf_level=0.95, print_to_console=False)
# Returns a dictionary with keys 'delta', 'delta_conf', 'S1', 'S1_conf'
print(str(Si['delta']))
## Instruction:
Fix up example with corrected path
## Code After:
import sys
from SALib.analyze import delta
from SALib.util import read_param_file
import numpy as np
sys.path.append('../..')
# Read the parameter range file and generate samples
# Since this is "given data", the bounds in the parameter file will not be used
# but the columns are still expected
problem = read_param_file('../../src/SALib/test_functions/params/Ishigami.txt')
X = np.loadtxt('../data/model_input.txt')
Y = np.loadtxt('../data/model_output.txt')
# Perform the sensitivity analysis using the model output
# Specify which column of the output file to analyze (zero-indexed)
Si = delta.analyze(problem, X, Y, num_resamples=10, conf_level=0.95, print_to_console=False)
# Returns a dictionary with keys 'delta', 'delta_conf', 'S1', 'S1_conf'
print(str(Si['delta']))
|
import sys
from SALib.analyze import delta
from SALib.util import read_param_file
import numpy as np
sys.path.append('../..')
# Read the parameter range file and generate samples
# Since this is "given data", the bounds in the parameter file will not be used
# but the columns are still expected
problem = read_param_file('../../src/SALib/test_functions/params/Ishigami.txt')
- X = np.loadtxt('model_input.txt')
+ X = np.loadtxt('../data/model_input.txt')
? ++++++++
- Y = np.loadtxt('model_output.txt')
+ Y = np.loadtxt('../data/model_output.txt')
? ++++++++
# Perform the sensitivity analysis using the model output
# Specify which column of the output file to analyze (zero-indexed)
Si = delta.analyze(problem, X, Y, num_resamples=10, conf_level=0.95, print_to_console=False)
# Returns a dictionary with keys 'delta', 'delta_conf', 'S1', 'S1_conf'
print(str(Si['delta']))
|
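The result dictionary carries more than the delta values; the keys listed in the script's closing comment can be printed the same way:

print(str(Si['S1']))          # first-order sensitivity indices
print(str(Si['delta_conf']))  # resampled confidence bounds for delta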
68452ffc8490d976b043f660a0e3e1f19c4ed98e
|
great_expectations/actions/__init__.py
|
great_expectations/actions/__init__.py
|
from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)
|
from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
SlackNotificationAction
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)
|
Add Slack action to init
|
Add Slack action to init
|
Python
|
apache-2.0
|
great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations
|
from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
+ SlackNotificationAction
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)
|
Add Slack action to init
|
## Code Before:
from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)
## Instruction:
Add Slack action to init
## Code After:
from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
SlackNotificationAction
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)
|
from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
+ SlackNotificationAction
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)
|
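With the export added, the action resolves from the subpackage; a one-line import sketch (path taken from the file name):

from great_expectations.actions import SlackNotificationAction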
952704b93004e5763231ad3e64f32135474651b2
|
common/templatetags/uqam.py
|
common/templatetags/uqam.py
|
from django import template
register = template.Library()
@register.filter
def dimension(value, arg):
"""
Dimension integers
If value, append arg, otherwise output nothing
"""
if value:
return str(value) + " " + arg
return ""
@register.filter
def verbose_name(obj):
"""
Return the verbose name of a model
"""
return obj._meta.verbose_name
@register.filter
def pdb(element):
"""
Inside a template do {{ template_var|pdb }}
"""
import ipdb
ipdb.set_trace()
return element
from cat.models import Category
from location.models import Country
@register.inclusion_tag('snippets/advanced_search_fields.html')
def advanced_search_fields():
categories = Category.objects.all()
places = Country.objects.all()
return {
'categories': categories,
'places': places,
}
|
from django import template
register = template.Library()
@register.filter
def dimension(value, arg):
"""
Dimension integers
If value, append arg, otherwise output nothing
"""
if value:
return str(value) + " " + arg
return ""
@register.filter
def verbose_name(obj):
"""
Return the verbose name of a model
"""
return obj._meta.verbose_name
@register.filter
def pdb(element):
"""
Inside a template do {{ template_var|pdb }}
"""
import ipdb
ipdb.set_trace()
return element
from cat.models import Category
from location.models import Country
@register.inclusion_tag('snippets/advanced_search_fields.html')
def advanced_search_fields():
categories = Category.objects.all().order_by('name')
places = Country.objects.all()
return {
'categories': categories,
'places': places,
}
|
Order categories in search fields
|
Order categories in search fields
|
Python
|
bsd-3-clause
|
uq-eresearch/uqam,uq-eresearch/uqam,uq-eresearch/uqam,uq-eresearch/uqam
|
from django import template
register = template.Library()
@register.filter
def dimension(value, arg):
"""
Dimension integers
If value, append arg, otherwise output nothing
"""
if value:
return str(value) + " " + arg
return ""
@register.filter
def verbose_name(obj):
"""
Return the verbose name of a model
"""
return obj._meta.verbose_name
@register.filter
def pdb(element):
"""
Inside a template do {{ template_var|pdb }}
"""
import ipdb
ipdb.set_trace()
return element
from cat.models import Category
from location.models import Country
@register.inclusion_tag('snippets/advanced_search_fields.html')
def advanced_search_fields():
- categories = Category.objects.all()
+ categories = Category.objects.all().order_by('name')
places = Country.objects.all()
return {
'categories': categories,
'places': places,
}
|
Order categories in search fields
|
## Code Before:
from django import template
register = template.Library()
@register.filter
def dimension(value, arg):
"""
Dimension integers
If value, append arg, otherwise output nothing
"""
if value:
return str(value) + " " + arg
return ""
@register.filter
def verbose_name(obj):
"""
Return the verbose name of a model
"""
return obj._meta.verbose_name
@register.filter
def pdb(element):
"""
Inside a template do {{ template_var|pdb }}
"""
import ipdb
ipdb.set_trace()
return element
from cat.models import Category
from location.models import Country
@register.inclusion_tag('snippets/advanced_search_fields.html')
def advanced_search_fields():
categories = Category.objects.all()
places = Country.objects.all()
return {
'categories': categories,
'places': places,
}
## Instruction:
Order categories in search fields
## Code After:
from django import template
register = template.Library()
@register.filter
def dimension(value, arg):
"""
Dimension integers
If value, append arg, otherwise output nothing
"""
if value:
return str(value) + " " + arg
return ""
@register.filter
def verbose_name(obj):
"""
Return the verbose name of a model
"""
return obj._meta.verbose_name
@register.filter
def pdb(element):
"""
Inside a template do {{ template_var|pdb }}
"""
import ipdb
ipdb.set_trace()
return element
from cat.models import Category
from location.models import Country
@register.inclusion_tag('snippets/advanced_search_fields.html')
def advanced_search_fields():
categories = Category.objects.all().order_by('name')
places = Country.objects.all()
return {
'categories': categories,
'places': places,
}
|
from django import template
register = template.Library()
@register.filter
def dimension(value, arg):
"""
Dimension integers
If value, append arg, otherwise output nothing
"""
if value:
return str(value) + " " + arg
return ""
@register.filter
def verbose_name(obj):
"""
Return the verbose name of a model
"""
return obj._meta.verbose_name
@register.filter
def pdb(element):
"""
Inside a template do {{ template_var|pdb }}
"""
import ipdb
ipdb.set_trace()
return element
from cat.models import Category
from location.models import Country
@register.inclusion_tag('snippets/advanced_search_fields.html')
def advanced_search_fields():
- categories = Category.objects.all()
+ categories = Category.objects.all().order_by('name')
? +++++++++++++++++
places = Country.objects.all()
return {
'categories': categories,
'places': places,
}
|
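The one-line change in the record above pushes the sort into the database; a minimal contrast of the two queries, assuming the Category model from the same file:

from cat.models import Category

# Unordered: rows arrive in whatever order the database happens to return.
categories = Category.objects.all()

# Ordered: Django appends ORDER BY "name" ASC to the generated SQL.
categories = Category.objects.all().order_by('name')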
563220ef19395201aed7f6392519f84db4ec7a77
|
tests/test_midas.py
|
tests/test_midas.py
|
import datetime
from midas import mix
from midas.midas import estimate, forecast
def test_estimate(gdp_data, farmpay_data):
y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1,
start_date=datetime.datetime(1985, 1, 1),
end_date=datetime.datetime(2009, 1, 1))
res = estimate(y, yl, x)
fc = forecast(xf, ylf, res)
print(fc)
assert False
|
import datetime
import numpy as np
from midas import mix
from midas.midas import estimate, forecast
def test_estimate(gdp_data, farmpay_data):
y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1,
start_date=datetime.datetime(1985, 1, 1),
end_date=datetime.datetime(2009, 1, 1))
res = estimate(y, yl, x)
fc = forecast(xf, ylf, res)
print(fc)
assert np.isclose(fc.loc['2011-04-01'][0], 1.336844, rtol=1e-6)
|
Add assertion for forecast test
|
Add assertion for forecast test
|
Python
|
mit
|
mikemull/midaspy
|
import datetime
+ import numpy as np
from midas import mix
from midas.midas import estimate, forecast
def test_estimate(gdp_data, farmpay_data):
y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1,
start_date=datetime.datetime(1985, 1, 1),
end_date=datetime.datetime(2009, 1, 1))
res = estimate(y, yl, x)
fc = forecast(xf, ylf, res)
print(fc)
- assert False
+ assert np.isclose(fc.loc['2011-04-01'][0], 1.336844, rtol=1e-6)
|
Add assertion for forecast test
|
## Code Before:
import datetime
from midas import mix
from midas.midas import estimate, forecast
def test_estimate(gdp_data, farmpay_data):
y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1,
start_date=datetime.datetime(1985, 1, 1),
end_date=datetime.datetime(2009, 1, 1))
res = estimate(y, yl, x)
fc = forecast(xf, ylf, res)
print(fc)
assert False
## Instruction:
Add assertion for forecast test
## Code After:
import datetime
import numpy as np
from midas import mix
from midas.midas import estimate, forecast
def test_estimate(gdp_data, farmpay_data):
y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1,
start_date=datetime.datetime(1985, 1, 1),
end_date=datetime.datetime(2009, 1, 1))
res = estimate(y, yl, x)
fc = forecast(xf, ylf, res)
print(fc)
assert np.isclose(fc.loc['2011-04-01'][0], 1.336844, rtol=1e-6)
|
import datetime
+ import numpy as np
from midas import mix
from midas.midas import estimate, forecast
def test_estimate(gdp_data, farmpay_data):
y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1,
start_date=datetime.datetime(1985, 1, 1),
end_date=datetime.datetime(2009, 1, 1))
res = estimate(y, yl, x)
fc = forecast(xf, ylf, res)
print(fc)
- assert False
+ assert np.isclose(fc.loc['2011-04-01'][0], 1.336844, rtol=1e-6)
|
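The assertion added above uses numpy's approximate comparison rather than exact equality; a small standalone illustration of the rtol semantics it relies on:

import numpy as np

# np.isclose(a, b, rtol=r) tests |a - b| <= atol + r * |b| (atol defaults
# to 1e-08), so rtol=1e-6 tolerates roughly one part per million of drift.
assert np.isclose(1.336844, 1.3368445, rtol=1e-6)
assert not np.isclose(1.336844, 1.34, rtol=1e-6)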
fd7eef57a562f2963500d34cbbeb607913b5bb21
|
txircd/modules/extra/extban_registered.py
|
txircd/modules/extra/extban_registered.py
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
implements(IPlugin, IModuleData)
name = "RExtbans"
# R extbans take the following forms:
# "R:*" Match any logged in user
# "R:<nick>" Match the user that owns that nick (regardless of whether it is their current nick)
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [
("usermatchban-R", 10, self.matchUser),
("user-login", 10, self.refreshUser),
("user-logout", 10, self.refreshUser),
]
def matchUser(self, user, negated, param):
if negated:
return not self.matchUser(user, False, param)
if param == "*":
return user.cache.get("accountid", None) is not None
return ircLower(param) in user.cache.get("ownedNicks", [])
def refreshUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
rextbans = RExtbans()
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
implements(IPlugin, IModuleData)
name = "RExtbans"
# R extbans take the following forms:
# "R:*" Match any logged in user
# "R:<nick>" Match the user that owns that nick (regardless of whether it is their current nick)
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [
("usermatchban-R", 10, self.matchUser),
("user-login", 10, self.loginUser),
("user-logout", 10, self.logoutUser),
]
def matchUser(self, user, negated, param):
if negated:
return not self.matchUser(user, False, param)
if param == "*":
return user.cache.get("accountid", None) is not None
return ircLower(param) in user.cache.get("ownedNicks", [])
def loginUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
def logoutUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
changes = []
for channel in user.channels:
for rank in channel.users[user]:
changes.append((rank, user.nick))
modestr = "-{}".format("".join([mode for mode, param in changes]))
params = [param for mode, param in changes if param is not None]
channel.setModes(self.ircd.serverID, modestr, params)
rextbans = RExtbans()
|
Remove a user's statuses in all channels when they logout
|
Remove a user's statuses in all channels when they logout
|
Python
|
bsd-3-clause
|
Heufneutje/txircd,ElementalAlchemist/txircd
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
implements(IPlugin, IModuleData)
name = "RExtbans"
# R extbans take the following forms:
# "R:*" Match any logged in user
# "R:<nick>" Match the user that owns that nick (regardless of whether it is their current nick)
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [
("usermatchban-R", 10, self.matchUser),
- ("user-login", 10, self.refreshUser),
+ ("user-login", 10, self.loginUser),
- ("user-logout", 10, self.refreshUser),
+ ("user-logout", 10, self.logoutUser),
]
def matchUser(self, user, negated, param):
if negated:
return not self.matchUser(user, False, param)
if param == "*":
return user.cache.get("accountid", None) is not None
return ircLower(param) in user.cache.get("ownedNicks", [])
- def refreshUser(self, user, donorID=None):
+ def loginUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
+ def logoutUser(self, user, donorID=None):
+ self.ircd.runActionStandard("updateuserbancache", user)
+ changes = []
+ for channel in user.channels:
+ for rank in channel.users[user]:
+ changes.append((rank, user.nick))
+ modestr = "-{}".format("".join([mode for mode, param in changes]))
+ params = [param for mode, param in changes if param is not None]
+ channel.setModes(self.ircd.serverID, modestr, params)
+
rextbans = RExtbans()
|
Remove a user's statuses in all channels when they logout
|
## Code Before:
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
implements(IPlugin, IModuleData)
name = "RExtbans"
# R extbans take the following forms:
# "R:*" Match any logged in user
# "R:<nick>" Match the user that owns that nick (regardless of whether it is their current nick)
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [
("usermatchban-R", 10, self.matchUser),
("user-login", 10, self.refreshUser),
("user-logout", 10, self.refreshUser),
]
def matchUser(self, user, negated, param):
if negated:
return not self.matchUser(user, False, param)
if param == "*":
return user.cache.get("accountid", None) is not None
return ircLower(param) in user.cache.get("ownedNicks", [])
def refreshUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
rextbans = RExtbans()
## Instruction:
Remove a user's statuses in all channels when they logout
## Code After:
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
implements(IPlugin, IModuleData)
name = "RExtbans"
# R extbans take the following forms:
# "R:*" Match any logged in user
# "R:<nick>" Match the user that owns that nick (regardless of whether it is their current nick)
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [
("usermatchban-R", 10, self.matchUser),
("user-login", 10, self.loginUser),
("user-logout", 10, self.logoutUser),
]
def matchUser(self, user, negated, param):
if negated:
return not self.matchUser(user, False, param)
if param == "*":
return user.cache.get("accountid", None) is not None
return ircLower(param) in user.cache.get("ownedNicks", [])
def loginUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
def logoutUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
changes = []
for channel in user.channels:
for rank in channel.users[user]:
changes.append((rank, user.nick))
modestr = "-{}".format("".join([mode for mode, param in changes]))
params = [param for mode, param in changes if param is not None]
channel.setModes(self.ircd.serverID, modestr, params)
rextbans = RExtbans()
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
implements(IPlugin, IModuleData)
name = "RExtbans"
# R extbans take the following forms:
# "R:*" Match any logged in user
# "R:<nick>" Match the user that owns that nick (regardless of whether it is their current nick)
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [
("usermatchban-R", 10, self.matchUser),
- ("user-login", 10, self.refreshUser),
? ^^^^^^^
+ ("user-login", 10, self.loginUser),
? ^^^^^
- ("user-logout", 10, self.refreshUser),
? ^^^^^^^
+ ("user-logout", 10, self.logoutUser),
? ^^^^^^
]
def matchUser(self, user, negated, param):
if negated:
return not self.matchUser(user, False, param)
if param == "*":
return user.cache.get("accountid", None) is not None
return ircLower(param) in user.cache.get("ownedNicks", [])
- def refreshUser(self, user, donorID=None):
? ^^^^^^^
+ def loginUser(self, user, donorID=None):
? ^^^^^
self.ircd.runActionStandard("updateuserbancache", user)
+ def logoutUser(self, user, donorID=None):
+ self.ircd.runActionStandard("updateuserbancache", user)
+ changes = []
+ for channel in user.channels:
+ for rank in channel.users[user]:
+ changes.append((rank, user.nick))
+ modestr = "-{}".format("".join([mode for mode, param in changes]))
+ params = [param for mode, param in changes if param is not None]
+ channel.setModes(self.ircd.serverID, modestr, params)
+
rextbans = RExtbans()
|
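The logout handler added above collapses every rank the user holds into a single mode-change string; a minimal sketch of that string-building step with hypothetical channel data:

# Hypothetical ranks ('o' = op, 'v' = voice) held by one user in a channel.
changes = [('o', 'alice'), ('v', 'alice')]

modestr = "-{}".format("".join([mode for mode, param in changes]))
params = [param for mode, param in changes if param is not None]

assert modestr == "-ov"
assert params == ['alice', 'alice']
# The server would then apply something like: MODE #chan -ov alice alice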
3544f211913ba67f0bd7e433c23d2e5b22bba719
|
lightcurve_pipeline/database/reset_database.py
|
lightcurve_pipeline/database/reset_database.py
|
from lightcurve_pipeline.database.database_interface import base
from lightcurve_pipeline.utils.utils import SETTINGS
if __name__ == '__main__':
prompt = 'About to reset database instance {}. '.format(SETTINGS['db_connection_string'])
prompt += 'Do you wish to proceed? (y/n)'
response = raw_input(prompt)
if response.lower() == 'y':
print 'Resetting database'
base.metadata.drop_all()
base.metadata.create_all()
|
from __future__ import print_function
from lightcurve_pipeline.database.database_interface import base
from lightcurve_pipeline.utils.utils import SETTINGS
if __name__ == '__main__':
prompt = 'About to reset database instance {}. '.format(SETTINGS['db_connection_string'])
prompt += 'Do you wish to proceed? (y/n)'
response = raw_input(prompt)
if response.lower() == 'y':
print('Resetting database')
base.metadata.drop_all()
base.metadata.create_all()
|
Change the print statement to use __future__.
|
Change the print statement to use __future__.
|
Python
|
bsd-3-clause
|
justincely/lightcurve_pipeline
|
+
+ from __future__ import print_function
from lightcurve_pipeline.database.database_interface import base
from lightcurve_pipeline.utils.utils import SETTINGS
if __name__ == '__main__':
prompt = 'About to reset database instance {}. '.format(SETTINGS['db_connection_string'])
prompt += 'Do you wish to proceed? (y/n)'
response = raw_input(prompt)
if response.lower() == 'y':
- print 'Resetting database'
+ print('Resetting database')
base.metadata.drop_all()
base.metadata.create_all()
|
Change the print statement to use __future__.
|
## Code Before:
from lightcurve_pipeline.database.database_interface import base
from lightcurve_pipeline.utils.utils import SETTINGS
if __name__ == '__main__':
prompt = 'About to reset database instance {}. '.format(SETTINGS['db_connection_string'])
prompt += 'Do you wish to proceed? (y/n)'
response = raw_input(prompt)
if response.lower() == 'y':
print 'Resetting database'
base.metadata.drop_all()
base.metadata.create_all()
## Instruction:
Change the print statement to use __future__.
## Code After:
from __future__ import print_function
from lightcurve_pipeline.database.database_interface import base
from lightcurve_pipeline.utils.utils import SETTINGS
if __name__ == '__main__':
prompt = 'About to reset database instance {}. '.format(SETTINGS['db_connection_string'])
prompt += 'Do you wish to proceed? (y/n)'
response = raw_input(prompt)
if response.lower() == 'y':
print('Resetting database')
base.metadata.drop_all()
base.metadata.create_all()
|
+
+ from __future__ import print_function
from lightcurve_pipeline.database.database_interface import base
from lightcurve_pipeline.utils.utils import SETTINGS
if __name__ == '__main__':
prompt = 'About to reset database instance {}. '.format(SETTINGS['db_connection_string'])
prompt += 'Do you wish to proceed? (y/n)'
response = raw_input(prompt)
if response.lower() == 'y':
- print 'Resetting database'
? ^
+ print('Resetting database')
? ^ +
base.metadata.drop_all()
base.metadata.create_all()
|
28e67e04a88b0195184bf43f013c11ea7f320c4f
|
conveyor/processor.py
|
conveyor/processor.py
|
from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
class BulkProcessor(BaseProcessor):
def process(self):
pass
|
from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
return set(self.client.package_releases(name, True))
else:
return set([version])
class BulkProcessor(BaseProcessor):
def process(self):
pass
|
Add a method for getting a list of releases to fetch
|
Add a method for getting a list of releases to fetch
|
Python
|
bsd-2-clause
|
crateio/carrier
|
from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
+ def get_releases(self, name, version=None):
+ if version is None:
+ return set(self.client.package_releases(name, True))
+ else:
+ return set([version])
+
class BulkProcessor(BaseProcessor):
def process(self):
pass
|
Add a method for getting a list of releases to fetch
|
## Code Before:
from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
class BulkProcessor(BaseProcessor):
def process(self):
pass
## Instruction:
Add a method for getting a list of releases to fetch
## Code After:
from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
return set(self.client.package_releases(name, True))
else:
return set([version])
class BulkProcessor(BaseProcessor):
def process(self):
pass
|
from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
+ def get_releases(self, name, version=None):
+ if version is None:
+ return set(self.client.package_releases(name, True))
+ else:
+ return set([version])
+
class BulkProcessor(BaseProcessor):
def process(self):
pass
|
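get_releases above either asks the index for every release or pins to a single version; a minimal sketch of both branches with the XML-RPC client stubbed out, since the real index is a network service. The FakeClient and its return values are hypothetical.

from conveyor.processor import BaseProcessor

class FakeClient(object):
    def package_releases(self, name, show_hidden):
        return ["1.0", "1.1"]

# Bypass __init__ so no real xmlrpc2 client is constructed.
proc = BaseProcessor.__new__(BaseProcessor)
proc.client = FakeClient()

assert proc.get_releases("example") == set(["1.0", "1.1"])
assert proc.get_releases("example", version="1.0") == set(["1.0"])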
c8db390195641c33f84ccd1f645a5af73debc2bd
|
xapi/tasks.py
|
xapi/tasks.py
|
from celery.task import task
from django.conf import settings
from xapi.sender import TinCanSender
@task
def send_2_tin_can():
options = settings.TRACKING_BACKENDS['xapi']['OPTIONS']
if options.get("SEND_CRON_ENABLED"):
TinCanSender.send_2_tincan_by_settings()
|
from celery.task import task
from django.conf import settings
from xapi.sender import TinCanSender
@task(name='xapi.send_2_tin_can')
def send_2_tin_can():
options = settings.TRACKING_BACKENDS['xapi']['OPTIONS']
TinCanSender.send_2_tincan_by_settings()
|
Add a name to present the task in djcelery options
|
Add a name to present the task in djcelery options
|
Python
|
agpl-3.0
|
marcore/pok-eco,marcore/pok-eco
|
from celery.task import task
from django.conf import settings
from xapi.sender import TinCanSender
- @task
+ @task(name='xapi.send_2_tin_can')
def send_2_tin_can():
options = settings.TRACKING_BACKENDS['xapi']['OPTIONS']
- if options.get("SEND_CRON_ENABLED"):
- TinCanSender.send_2_tincan_by_settings()
+ TinCanSender.send_2_tincan_by_settings()
|
Add a name to present the task in djcelery options
|
## Code Before:
from celery.task import task
from django.conf import settings
from xapi.sender import TinCanSender
@task
def send_2_tin_can():
options = settings.TRACKING_BACKENDS['xapi']['OPTIONS']
if options.get("SEND_CRON_ENABLED"):
TinCanSender.send_2_tincan_by_settings()
## Instruction:
Add a name to present the task in djcelery options
## Code After:
from celery.task import task
from django.conf import settings
from xapi.sender import TinCanSender
@task(name='xapi.send_2_tin_can')
def send_2_tin_can():
options = settings.TRACKING_BACKENDS['xapi']['OPTIONS']
TinCanSender.send_2_tincan_by_settings()
|
from celery.task import task
from django.conf import settings
from xapi.sender import TinCanSender
- @task
+ @task(name='xapi.send_2_tin_can')
def send_2_tin_can():
options = settings.TRACKING_BACKENDS['xapi']['OPTIONS']
- if options.get("SEND_CRON_ENABLED"):
- TinCanSender.send_2_tincan_by_settings()
? ----
+ TinCanSender.send_2_tincan_by_settings()
|
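Giving the task an explicit name registers it under a stable key instead of the auto-derived module path, which is what lets djcelery display it; a hedged minimal parallel using the same old-style Celery decorator:

from celery.task import task

@task(name='xapi.send_2_tin_can')
def send_2_tin_can():
    pass

# The registry key is the explicit name, not '<module>.send_2_tin_can'.
assert send_2_tin_can.name == 'xapi.send_2_tin_can'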
6093d2954861f2783da3e5b8473cb13b0469685b
|
elasticquery/filterquery.py
|
elasticquery/filterquery.py
|
import json
from .util import make_dsl_object, unroll_definitions, unroll_struct
class MetaFilterQuery(type):
def __init__(cls, name, bases, d):
super(MetaFilterQuery, cls).__init__(name, bases, d)
unroll_definitions(cls._definitions)
def __getattr__(cls, key):
if key not in cls._definitions:
raise cls._exception(key)
return lambda *args, **kwargs: make_dsl_object(
cls, key, cls._definitions[key],
*args, **kwargs
)
class BaseFilterQuery(object):
_type = None
_struct = None
_dsl_type = None
def __init__(self, dsl_type, struct):
self._struct = struct
self._dsl_type = dsl_type
def dict(self):
return {
self._dsl_type: unroll_struct(self._struct)
}
def __str__(self):
return json.dumps(self.dict(), indent=4)
|
import json
from .util import make_dsl_object, unroll_definitions, unroll_struct
class MetaFilterQuery(type):
def __init__(cls, name, bases, d):
super(MetaFilterQuery, cls).__init__(name, bases, d)
unroll_definitions(cls._definitions)
def __getattr__(cls, key):
if key == '__test__':
return None
if key not in cls._definitions:
raise cls._exception(key)
return lambda *args, **kwargs: make_dsl_object(
cls, key, cls._definitions[key],
*args, **kwargs
)
class BaseFilterQuery(object):
_type = None
_struct = None
_dsl_type = None
def __init__(self, dsl_type, struct):
self._struct = struct
self._dsl_type = dsl_type
def dict(self):
dsl_type = self._dsl_type[:1] if self._dsl_type.endswith('_') else self._dsl_type
return {
dsl_type: unroll_struct(self._struct)
}
def __str__(self):
return json.dumps(self.dict(), indent=4)
|
Support nosetests, handle magic names (and_, or_, etc)
|
Support nosetests, handle magic names (and_, or_, etc)
|
Python
|
mit
|
Fizzadar/ElasticQuery,Fizzadar/ElasticQuery
|
import json
from .util import make_dsl_object, unroll_definitions, unroll_struct
class MetaFilterQuery(type):
def __init__(cls, name, bases, d):
super(MetaFilterQuery, cls).__init__(name, bases, d)
unroll_definitions(cls._definitions)
def __getattr__(cls, key):
+ if key == '__test__':
+ return None
+
if key not in cls._definitions:
raise cls._exception(key)
return lambda *args, **kwargs: make_dsl_object(
cls, key, cls._definitions[key],
*args, **kwargs
)
class BaseFilterQuery(object):
_type = None
_struct = None
_dsl_type = None
def __init__(self, dsl_type, struct):
self._struct = struct
self._dsl_type = dsl_type
def dict(self):
+ dsl_type = self._dsl_type[:1] if self._dsl_type.endswith('_') else self._dsl_type
+
return {
- self._dsl_type: unroll_struct(self._struct)
+ dsl_type: unroll_struct(self._struct)
}
def __str__(self):
return json.dumps(self.dict(), indent=4)
|
Support nosetests, handle magic names (and_, or_, etc)
|
## Code Before:
import json
from .util import make_dsl_object, unroll_definitions, unroll_struct
class MetaFilterQuery(type):
def __init__(cls, name, bases, d):
super(MetaFilterQuery, cls).__init__(name, bases, d)
unroll_definitions(cls._definitions)
def __getattr__(cls, key):
if key not in cls._definitions:
raise cls._exception(key)
return lambda *args, **kwargs: make_dsl_object(
cls, key, cls._definitions[key],
*args, **kwargs
)
class BaseFilterQuery(object):
_type = None
_struct = None
_dsl_type = None
def __init__(self, dsl_type, struct):
self._struct = struct
self._dsl_type = dsl_type
def dict(self):
return {
self._dsl_type: unroll_struct(self._struct)
}
def __str__(self):
return json.dumps(self.dict(), indent=4)
## Instruction:
Support nosetests, handle magic names (and_, or_, etc)
## Code After:
import json
from .util import make_dsl_object, unroll_definitions, unroll_struct
class MetaFilterQuery(type):
def __init__(cls, name, bases, d):
super(MetaFilterQuery, cls).__init__(name, bases, d)
unroll_definitions(cls._definitions)
def __getattr__(cls, key):
if key == '__test__':
return None
if key not in cls._definitions:
raise cls._exception(key)
return lambda *args, **kwargs: make_dsl_object(
cls, key, cls._definitions[key],
*args, **kwargs
)
class BaseFilterQuery(object):
_type = None
_struct = None
_dsl_type = None
def __init__(self, dsl_type, struct):
self._struct = struct
self._dsl_type = dsl_type
def dict(self):
dsl_type = self._dsl_type[:1] if self._dsl_type.endswith('_') else self._dsl_type
return {
dsl_type: unroll_struct(self._struct)
}
def __str__(self):
return json.dumps(self.dict(), indent=4)
|
import json
from .util import make_dsl_object, unroll_definitions, unroll_struct
class MetaFilterQuery(type):
def __init__(cls, name, bases, d):
super(MetaFilterQuery, cls).__init__(name, bases, d)
unroll_definitions(cls._definitions)
def __getattr__(cls, key):
+ if key == '__test__':
+ return None
+
if key not in cls._definitions:
raise cls._exception(key)
return lambda *args, **kwargs: make_dsl_object(
cls, key, cls._definitions[key],
*args, **kwargs
)
class BaseFilterQuery(object):
_type = None
_struct = None
_dsl_type = None
def __init__(self, dsl_type, struct):
self._struct = struct
self._dsl_type = dsl_type
def dict(self):
+ dsl_type = self._dsl_type[:1] if self._dsl_type.endswith('_') else self._dsl_type
+
return {
- self._dsl_type: unroll_struct(self._struct)
? ------
+ dsl_type: unroll_struct(self._struct)
}
def __str__(self):
return json.dumps(self.dict(), indent=4)
|
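One detail in the diff above is worth flagging: slicing with [:1] keeps only the first character, whereas stripping a magic-name suffix such as and_ -> and would need [:-1]. A standalone check of the two slices:

dsl_type = 'and_'

assert dsl_type[:1] == 'a'     # first character only
assert dsl_type[:-1] == 'and'  # drops the trailing underscore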
aaaaa3a143c370f387edf42ebd6b22c924845afa
|
falcom/luhn/check_digit_number.py
|
falcom/luhn/check_digit_number.py
|
class CheckDigitNumber:
def __init__ (self, number = None):
self.__set_number(number)
def get_check_digit (self):
if self:
return self.generate_from_int(self.number)
else:
return None
def has_valid_check_digit (self):
if self:
digit = self.number % 10
static = self.number // 10
return digit == self.generate_from_int(static)
else:
return False
def __bool__ (self):
return self.number is not None
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.number))
def __set_number (self, number):
if isinstance(number, int):
self.number = number
elif isinstance(number, str):
self.__try_to_extract_number_from_str(number)
else:
self.number = None
def __try_to_extract_number_from_str (self, number):
try:
self.number = int(number)
except ValueError:
self.number = None
|
class CheckDigitNumber:
def __init__ (self, number = None):
self.__set_number(number)
def generate_from_int (self, n):
raise NotImplementedError
def get_check_digit (self):
if self:
return self.generate_from_int(self.number)
else:
return None
def has_valid_check_digit (self):
if self:
digit = self.number % 10
static = self.number // 10
return digit == self.generate_from_int(static)
else:
return False
def __bool__ (self):
return self.number is not None
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.number))
def __set_number (self, number):
if isinstance(number, int):
self.number = number
elif isinstance(number, str):
self.__try_to_extract_number_from_str(number)
else:
self.number = None
def __try_to_extract_number_from_str (self, number):
try:
self.number = int(number)
except ValueError:
self.number = None
|
Make it clear that the user must implement generate_from_int
|
Make it clear that the user must implement generate_from_int
|
Python
|
bsd-3-clause
|
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
|
class CheckDigitNumber:
def __init__ (self, number = None):
self.__set_number(number)
+
+ def generate_from_int (self, n):
+ raise NotImplementedError
def get_check_digit (self):
if self:
return self.generate_from_int(self.number)
else:
return None
def has_valid_check_digit (self):
if self:
digit = self.number % 10
static = self.number // 10
return digit == self.generate_from_int(static)
else:
return False
def __bool__ (self):
return self.number is not None
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.number))
def __set_number (self, number):
if isinstance(number, int):
self.number = number
elif isinstance(number, str):
self.__try_to_extract_number_from_str(number)
else:
self.number = None
def __try_to_extract_number_from_str (self, number):
try:
self.number = int(number)
except ValueError:
self.number = None
|
Make it clear that the user must implement generate_from_int
|
## Code Before:
class CheckDigitNumber:
def __init__ (self, number = None):
self.__set_number(number)
def get_check_digit (self):
if self:
return self.generate_from_int(self.number)
else:
return None
def has_valid_check_digit (self):
if self:
digit = self.number % 10
static = self.number // 10
return digit == self.generate_from_int(static)
else:
return False
def __bool__ (self):
return self.number is not None
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.number))
def __set_number (self, number):
if isinstance(number, int):
self.number = number
elif isinstance(number, str):
self.__try_to_extract_number_from_str(number)
else:
self.number = None
def __try_to_extract_number_from_str (self, number):
try:
self.number = int(number)
except ValueError:
self.number = None
## Instruction:
Make it clear that the user must implement generate_from_int
## Code After:
class CheckDigitNumber:
def __init__ (self, number = None):
self.__set_number(number)
def generate_from_int (self, n):
raise NotImplementedError
def get_check_digit (self):
if self:
return self.generate_from_int(self.number)
else:
return None
def has_valid_check_digit (self):
if self:
digit = self.number % 10
static = self.number // 10
return digit == self.generate_from_int(static)
else:
return False
def __bool__ (self):
return self.number is not None
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.number))
def __set_number (self, number):
if isinstance(number, int):
self.number = number
elif isinstance(number, str):
self.__try_to_extract_number_from_str(number)
else:
self.number = None
def __try_to_extract_number_from_str (self, number):
try:
self.number = int(number)
except ValueError:
self.number = None
|
class CheckDigitNumber:
def __init__ (self, number = None):
self.__set_number(number)
+
+ def generate_from_int (self, n):
+ raise NotImplementedError
def get_check_digit (self):
if self:
return self.generate_from_int(self.number)
else:
return None
def has_valid_check_digit (self):
if self:
digit = self.number % 10
static = self.number // 10
return digit == self.generate_from_int(static)
else:
return False
def __bool__ (self):
return self.number is not None
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.number))
def __set_number (self, number):
if isinstance(number, int):
self.number = number
elif isinstance(number, str):
self.__try_to_extract_number_from_str(number)
else:
self.number = None
def __try_to_extract_number_from_str (self, number):
try:
self.number = int(number)
except ValueError:
self.number = None
|
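generate_from_int is deliberately left abstract above; as one possible concrete subclass, a minimal sketch using the standard Luhn doubling rule (the choice of algorithm is an assumption based on the module path falcom/luhn, not spelled out in the record):

class LuhnNumber(CheckDigitNumber):

    def generate_from_int(self, n):
        # Standard Luhn: double every second digit from the right,
        # fold results above 9 back to one digit, then take the
        # tens complement of the running total.
        total, position = 0, 0
        while n > 0:
            digit = n % 10
            if position % 2 == 0:
                digit *= 2
                if digit > 9:
                    digit -= 9
            total += digit
            n //= 10
            position += 1
        return (10 - total % 10) % 10

assert LuhnNumber(7992739871).get_check_digit() == 3
assert LuhnNumber(79927398713).has_valid_check_digit()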
6e6a5cfb39ae3f6ee9d0cfb30a6a33be06839bfa
|
setup.py
|
setup.py
|
from distutils.core import setup, Extension
setup(
name='mapcode',
ext_modules=[Extension('mapcode',
sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'],
include_dirs=['mapcodelib']
)],
version='0.3',
description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.',
author='Erik Bos',
author_email='[email protected]',
url='https://github.com/mapcode-foundation/mapcode-python',
download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.3',
license='Apache License 2.0',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License'
],
)
|
from distutils.core import setup, Extension
setup(
name='mapcode',
ext_modules=[Extension('mapcode',
sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'],
include_dirs=['mapcodelib']
)],
# version number format is clibrary - python
version='0.4',
description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.',
author='Erik Bos',
author_email='[email protected]',
url='https://github.com/mapcode-foundation/mapcode-python',
download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.4',
license='Apache License 2.0',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License'
],
)
|
Update module version and tag as v0.4
|
Update module version and tag as v0.4
|
Python
|
apache-2.0
|
mapcode-foundation/mapcode-python,mapcode-foundation/mapcode-python
|
from distutils.core import setup, Extension
setup(
name='mapcode',
ext_modules=[Extension('mapcode',
sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'],
include_dirs=['mapcodelib']
)],
+ # version number format is clibrary - python
- version='0.3',
+ version='0.4',
description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.',
author='Erik Bos',
author_email='[email protected]',
url='https://github.com/mapcode-foundation/mapcode-python',
- download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.3',
+ download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.4',
license='Apache License 2.0',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License'
],
)
|
Update module version and tag as v0.4
|
## Code Before:
from distutils.core import setup, Extension
setup(
name='mapcode',
ext_modules=[Extension('mapcode',
sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'],
include_dirs=['mapcodelib']
)],
version='0.3',
description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.',
author='Erik Bos',
author_email='[email protected]',
url='https://github.com/mapcode-foundation/mapcode-python',
download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.3',
license='Apache License 2.0',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License'
],
)
## Instruction:
Update module version and tag as v0.4
## Code After:
from distutils.core import setup, Extension
setup(
name='mapcode',
ext_modules=[Extension('mapcode',
sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'],
include_dirs=['mapcodelib']
)],
# version number format is clibrary - python
version='0.4',
description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.',
author='Erik Bos',
author_email='[email protected]',
url='https://github.com/mapcode-foundation/mapcode-python',
download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.4',
license='Apache License 2.0',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License'
],
)
|
from distutils.core import setup, Extension
setup(
name='mapcode',
ext_modules=[Extension('mapcode',
sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'],
include_dirs=['mapcodelib']
)],
+ # version number format is clibrary - python
- version='0.3',
? ^
+ version='0.4',
? ^
description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.',
author='Erik Bos',
author_email='[email protected]',
url='https://github.com/mapcode-foundation/mapcode-python',
- download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.3',
? ^
+ download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.4',
? ^
license='Apache License 2.0',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License'
],
)
|
b1781b8c82979ee3765197084a9c8e372cb68cf8
|
jazzband/hooks.py
|
jazzband/hooks.py
|
import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
@hooks.hook("member")
def member(data, guid):
# only if the action is to add a member and if there is repo data
if data.get("action") == "added" and "repository" in data:
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
|
import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
@hooks.hook("repository")
def repository(data, guid):
# only if the action is to add a member and if there is repo data
if data.get("action") == "transferred" and "repository" in data:
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
|
Use new (?) repository transferred hook.
|
Use new (?) repository transferred hook.
|
Python
|
mit
|
jazzband/jazzband-site,jazzband/website,jazzband/website,jazzband/website,jazzband/website,jazzband/site,jazzband/jazzband-site,jazzband/site
|
import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
- @hooks.hook("member")
+ @hooks.hook("repository")
- def member(data, guid):
+ def repository(data, guid):
# only if the action is to add a member and if there is repo data
- if data.get("action") == "added" and "repository" in data:
+ if data.get("action") == "transferred" and "repository" in data:
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
|
Use new (?) repository transferred hook.
|
## Code Before:
import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
@hooks.hook("member")
def member(data, guid):
# only if the action is to add a member and if there is repo data
if data.get("action") == "added" and "repository" in data:
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
## Instruction:
Use new (?) repository transferred hook.
## Code After:
import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
@hooks.hook("repository")
def repository(data, guid):
# only if the action is to add a member and if there is repo data
if data.get("action") == "transferred" and "repository" in data:
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
|
import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
- @hooks.hook("member")
+ @hooks.hook("repository")
- def member(data, guid):
? ^ ^^^
+ def repository(data, guid):
? ^ ^^^^^^ +
# only if the action is to add a member and if there is repo data
- if data.get("action") == "added" and "repository" in data:
? ^^
+ if data.get("action") == "transferred" and "repository" in data:
? ++ ^^^^^^
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
|
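The repository hook above only proceeds for a transferred action that carries repository data; a hedged sketch of the minimal payload shape it is guarding on (the field values are illustrative):

payload = {
    "action": "transferred",
    "repository": {"id": 1296269, "full_name": "jazzband/example-project"},
}

# Mirrors the guard in the hook body.
assert payload.get("action") == "transferred" and "repository" in payload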
c44be6418bbf92121e56bf68d6c8e2ebef483e17
|
script/generate_amalgamation.py
|
script/generate_amalgamation.py
|
import sys
import os.path
import re
INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"')
seen_files = set()
def add_file(filename):
basename = os.path.basename(filename)
# Only include each file at most once.
if basename in seen_files:
return
seen_files.add(basename)
path = os.path.dirname(filename)
with open(filename, 'r') as f:
for line in f:
m = INCLUDE_PATTERN.match(line)
if m:
add_file(os.path.join(path, m.group(1)))
else:
sys.stdout.write(line)
for f in sys.argv[1:]:
add_file(f)
|
import sys
from os.path import basename, dirname, join
import re
INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"')
seen_files = set()
out = sys.stdout
def add_file(filename):
bname = basename(filename)
# Only include each file at most once.
if bname in seen_files:
return
seen_files.add(bname)
path = dirname(filename)
out.write('// Begin file "{0}"\n'.format(filename))
with open(filename, 'r') as f:
for line in f:
m = INCLUDE_PATTERN.match(line)
if m:
add_file(join(path, m.group(1)))
else:
out.write(line)
out.write('// End file "{0}"\n'.format(filename))
for f in sys.argv[1:]:
add_file(f)
|
Add comments for file start/end
|
Add comments for file start/end
|
Python
|
mit
|
Nave-Neel/wren,Nave-Neel/wren,minirop/wren,bigdimboom/wren,foresterre/wren,Nelarius/wren,minirop/wren,Nave-Neel/wren,foresterre/wren,Rohansi/wren,Rohansi/wren,minirop/wren,foresterre/wren,Nave-Neel/wren,minirop/wren,foresterre/wren,foresterre/wren,munificent/wren,bigdimboom/wren,Rohansi/wren,Nelarius/wren,Nelarius/wren,Nelarius/wren,Nelarius/wren,munificent/wren,munificent/wren,minirop/wren,munificent/wren,munificent/wren,bigdimboom/wren,bigdimboom/wren,munificent/wren,Rohansi/wren
|
import sys
- import os.path
+ from os.path import basename, dirname, join
import re
INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"')
seen_files = set()
+ out = sys.stdout
def add_file(filename):
- basename = os.path.basename(filename)
+ bname = basename(filename)
# Only include each file at most once.
- if basename in seen_files:
+ if bname in seen_files:
return
- seen_files.add(basename)
+ seen_files.add(bname)
- path = os.path.dirname(filename)
+ path = dirname(filename)
+ out.write('// Begin file "{0}"\n'.format(filename))
with open(filename, 'r') as f:
for line in f:
m = INCLUDE_PATTERN.match(line)
if m:
- add_file(os.path.join(path, m.group(1)))
+ add_file(join(path, m.group(1)))
else:
- sys.stdout.write(line)
+ out.write(line)
+ out.write('// End file "{0}"\n'.format(filename))
for f in sys.argv[1:]:
add_file(f)
|
Add comments for file start/end
|
## Code Before:
import sys
import os.path
import re
INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"')
seen_files = set()
def add_file(filename):
basename = os.path.basename(filename)
# Only include each file at most once.
if basename in seen_files:
return
seen_files.add(basename)
path = os.path.dirname(filename)
with open(filename, 'r') as f:
for line in f:
m = INCLUDE_PATTERN.match(line)
if m:
add_file(os.path.join(path, m.group(1)))
else:
sys.stdout.write(line)
for f in sys.argv[1:]:
add_file(f)
## Instruction:
Add comments for file start/end
## Code After:
import sys
from os.path import basename, dirname, join
import re
INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"')
seen_files = set()
out = sys.stdout
def add_file(filename):
bname = basename(filename)
# Only include each file at most once.
if bname in seen_files:
return
seen_files.add(bname)
path = dirname(filename)
out.write('// Begin file "{0}"\n'.format(filename))
with open(filename, 'r') as f:
for line in f:
m = INCLUDE_PATTERN.match(line)
if m:
add_file(join(path, m.group(1)))
else:
out.write(line)
out.write('// End file "{0}"\n'.format(filename))
for f in sys.argv[1:]:
add_file(f)
|
import sys
- import os.path
+ from os.path import basename, dirname, join
import re
INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"')
seen_files = set()
+ out = sys.stdout
def add_file(filename):
- basename = os.path.basename(filename)
? --- --------
+ bname = basename(filename)
# Only include each file at most once.
- if basename in seen_files:
? ---
+ if bname in seen_files:
return
- seen_files.add(basename)
? ---
+ seen_files.add(bname)
- path = os.path.dirname(filename)
? --------
+ path = dirname(filename)
+ out.write('// Begin file "{0}"\n'.format(filename))
with open(filename, 'r') as f:
for line in f:
m = INCLUDE_PATTERN.match(line)
if m:
- add_file(os.path.join(path, m.group(1)))
? --------
+ add_file(join(path, m.group(1)))
else:
- sys.stdout.write(line)
? -------
+ out.write(line)
+ out.write('// End file "{0}"\n'.format(filename))
for f in sys.argv[1:]:
add_file(f)
|
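The amalgamation script above keys everything off INCLUDE_PATTERN; a standalone check of what that regex does and does not capture:

import re

INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"')

# Quoted local includes are captured and inlined...
assert INCLUDE_PATTERN.match('#include "wren_vm.h"').group(1) == 'wren_vm.h'
# ...while angle-bracket system includes pass through untouched.
assert INCLUDE_PATTERN.match('#include <stdio.h>') is None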
56902792b2a7fdd25bd64781e9e98a63db2ee348
|
all/__init__.py
|
all/__init__.py
|
from .help import HyperHelpCommand, HyperHelpNavigateCommand
from .help import HyperHelpListener
###----------------------------------------------------------------------------
|
__version_tuple = (1, 0, 0)
__version__ = ".".join([str(num) for num in __version_tuple])
# These are exposed to Sublime to implement the core of the help system.
from .help import HyperHelpCommand, HyperHelpNavigateCommand
from .help import HyperHelpListener
# These are exposed to packages that may want to interface with the hyperhelp
# core for use in their own packages.
from .operations import package_help_scan
###----------------------------------------------------------------------------
def version():
"""
Get the currently installed version of hyperhelp as a tuple.
"""
return __version_tuple
###----------------------------------------------------------------------------
|
Include a package version number
|
Include a package version number
This includes in the core package the concept of a version number that
underlying code could use to determine what version of the core it is
interfacing with.
This is only really needed for packages that get at the underlying
core code in hyperhelp, which at the moment would only be the
companion HyperHelpAuthor package.
To this end (as an experiment) the code for loading in the help index
files is exposed to anyone that wants to import it as a test for how
this will eventually work. In particular, the idea is to put all of
the symbols meant to be accessible to outside code into the
hyperhelp.all module namespace (whicn is unfortunate but there seems
to be no satisfactory way around it).
|
Python
|
mit
|
OdatNurd/hyperhelp
|
+
+ __version_tuple = (1, 0, 0)
+ __version__ = ".".join([str(num) for num in __version_tuple])
+ # These are exposed to Sublime to implement the core of the help system.
from .help import HyperHelpCommand, HyperHelpNavigateCommand
from .help import HyperHelpListener
+ # These are exposed to packages that may want to interface with the hyperhelp
+ # core for use in their own packages.
+ from .operations import package_help_scan
+
+
###----------------------------------------------------------------------------
+
+ def version():
+ """
+ Get the currently installed version of hyperhelp as a tuple.
+ """
+ return __version_tuple
+
+
+ ###----------------------------------------------------------------------------
+
|
Include a package version number
|
## Code Before:
from .help import HyperHelpCommand, HyperHelpNavigateCommand
from .help import HyperHelpListener
###----------------------------------------------------------------------------
## Instruction:
Include a package version number
## Code After:
__version_tuple = (1, 0, 0)
__version__ = ".".join([str(num) for num in __version_tuple])
# These are exposed to Sublime to implement the core of the help system.
from .help import HyperHelpCommand, HyperHelpNavigateCommand
from .help import HyperHelpListener
# These are exposed to packages that may want to interface with the hyperhelp
# core for use in their own packages.
from .operations import package_help_scan
###----------------------------------------------------------------------------
def version():
"""
Get the currently installed version of hyperhelp as a tuple.
"""
return __version_tuple
###----------------------------------------------------------------------------
|
+
+ __version_tuple = (1, 0, 0)
+ __version__ = ".".join([str(num) for num in __version_tuple])
+ # These are exposed to Sublime to implement the core of the help system.
from .help import HyperHelpCommand, HyperHelpNavigateCommand
from .help import HyperHelpListener
+ # These are exposed to packages that may want to interface with the hyperhelp
+ # core for use in their own packages.
+ from .operations import package_help_scan
+
+
###----------------------------------------------------------------------------
+
+
+ def version():
+ """
+ Get the currently installed version of hyperhelp as a tuple.
+ """
+ return __version_tuple
+
+
+ ###----------------------------------------------------------------------------
|
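A hedged sketch of how a companion package (the message mentions HyperHelpAuthor) might consume the exposed version tuple; the import path assumes the Sublime package layout implied by all/__init__.py:

from hyperhelp.all import version

# Tuples compare element-wise, so this is a simple minimum-version gate.
if version() < (1, 0, 0):
    raise ImportError("hyperhelp core 1.0.0 or newer is required")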
4b193d9f0c46f91c5a58446e6443d8779e7ca5ce
|
server/plugins/ardinfo/scripts/ard_info.py
|
server/plugins/ardinfo/scripts/ard_info.py
|
import os
import sys
sys.path.append("/usr/local/munki/munkilib")
import FoundationPlist
sys.path.append("/usr/local/sal")
import utils
def main():
ard_path = "/Library/Preferences/com.apple.RemoteDesktop.plist"
if os.path.exists(ard_path):
ard_prefs = FoundationPlist.readPlist(ard_path)
else:
ard_prefs = {}
sal_result_key = "ARD_Info_{}"
prefs_key_prefix = "Text{}"
data = {
sal_result_key.format(i): ard_prefs.get(prefs_key_prefix.format(i), "")
for i in range(1, 5)}
utils.add_plugin_results('ARD_Info', data)
if __name__ == "__main__":
main()
|
import os
import sys
sys.path.append("/usr/local/munki")
from munkilib import FoundationPlist
sys.path.append("/usr/local/sal")
import utils
def main():
ard_path = "/Library/Preferences/com.apple.RemoteDesktop.plist"
if os.path.exists(ard_path):
ard_prefs = FoundationPlist.readPlist(ard_path)
else:
ard_prefs = {}
sal_result_key = "ARD_Info_{}"
prefs_key_prefix = "Text{}"
data = {
sal_result_key.format(i): ard_prefs.get(prefs_key_prefix.format(i), "")
for i in range(1, 5)}
utils.add_plugin_results('ARD_Info', data)
if __name__ == "__main__":
main()
|
Fix name clash over "utils" in ardinfo plugin script.
|
Fix name clash over "utils" in ardinfo plugin script.
|
Python
|
apache-2.0
|
sheagcraig/sal,sheagcraig/sal,salopensource/sal,sheagcraig/sal,salopensource/sal,salopensource/sal,sheagcraig/sal,salopensource/sal
|
import os
import sys
- sys.path.append("/usr/local/munki/munkilib")
+ sys.path.append("/usr/local/munki")
- import FoundationPlist
+ from munkilib import FoundationPlist
sys.path.append("/usr/local/sal")
import utils
def main():
ard_path = "/Library/Preferences/com.apple.RemoteDesktop.plist"
if os.path.exists(ard_path):
ard_prefs = FoundationPlist.readPlist(ard_path)
else:
ard_prefs = {}
sal_result_key = "ARD_Info_{}"
prefs_key_prefix = "Text{}"
data = {
sal_result_key.format(i): ard_prefs.get(prefs_key_prefix.format(i), "")
for i in range(1, 5)}
utils.add_plugin_results('ARD_Info', data)
if __name__ == "__main__":
main()
|
Fix name clash over "utils" in ardinfo plugin script.
|
## Code Before:
import os
import sys
sys.path.append("/usr/local/munki/munkilib")
import FoundationPlist
sys.path.append("/usr/local/sal")
import utils
def main():
ard_path = "/Library/Preferences/com.apple.RemoteDesktop.plist"
if os.path.exists(ard_path):
ard_prefs = FoundationPlist.readPlist(ard_path)
else:
ard_prefs = {}
sal_result_key = "ARD_Info_{}"
prefs_key_prefix = "Text{}"
data = {
sal_result_key.format(i): ard_prefs.get(prefs_key_prefix.format(i), "")
for i in range(1, 5)}
utils.add_plugin_results('ARD_Info', data)
if __name__ == "__main__":
main()
## Instruction:
Fix name clash over "utils" in ardinfo plugin script.
## Code After:
import os
import sys
sys.path.append("/usr/local/munki")
from munkilib import FoundationPlist
sys.path.append("/usr/local/sal")
import utils
def main():
ard_path = "/Library/Preferences/com.apple.RemoteDesktop.plist"
if os.path.exists(ard_path):
ard_prefs = FoundationPlist.readPlist(ard_path)
else:
ard_prefs = {}
sal_result_key = "ARD_Info_{}"
prefs_key_prefix = "Text{}"
data = {
sal_result_key.format(i): ard_prefs.get(prefs_key_prefix.format(i), "")
for i in range(1, 5)}
utils.add_plugin_results('ARD_Info', data)
if __name__ == "__main__":
main()
|
import os
import sys
- sys.path.append("/usr/local/munki/munkilib")
? ---------
+ sys.path.append("/usr/local/munki")
- import FoundationPlist
+ from munkilib import FoundationPlist
? ++++++++++++++
sys.path.append("/usr/local/sal")
import utils
def main():
ard_path = "/Library/Preferences/com.apple.RemoteDesktop.plist"
if os.path.exists(ard_path):
ard_prefs = FoundationPlist.readPlist(ard_path)
else:
ard_prefs = {}
sal_result_key = "ARD_Info_{}"
prefs_key_prefix = "Text{}"
data = {
sal_result_key.format(i): ard_prefs.get(prefs_key_prefix.format(i), "")
for i in range(1, 5)}
utils.add_plugin_results('ARD_Info', data)
if __name__ == "__main__":
main()
|
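Editor's note: a self-contained sketch of the shadowing bug this commit fixes. Appending a package's internal directory to sys.path exposes every module inside it as a top-level name, so its utils.py wins over the application's. All paths and module contents below are hypothetical stand-ins for the munki/sal layout; run it in a fresh interpreter.

import os
import sys
import tempfile

base = tempfile.mkdtemp()
vendor_internals = os.path.join(base, "vendor_pkg")  # like /usr/local/munki/munkilib
app_dir = os.path.join(base, "app")                  # like /usr/local/sal
os.makedirs(vendor_internals)
os.makedirs(app_dir)

# Both locations ship a module named "utils".
with open(os.path.join(vendor_internals, "utils.py"), "w") as fh:
    fh.write("WHO = 'vendor copy'\n")
with open(os.path.join(app_dir, "utils.py"), "w") as fh:
    fh.write("WHO = 'application copy'\n")

sys.path.append(vendor_internals)  # the old, broken approach
sys.path.append(app_dir)
import utils  # first match on sys.path wins

print(utils.WHO)  # -> 'vendor copy': the application's module is shadowed
# The fix above: put only the package *parent* on sys.path and import the
# vendored module as "from package import module", keeping its internal
# modules off the top-level path entirely.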
355b70412f8b725dcf6771967387cf4ba999c98b
|
fetch_configs/syzygy.py
|
fetch_configs/syzygy.py
|
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://github.com/google/syzygy.git',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://chromium.googlesource.com/syzygy',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Update fetch config with new Syzygy location.
|
Update fetch config with new Syzygy location.
Change-Id: Iacc2efd6974f1a161da6e33a0d25d6329fcaaf4f
Reviewed-on: https://chromium-review.googlesource.com/692697
Commit-Queue: Sébastien Marchand <[email protected]>
Reviewed-by: Aaron Gable <[email protected]>
Reviewed-by: Sébastien Marchand <[email protected]>
|
Python
|
bsd-3-clause
|
CoherentLabs/depot_tools,CoherentLabs/depot_tools
|
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
- 'url' : 'https://github.com/google/syzygy.git',
+ 'url' : 'https://chromium.googlesource.com/syzygy',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Update fetch config with new Syzygy location.
|
## Code Before:
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://github.com/google/syzygy.git',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
## Instruction:
Update fetch config with new Syzygy location.
## Code After:
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://chromium.googlesource.com/syzygy',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
- 'url' : 'https://github.com/google/syzygy.git',
? ^^^ ^^^^ ^ ----
+ 'url' : 'https://chromium.googlesource.com/syzygy',
? ^ ^ ^^^^ ++++++++++
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
ded371a8cb63077e57cfcde401df56bddf078f5a
|
project/user/forms.py
|
project/user/forms.py
|
from flask_wtf import Form
from wtforms import TextField, PasswordField
from wtforms.validators import DataRequired, Email, Length, EqualTo
from project.models import User
class LoginForm(Form):
email = TextField('email', validators=[DataRequired(), Email()])
password = PasswordField('password', validators=[DataRequired()])
class RegisterForm(Form):
email = TextField(
'email',
validators=[DataRequired(), Email(message=None), Length(min=6, max=40)])
password = PasswordField(
'password',
validators=[DataRequired(), Length(min=6, max=25)]
)
confirm = PasswordField(
'Repeat password',
validators=[
DataRequired(),
EqualTo('password', message='Passwords must match.')
]
)
def validate(self):
initial_validation = super(RegisterForm, self).validate()
if not initial_validation:
return False
user = User.query.filter_by(email=self.email.data).first()
if user:
self.email.errors.append("Email already registered")
return False
return True
class ChangePasswordForm(Form):
password = PasswordField(
'password',
validators=[DataRequired(), Length(min=6, max=25)]
)
confirm = PasswordField(
'Repeat password',
validators=[
DataRequired(),
EqualTo('password', message='Passwords must match.')
]
)
|
from flask_wtf import Form
from wtforms import TextField, PasswordField
from wtforms.validators import DataRequired, Email, Length, EqualTo
from project.models import User
class LoginForm(Form):
email = TextField('email', validators=[DataRequired(), Email()])
password = PasswordField('password', validators=[DataRequired()])
class RegisterForm(Form):
email = TextField(
'email',
validators=[DataRequired(), Email(message=None), Length(min=6, max=40)])
password = PasswordField(
'password',
validators=[DataRequired(), Length(min=6, max=25)]
)
confirm = PasswordField(
'Repeat password',
validators=[
DataRequired(),
EqualTo('password', message='Passwords must match.')
]
)
def validate(self):
initial_validation = super(RegisterForm, self).validate()
if not initial_validation:
return False
user = User.query.filter_by(email=self.email.data).first()
if user:
self.email.errors.append("Email already registered")
return False
return True
class ResetPasswordForm(Form):
email = TextField(
'email', validators=[DataRequired(), Email(message=None), Length(min=6, max=40)])
class ChangePasswordForm(Form):
password = PasswordField(
'password',
validators=[DataRequired(), Length(min=6, max=25)]
)
confirm = PasswordField(
'Repeat password',
validators=[
DataRequired(),
EqualTo('password', message='Passwords must match.')
]
)
|
Create basic password reset form
|
Create basic password reset form
|
Python
|
mit
|
dylanshine/streamschool,dylanshine/streamschool
|
from flask_wtf import Form
from wtforms import TextField, PasswordField
from wtforms.validators import DataRequired, Email, Length, EqualTo
from project.models import User
class LoginForm(Form):
email = TextField('email', validators=[DataRequired(), Email()])
password = PasswordField('password', validators=[DataRequired()])
class RegisterForm(Form):
email = TextField(
'email',
validators=[DataRequired(), Email(message=None), Length(min=6, max=40)])
password = PasswordField(
'password',
validators=[DataRequired(), Length(min=6, max=25)]
)
confirm = PasswordField(
'Repeat password',
validators=[
DataRequired(),
EqualTo('password', message='Passwords must match.')
]
)
def validate(self):
initial_validation = super(RegisterForm, self).validate()
if not initial_validation:
return False
user = User.query.filter_by(email=self.email.data).first()
if user:
self.email.errors.append("Email already registered")
return False
return True
+ class ResetPasswordForm(Form):
+ email = TextField(
+ 'email', validators=[DataRequired(), Email(message=None), Length(min=6, max=40)])
+
+
class ChangePasswordForm(Form):
password = PasswordField(
'password',
validators=[DataRequired(), Length(min=6, max=25)]
)
confirm = PasswordField(
'Repeat password',
validators=[
DataRequired(),
EqualTo('password', message='Passwords must match.')
]
)
|
Create basic password reset form
|
## Code Before:
from flask_wtf import Form
from wtforms import TextField, PasswordField
from wtforms.validators import DataRequired, Email, Length, EqualTo
from project.models import User
class LoginForm(Form):
email = TextField('email', validators=[DataRequired(), Email()])
password = PasswordField('password', validators=[DataRequired()])
class RegisterForm(Form):
email = TextField(
'email',
validators=[DataRequired(), Email(message=None), Length(min=6, max=40)])
password = PasswordField(
'password',
validators=[DataRequired(), Length(min=6, max=25)]
)
confirm = PasswordField(
'Repeat password',
validators=[
DataRequired(),
EqualTo('password', message='Passwords must match.')
]
)
def validate(self):
initial_validation = super(RegisterForm, self).validate()
if not initial_validation:
return False
user = User.query.filter_by(email=self.email.data).first()
if user:
self.email.errors.append("Email already registered")
return False
return True
class ChangePasswordForm(Form):
password = PasswordField(
'password',
validators=[DataRequired(), Length(min=6, max=25)]
)
confirm = PasswordField(
'Repeat password',
validators=[
DataRequired(),
EqualTo('password', message='Passwords must match.')
]
)
## Instruction:
Create basic password reset form
## Code After:
from flask_wtf import Form
from wtforms import TextField, PasswordField
from wtforms.validators import DataRequired, Email, Length, EqualTo
from project.models import User
class LoginForm(Form):
email = TextField('email', validators=[DataRequired(), Email()])
password = PasswordField('password', validators=[DataRequired()])
class RegisterForm(Form):
email = TextField(
'email',
validators=[DataRequired(), Email(message=None), Length(min=6, max=40)])
password = PasswordField(
'password',
validators=[DataRequired(), Length(min=6, max=25)]
)
confirm = PasswordField(
'Repeat password',
validators=[
DataRequired(),
EqualTo('password', message='Passwords must match.')
]
)
def validate(self):
initial_validation = super(RegisterForm, self).validate()
if not initial_validation:
return False
user = User.query.filter_by(email=self.email.data).first()
if user:
self.email.errors.append("Email already registered")
return False
return True
class ResetPasswordForm(Form):
email = TextField(
'email', validators=[DataRequired(), Email(message=None), Length(min=6, max=40)])
class ChangePasswordForm(Form):
password = PasswordField(
'password',
validators=[DataRequired(), Length(min=6, max=25)]
)
confirm = PasswordField(
'Repeat password',
validators=[
DataRequired(),
EqualTo('password', message='Passwords must match.')
]
)
|
from flask_wtf import Form
from wtforms import TextField, PasswordField
from wtforms.validators import DataRequired, Email, Length, EqualTo
from project.models import User
class LoginForm(Form):
email = TextField('email', validators=[DataRequired(), Email()])
password = PasswordField('password', validators=[DataRequired()])
class RegisterForm(Form):
email = TextField(
'email',
validators=[DataRequired(), Email(message=None), Length(min=6, max=40)])
password = PasswordField(
'password',
validators=[DataRequired(), Length(min=6, max=25)]
)
confirm = PasswordField(
'Repeat password',
validators=[
DataRequired(),
EqualTo('password', message='Passwords must match.')
]
)
def validate(self):
initial_validation = super(RegisterForm, self).validate()
if not initial_validation:
return False
user = User.query.filter_by(email=self.email.data).first()
if user:
self.email.errors.append("Email already registered")
return False
return True
+ class ResetPasswordForm(Form):
+ email = TextField(
+ 'email', validators=[DataRequired(), Email(message=None), Length(min=6, max=40)])
+
+
class ChangePasswordForm(Form):
password = PasswordField(
'password',
validators=[DataRequired(), Length(min=6, max=25)]
)
confirm = PasswordField(
'Repeat password',
validators=[
DataRequired(),
EqualTo('password', message='Passwords must match.')
]
)
|
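Editor's note: a minimal sketch of how the new ResetPasswordForm might be wired into a view. Only the form class comes from the record; the route, template name, secret key, and the token-sending step are assumptions, since the commit adds just the form.

from flask import Flask, flash, redirect, render_template, url_for
from project.user.forms import ResetPasswordForm

app = Flask(__name__)
app.config["SECRET_KEY"] = "change-me"  # Flask-WTF needs this for CSRF

@app.route("/reset", methods=["GET", "POST"])
def reset_password():
    form = ResetPasswordForm()
    if form.validate_on_submit():  # runs the email validators on POST
        # Look up the user and send a reset token here (not shown in the
        # record, so deliberately left out).
        flash("If that address exists, a reset link has been sent.")
        return redirect(url_for("reset_password"))
    return render_template("reset_password.html", form=form)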
22b697729d1ee43d322aa1187b3a5f6101f836a5
|
odin/__init__.py
|
odin/__init__.py
|
__authors__ = "Tim Savage"
__author_email__ = "[email protected]"
__copyright__ = "Copyright (C) 2014 Tim Savage"
__version__ = "1.0"
# Disable logging if an explicit handler is not added
try:
import logging
logging.getLogger('odin').addHandler(logging.NullHandler())
except AttributeError:
pass # Fallback for python 2.6
from odin.fields import * # noqa
from odin.fields.composite import * # noqa
from odin.fields.virtual import * # noqa
from odin.mapping import * # noqa
from odin.resources import Resource # noqa
from odin.adapters import ResourceAdapter # noqa
|
import logging
logging.getLogger('odin.registration').addHandler(logging.NullHandler())
__authors__ = "Tim Savage"
__author_email__ = "[email protected]"
__copyright__ = "Copyright (C) 2014 Tim Savage"
__version__ = "1.0"
from odin.fields import * # noqa
from odin.fields.composite import * # noqa
from odin.fields.virtual import * # noqa
from odin.mapping import * # noqa
from odin.resources import Resource # noqa
from odin.adapters import ResourceAdapter # noqa
|
Remove Python 2.6 backwards compatibility
|
Remove Python 2.6 backwards compatibility
|
Python
|
bsd-3-clause
|
python-odin/odin
|
+ import logging
+ logging.getLogger('odin.registration').addHandler(logging.NullHandler())
+
__authors__ = "Tim Savage"
__author_email__ = "[email protected]"
__copyright__ = "Copyright (C) 2014 Tim Savage"
__version__ = "1.0"
-
- # Disable logging if an explicit handler is not added
- try:
- import logging
- logging.getLogger('odin').addHandler(logging.NullHandler())
- except AttributeError:
- pass # Fallback for python 2.6
from odin.fields import * # noqa
from odin.fields.composite import * # noqa
from odin.fields.virtual import * # noqa
from odin.mapping import * # noqa
from odin.resources import Resource # noqa
from odin.adapters import ResourceAdapter # noqa
|
Remove Python 2.6 backwards compatibility
|
## Code Before:
__authors__ = "Tim Savage"
__author_email__ = "[email protected]"
__copyright__ = "Copyright (C) 2014 Tim Savage"
__version__ = "1.0"
# Disable logging if an explicit handler is not added
try:
import logging
logging.getLogger('odin').addHandler(logging.NullHandler())
except AttributeError:
pass # Fallback for python 2.6
from odin.fields import * # noqa
from odin.fields.composite import * # noqa
from odin.fields.virtual import * # noqa
from odin.mapping import * # noqa
from odin.resources import Resource # noqa
from odin.adapters import ResourceAdapter # noqa
## Instruction:
Remove Python 2.6 backwards compatibility
## Code After:
import logging
logging.getLogger('odin.registration').addHandler(logging.NullHandler())
__authors__ = "Tim Savage"
__author_email__ = "[email protected]"
__copyright__ = "Copyright (C) 2014 Tim Savage"
__version__ = "1.0"
from odin.fields import * # noqa
from odin.fields.composite import * # noqa
from odin.fields.virtual import * # noqa
from odin.mapping import * # noqa
from odin.resources import Resource # noqa
from odin.adapters import ResourceAdapter # noqa
|
+ import logging
+ logging.getLogger('odin.registration').addHandler(logging.NullHandler())
+
__authors__ = "Tim Savage"
__author_email__ = "[email protected]"
__copyright__ = "Copyright (C) 2014 Tim Savage"
__version__ = "1.0"
-
- # Disable logging if an explicit handler is not added
- try:
- import logging
- logging.getLogger('odin').addHandler(logging.NullHandler())
- except AttributeError:
- pass # Fallback for python 2.6
from odin.fields import * # noqa
from odin.fields.composite import * # noqa
from odin.fields.virtual import * # noqa
from odin.mapping import * # noqa
from odin.resources import Resource # noqa
from odin.adapters import ResourceAdapter # noqa
|
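Editor's note: the NullHandler attached above is the standard way to keep a library silent unless the application opts in. A sketch of the consuming side; the logger name follows the record, while the level and format string are arbitrary choices.

import logging

# The application, not the library, decides whether log records are shown.
logging.basicConfig(level=logging.DEBUG,
                    format="%(name)s %(levelname)s %(message)s")
logging.getLogger("odin.registration").debug("now visible to the app")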
b097675e5906f7b0e9c050110fea58e40491814b
|
music/api.py
|
music/api.py
|
from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
|
from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from tastypie.constants import ALL
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
filtering = {
'last_played': ALL
}
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
|
Allow filtering and ordering on API
|
Allow filtering and ordering on API
|
Python
|
bsd-3-clause
|
praekelt/jmbo-music,praekelt/jmbo-music
|
from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
+ from tastypie.constants import ALL
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
+ filtering = {
+ 'last_played': ALL
+ }
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
|
Allow filtering and ordering on API
|
## Code Before:
from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
## Instruction:
Allow filtering and ordering on API
## Code After:
from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from tastypie.constants import ALL
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
filtering = {
'last_played': ALL
}
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
|
from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
+ from tastypie.constants import ALL
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
+ filtering = {
+ 'last_played': ALL
+ }
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
|
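Editor's note: with filtering and ordering declared, clients can constrain the track list through query parameters. A hedged sketch: the host and /api/v1/ prefix depend on how the resource is registered, which the record does not show; the parameter shapes follow standard tastypie conventions for the fields above.

import requests

resp = requests.get(
    "http://example.com/api/v1/track/",
    params={
        "last_played__gte": "2013-01-01",  # any ORM lookup works, since ALL is allowed
        "order_by": "-last_played",        # descending sort on the declared ordering field
        "format": "json",
    },
)
print(resp.json()["objects"])  # tastypie wraps list results in "meta" + "objects"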
ce9f5551ec7173cc132eb1271e0fc2c1bbfaa7ce
|
apps/worker/src/main/core/node.py
|
apps/worker/src/main/core/node.py
|
from syft.core.node.vm.vm import VirtualMachine
node = VirtualMachine(name="om-vm")
|
from syft.core.node.device.device import Device
from syft.grid.services.vm_management_service import CreateVMService
node = Device(name="om-device")
node.immediate_services_with_reply.append(CreateVMService)
node._register_services() # re-register all services including SignalingService
|
Add CreateVMService to the Device app
|
Add CreateVMService to the Device app
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
- from syft.core.node.vm.vm import VirtualMachine
+ from syft.core.node.device.device import Device
+ from syft.grid.services.vm_management_service import CreateVMService
- node = VirtualMachine(name="om-vm")
+ node = Device(name="om-device")
+ node.immediate_services_with_reply.append(CreateVMService)
+ node._register_services() # re-register all services including SignalingService
|
Add CreateVMService to the Device app
|
## Code Before:
from syft.core.node.vm.vm import VirtualMachine
node = VirtualMachine(name="om-vm")
## Instruction:
Add CreateVMService to the Device app
## Code After:
from syft.core.node.device.device import Device
from syft.grid.services.vm_management_service import CreateVMService
node = Device(name="om-device")
node.immediate_services_with_reply.append(CreateVMService)
node._register_services() # re-register all services including SignalingService
|
- from syft.core.node.vm.vm import VirtualMachine
+ from syft.core.node.device.device import Device
+ from syft.grid.services.vm_management_service import CreateVMService
- node = VirtualMachine(name="om-vm")
+ node = Device(name="om-device")
+ node.immediate_services_with_reply.append(CreateVMService)
+ node._register_services() # re-register all services including SignalingService
|
f61b81e968384859eb51a2ff14ca7709e8322ae8
|
yunity/walls/models.py
|
yunity/walls/models.py
|
from django.db.models import ForeignKey, TextField
from config import settings
from yunity.base.models import BaseModel
class Wall(BaseModel):
pass
class WallPost(BaseModel):
wall = ForeignKey(Wall)
author = ForeignKey(settings.AUTH_USER_MODEL)
class WallPostContent(BaseModel):
post = ForeignKey(WallPost)
author = ForeignKey(settings.AUTH_USER_MODEL)
body = TextField()
|
from django.db.models import ForeignKey, TextField
from config import settings
from yunity.base.models import BaseModel
class Wall(BaseModel):
def resolve_permissions(self, collector):
h = self.hub
if h.target_content_type.model == 'group':
g = h.target
""":type : Group"""
collector.add_hub(h, 'read')
if g.is_content_included_in_parent:
g = g.parent
while g:
collector.add_hub(g.hub, 'read')
g = g.parent
class WallPost(BaseModel):
wall = ForeignKey(Wall)
author = ForeignKey(settings.AUTH_USER_MODEL)
class WallPostContent(BaseModel):
post = ForeignKey(WallPost)
author = ForeignKey(settings.AUTH_USER_MODEL)
body = TextField()
|
Implement basic permissions resolver for walls
|
Implement basic permissions resolver for walls
To be seen as a PoC: collect all hub permissions for a basic permission
and settings/inheritance model for reading a wall.
With @nicksellen
|
Python
|
agpl-3.0
|
yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend
|
from django.db.models import ForeignKey, TextField
from config import settings
from yunity.base.models import BaseModel
class Wall(BaseModel):
- pass
+ def resolve_permissions(self, collector):
+ h = self.hub
+ if h.target_content_type.model == 'group':
+ g = h.target
+ """:type : Group"""
+ collector.add_hub(h, 'read')
+ if g.is_content_included_in_parent:
+ g = g.parent
+ while g:
+ collector.add_hub(g.hub, 'read')
+ g = g.parent
class WallPost(BaseModel):
wall = ForeignKey(Wall)
author = ForeignKey(settings.AUTH_USER_MODEL)
class WallPostContent(BaseModel):
post = ForeignKey(WallPost)
author = ForeignKey(settings.AUTH_USER_MODEL)
body = TextField()
|
Implement basic permissions resolver for walls
|
## Code Before:
from django.db.models import ForeignKey, TextField
from config import settings
from yunity.base.models import BaseModel
class Wall(BaseModel):
pass
class WallPost(BaseModel):
wall = ForeignKey(Wall)
author = ForeignKey(settings.AUTH_USER_MODEL)
class WallPostContent(BaseModel):
post = ForeignKey(WallPost)
author = ForeignKey(settings.AUTH_USER_MODEL)
body = TextField()
## Instruction:
Implement basic permissions resolver for walls
## Code After:
from django.db.models import ForeignKey, TextField
from config import settings
from yunity.base.models import BaseModel
class Wall(BaseModel):
def resolve_permissions(self, collector):
h = self.hub
if h.target_content_type.model == 'group':
g = h.target
""":type : Group"""
collector.add_hub(h, 'read')
if g.is_content_included_in_parent:
g = g.parent
while g:
collector.add_hub(g.hub, 'read')
g = g.parent
class WallPost(BaseModel):
wall = ForeignKey(Wall)
author = ForeignKey(settings.AUTH_USER_MODEL)
class WallPostContent(BaseModel):
post = ForeignKey(WallPost)
author = ForeignKey(settings.AUTH_USER_MODEL)
body = TextField()
|
from django.db.models import ForeignKey, TextField
from config import settings
from yunity.base.models import BaseModel
class Wall(BaseModel):
- pass
+ def resolve_permissions(self, collector):
+ h = self.hub
+ if h.target_content_type.model == 'group':
+ g = h.target
+ """:type : Group"""
+ collector.add_hub(h, 'read')
+ if g.is_content_included_in_parent:
+ g = g.parent
+ while g:
+ collector.add_hub(g.hub, 'read')
+ g = g.parent
class WallPost(BaseModel):
wall = ForeignKey(Wall)
author = ForeignKey(settings.AUTH_USER_MODEL)
class WallPostContent(BaseModel):
post = ForeignKey(WallPost)
author = ForeignKey(settings.AUTH_USER_MODEL)
body = TextField()
|
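Editor's note: the parent walk in resolve_permissions, reduced to plain objects so the control flow can be run and inspected without Django. Class and attribute names mirror the record; the hub values and two-level hierarchy are made up, and the exact nesting of the while loop is an assumption since indentation is not preserved in this dump.

class Collector:
    def __init__(self):
        self.granted = []

    def add_hub(self, hub, perm):
        self.granted.append((hub, perm))

class Group:
    def __init__(self, hub, parent=None, include_in_parent=False):
        self.hub = hub
        self.parent = parent
        self.is_content_included_in_parent = include_in_parent

root = Group(hub="root-hub")
child = Group(hub="child-hub", parent=root, include_in_parent=True)

collector = Collector()
collector.add_hub(child.hub, "read")  # the wall's own hub
g = child
if g.is_content_included_in_parent:
    g = g.parent
    while g:  # climb the ancestor chain to the top
        collector.add_hub(g.hub, "read")
        g = g.parent

print(collector.granted)  # [('child-hub', 'read'), ('root-hub', 'read')]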
e90c7d034f070361893f77d7a257640d647be0c7
|
mbuild/tests/test_xyz.py
|
mbuild/tests/test_xyz.py
|
import numpy as np
import pytest
import mbuild as mb
from mbuild.utils.io import get_fn
from mbuild.tests.base_test import BaseTest
from mbuild.exceptions import MBuildError
class TestXYZ(BaseTest):
def test_load_no_top(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert ethane_in.n_bonds == 0
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_wrong_n_atoms(self):
with pytest.raises(MBuildError):
mb.load(get_fn('too_few_atoms.xyz'))
with pytest.raises(MBuildError):
mb.load(get_fn('too_many_atoms.xyz'))
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_coordinates(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert np.allclose(ethane.xyz, ethane_in.xyz)
|
import numpy as np
import pytest
import mbuild as mb
from mbuild.formats.xyz import write_xyz
from mbuild.utils.io import get_fn
from mbuild.tests.base_test import BaseTest
from mbuild.exceptions import MBuildError
class TestXYZ(BaseTest):
def test_load_no_top(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert ethane_in.n_bonds == 0
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_wrong_n_atoms(self):
with pytest.raises(MBuildError):
mb.load(get_fn('too_few_atoms.xyz'))
with pytest.raises(MBuildError):
mb.load(get_fn('too_many_atoms.xyz'))
def test_bad_input(self, ethane):
with pytest.raises(ValueError):
assert isinstance(ethane, mb.Compound)
write_xyz(ethane, 'compound.xyz')
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_coordinates(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert np.allclose(ethane.xyz, ethane_in.xyz)
|
Add test to ensure write_xyz does not directly take in compound
|
Add test to ensure write_xyz does not directly take in compound
|
Python
|
mit
|
iModels/mbuild,iModels/mbuild
|
import numpy as np
import pytest
import mbuild as mb
+ from mbuild.formats.xyz import write_xyz
from mbuild.utils.io import get_fn
from mbuild.tests.base_test import BaseTest
from mbuild.exceptions import MBuildError
class TestXYZ(BaseTest):
def test_load_no_top(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert ethane_in.n_bonds == 0
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_wrong_n_atoms(self):
with pytest.raises(MBuildError):
mb.load(get_fn('too_few_atoms.xyz'))
with pytest.raises(MBuildError):
mb.load(get_fn('too_many_atoms.xyz'))
+ def test_bad_input(self, ethane):
+ with pytest.raises(ValueError):
+ assert isinstance(ethane, mb.Compound)
+ write_xyz(ethane, 'compound.xyz')
+
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_coordinates(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert np.allclose(ethane.xyz, ethane_in.xyz)
|
Add test to ensure write_xyz does not directly take in compound
|
## Code Before:
import numpy as np
import pytest
import mbuild as mb
from mbuild.utils.io import get_fn
from mbuild.tests.base_test import BaseTest
from mbuild.exceptions import MBuildError
class TestXYZ(BaseTest):
def test_load_no_top(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert ethane_in.n_bonds == 0
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_wrong_n_atoms(self):
with pytest.raises(MBuildError):
mb.load(get_fn('too_few_atoms.xyz'))
with pytest.raises(MBuildError):
mb.load(get_fn('too_many_atoms.xyz'))
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_coordinates(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert np.allclose(ethane.xyz, ethane_in.xyz)
## Instruction:
Add test to ensure write_xyz does not directly take in compound
## Code After:
import numpy as np
import pytest
import mbuild as mb
from mbuild.formats.xyz import write_xyz
from mbuild.utils.io import get_fn
from mbuild.tests.base_test import BaseTest
from mbuild.exceptions import MBuildError
class TestXYZ(BaseTest):
def test_load_no_top(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert ethane_in.n_bonds == 0
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_wrong_n_atoms(self):
with pytest.raises(MBuildError):
mb.load(get_fn('too_few_atoms.xyz'))
with pytest.raises(MBuildError):
mb.load(get_fn('too_many_atoms.xyz'))
def test_bad_input(self, ethane):
with pytest.raises(ValueError):
assert isinstance(ethane, mb.Compound)
write_xyz(ethane, 'compound.xyz')
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_coordinates(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert np.allclose(ethane.xyz, ethane_in.xyz)
|
import numpy as np
import pytest
import mbuild as mb
+ from mbuild.formats.xyz import write_xyz
from mbuild.utils.io import get_fn
from mbuild.tests.base_test import BaseTest
from mbuild.exceptions import MBuildError
class TestXYZ(BaseTest):
def test_load_no_top(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert ethane_in.n_bonds == 0
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_wrong_n_atoms(self):
with pytest.raises(MBuildError):
mb.load(get_fn('too_few_atoms.xyz'))
with pytest.raises(MBuildError):
mb.load(get_fn('too_many_atoms.xyz'))
+ def test_bad_input(self, ethane):
+ with pytest.raises(ValueError):
+ assert isinstance(ethane, mb.Compound)
+ write_xyz(ethane, 'compound.xyz')
+
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_coordinates(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert np.allclose(ethane.xyz, ethane_in.xyz)
|
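Editor's note: the new test leans on pytest.raises as a context manager, where the block passes only if the expected exception is raised inside it. A toy stand-in, since building a real mbuild ethane needs the test fixtures; the validation rule below is invented purely to make the pattern runnable.

import pytest

def write_xyz_like(obj, path):
    # Toy writer that validates its input type, as write_xyz is expected to.
    if not isinstance(obj, (list, tuple)):
        raise ValueError("expected a coordinate sequence, got %r" % type(obj))

def test_rejects_wrong_type():
    with pytest.raises(ValueError):          # fails if nothing (or anything
        write_xyz_like(object(), "out.xyz")  # else) is raised in the block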
a5f9abc3f2de2bca89a5c3e5c35a8d7e223be4dd
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='nipype-pbs-workflows',
version='1.0',
author='https://www.ctsi.ufl.edu/research/study-development/informatics-consulting/',
author_email='[email protected]',
description='Neuroimaging workflows written in nipype with a focus on PBS job scheduler',
long_description=open('README.md').read(),
url='https://github.com/ctsit/nipype-pbs-workflows',
package_dir={'': 'nipype-pbs-workflows'},
packages=[''],
)
|
from distutils.core import setup
setup(
name='nipype-pbs-workflows',
version='1.0.1',
author='https://www.ctsi.ufl.edu/research/study-development/informatics-consulting/',
author_email='[email protected]',
description='Neuroimaging workflows written in nipype with a focus on PBS job scheduler',
long_description=open('README.md').read(),
url='https://github.com/ctsit/nipype-pbs-workflows',
packages=['nipype-pbs-workflows'],
package_dir={'nipype-pbs-workflows': 'src'},
scripts=['src/bedpostx.py', 'src/dcm2niiconverter.py'],
)
|
Install scripts into the correct location
|
Install scripts into the correct location
|
Python
|
bsd-3-clause
|
ctsit/nipype-pbs-workflows
|
from distutils.core import setup
setup(
- name='nipype-pbs-workflows',
+ name='nipype-pbs-workflows',
- version='1.0',
+ version='1.0.1',
author='https://www.ctsi.ufl.edu/research/study-development/informatics-consulting/',
author_email='[email protected]',
description='Neuroimaging workflows written in nipype with a focus on PBS job scheduler',
long_description=open('README.md').read(),
url='https://github.com/ctsit/nipype-pbs-workflows',
+ packages=['nipype-pbs-workflows'],
- package_dir={'': 'nipype-pbs-workflows'},
+ package_dir={'nipype-pbs-workflows': 'src'},
- packages=[''],
+ scripts=['src/bedpostx.py', 'src/dcm2niiconverter.py'],
)
|
Install scripts into the correct location
|
## Code Before:
from distutils.core import setup
setup(
name='nipype-pbs-workflows',
version='1.0',
author='https://www.ctsi.ufl.edu/research/study-development/informatics-consulting/',
author_email='[email protected]',
description='Neuroimaging workflows written in nipype with a focus on PBS job scheduler',
long_description=open('README.md').read(),
url='https://github.com/ctsit/nipype-pbs-workflows',
package_dir={'': 'nipype-pbs-workflows'},
packages=[''],
)
## Instruction:
Install scripts into the correct location
## Code After:
from distutils.core import setup
setup(
name='nipype-pbs-workflows',
version='1.0.1',
author='https://www.ctsi.ufl.edu/research/study-development/informatics-consulting/',
author_email='[email protected]',
description='Neuroimaging workflows written in nipype with a focus on PBS job scheduler',
long_description=open('README.md').read(),
url='https://github.com/ctsit/nipype-pbs-workflows',
packages=['nipype-pbs-workflows'],
package_dir={'nipype-pbs-workflows': 'src'},
scripts=['src/bedpostx.py', 'src/dcm2niiconverter.py'],
)
|
from distutils.core import setup
setup(
- name='nipype-pbs-workflows',
? ^
+ name='nipype-pbs-workflows',
? ^^^^
- version='1.0',
+ version='1.0.1',
? ++
author='https://www.ctsi.ufl.edu/research/study-development/informatics-consulting/',
author_email='[email protected]',
description='Neuroimaging workflows written in nipype with a focus on PBS job scheduler',
long_description=open('README.md').read(),
url='https://github.com/ctsit/nipype-pbs-workflows',
+ packages=['nipype-pbs-workflows'],
- package_dir={'': 'nipype-pbs-workflows'},
? ----
+ package_dir={'nipype-pbs-workflows': 'src'},
? +++++++
- packages=[''],
+ scripts=['src/bedpostx.py', 'src/dcm2niiconverter.py'],
)
|
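Editor's note: the corrected metadata reads as follows: package_dir maps a package name to the directory holding its code, packages lists what to install, and scripts get copied onto the user's PATH. One caveat worth flagging: a hyphenated name like the record's cannot be imported as a Python identifier, so this hypothetical mirror uses an underscore; the directory layout is assumed.

# Assumed layout:
#   setup.py
#   src/
#     __init__.py
#     bedpostx.py
#     dcm2niiconverter.py
from distutils.core import setup

setup(
    name='example-workflows',
    version='0.1',
    packages=['example_workflows'],            # importable package name...
    package_dir={'example_workflows': 'src'},  # ...whose code lives in src/
    scripts=['src/bedpostx.py', 'src/dcm2niiconverter.py'],  # installed to bin/
)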
f3fa16aeee901f7ee1438bbc5b12170f82a184bc
|
project/api/management/commands/song_titles.py
|
project/api/management/commands/song_titles.py
|
from django.core.management.base import BaseCommand
from django.core.mail import EmailMessage
from openpyxl import Workbook
# First-Party
from api.models import Chart
class Command(BaseCommand):
help = "Command to sync database with BHS ."
def handle(self, *args, **options):
self.stdout.write("Sending song title report...")
wb = Workbook()
ws = wb.active
fieldnames = [
'title',
]
ws.append(fieldnames)
charts = Chart.objects.all().distinct(
'title'
).order_by(
'title'
)
for chart in charts:
title = chart.title.strip()
row = [
title,
]
ws.append(row)
wb.save('song_title_report.xlsx')
message = EmailMessage(
subject='Song Title Report',
body='Song Title Report Attached',
from_email='[email protected]',
to=['[email protected]', '[email protected]']
)
message.attach_file('song_title_report.xlsx')
result = message.send()
if result == 1:
self.stdout.write("Sent.")
else:
self.stdout.write("Error. Not sent.")
|
from django.core.management.base import BaseCommand
from django.core.mail import EmailMessage
from openpyxl import Workbook
# First-Party
from api.models import Chart
class Command(BaseCommand):
help = "Command to sync database with BHS ."
def handle(self, *args, **options):
self.stdout.write("Sending song title report...")
wb = Workbook()
ws = wb.active
fieldnames = [
'title',
]
ws.append(fieldnames)
charts = Chart.objects.all().distinct(
'title'
).order_by(
'title'
)
for chart in charts:
title = chart.title.strip()
row = [
title,
]
ws.append(row)
wb.save('song_title_report.xlsx')
message = EmailMessage(
subject='Song Title Report',
body='Song Title Report Attached',
from_email='[email protected]',
to=['[email protected]', ]
)
message.attach_file('song_title_report.xlsx')
result = message.send()
if result == 1:
self.stdout.write("Sent.")
else:
self.stdout.write("Error. Not sent.")
|
Remove me from song title report send
|
Remove me from song title report send
|
Python
|
bsd-2-clause
|
dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore-django,barberscore/barberscore-api
|
from django.core.management.base import BaseCommand
from django.core.mail import EmailMessage
from openpyxl import Workbook
# First-Party
from api.models import Chart
class Command(BaseCommand):
help = "Command to sync database with BHS ."
def handle(self, *args, **options):
self.stdout.write("Sending song title report...")
wb = Workbook()
ws = wb.active
fieldnames = [
'title',
]
ws.append(fieldnames)
charts = Chart.objects.all().distinct(
'title'
).order_by(
'title'
)
for chart in charts:
title = chart.title.strip()
row = [
title,
]
ws.append(row)
wb.save('song_title_report.xlsx')
message = EmailMessage(
subject='Song Title Report',
body='Song Title Report Attached',
from_email='[email protected]',
- to=['[email protected]', '[email protected]']
+ to=['[email protected]', ]
)
message.attach_file('song_title_report.xlsx')
result = message.send()
if result == 1:
self.stdout.write("Sent.")
else:
self.stdout.write("Error. Not sent.")
|
Remove me from song title report send
|
## Code Before:
from django.core.management.base import BaseCommand
from django.core.mail import EmailMessage
from openpyxl import Workbook
# First-Party
from api.models import Chart
class Command(BaseCommand):
help = "Command to sync database with BHS ."
def handle(self, *args, **options):
self.stdout.write("Sending song title report...")
wb = Workbook()
ws = wb.active
fieldnames = [
'title',
]
ws.append(fieldnames)
charts = Chart.objects.all().distinct(
'title'
).order_by(
'title'
)
for chart in charts:
title = chart.title.strip()
row = [
title,
]
ws.append(row)
wb.save('song_title_report.xlsx')
message = EmailMessage(
subject='Song Title Report',
body='Song Title Report Attached',
from_email='[email protected]',
to=['[email protected]', '[email protected]']
)
message.attach_file('song_title_report.xlsx')
result = message.send()
if result == 1:
self.stdout.write("Sent.")
else:
self.stdout.write("Error. Not sent.")
## Instruction:
Remove me from song title report send
## Code After:
from django.core.management.base import BaseCommand
from django.core.mail import EmailMessage
from openpyxl import Workbook
# First-Party
from api.models import Chart
class Command(BaseCommand):
help = "Command to sync database with BHS ."
def handle(self, *args, **options):
self.stdout.write("Sending song title report...")
wb = Workbook()
ws = wb.active
fieldnames = [
'title',
]
ws.append(fieldnames)
charts = Chart.objects.all().distinct(
'title'
).order_by(
'title'
)
for chart in charts:
title = chart.title.strip()
row = [
title,
]
ws.append(row)
wb.save('song_title_report.xlsx')
message = EmailMessage(
subject='Song Title Report',
body='Song Title Report Attached',
from_email='[email protected]',
to=['[email protected]', ]
)
message.attach_file('song_title_report.xlsx')
result = message.send()
if result == 1:
self.stdout.write("Sent.")
else:
self.stdout.write("Error. Not sent.")
|
from django.core.management.base import BaseCommand
from django.core.mail import EmailMessage
from openpyxl import Workbook
# First-Party
from api.models import Chart
class Command(BaseCommand):
help = "Command to sync database with BHS ."
def handle(self, *args, **options):
self.stdout.write("Sending song title report...")
wb = Workbook()
ws = wb.active
fieldnames = [
'title',
]
ws.append(fieldnames)
charts = Chart.objects.all().distinct(
'title'
).order_by(
'title'
)
for chart in charts:
title = chart.title.strip()
row = [
title,
]
ws.append(row)
wb.save('song_title_report.xlsx')
message = EmailMessage(
subject='Song Title Report',
body='Song Title Report Attached',
from_email='[email protected]',
- to=['[email protected]', '[email protected]']
? --------------------
+ to=['[email protected]', ]
)
message.attach_file('song_title_report.xlsx')
result = message.send()
if result == 1:
self.stdout.write("Sent.")
else:
self.stdout.write("Error. Not sent.")
|
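Editor's note: the report-building core of the command, trimmed to plain openpyxl so it runs without Django or email settings. The titles are made up; in the command the deduplicated, ordered rows come from the distinct()/order_by() queryset rather than the set/sorted pair used here.

from openpyxl import Workbook

titles = ["After Hours", "Sweet Adeline", "After Hours"]  # duplicates on purpose

wb = Workbook()
ws = wb.active
ws.append(["title"])                               # header row
for title in sorted({t.strip() for t in titles}):  # distinct + ordered
    ws.append([title])
wb.save("song_title_report.xlsx")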
e890ac9ef00193beac77b757c62911553cebf656
|
test.py
|
test.py
|
import urllib
urllib.urlretrieve('http://192.168.0.13:8080/photoaf.jpg', '/home/pi/img/img.jpg')
|
import urllib
urllib.urlretrieve('http://192.168.0.13:8080/photoaf.jpg', 'img.jpg')
|
Change save path to local path
|
Change save path to local path
|
Python
|
mit
|
adampiskorski/lpr_poc
|
import urllib
- urllib.urlretrieve('http://192.168.0.13:8080/photoaf.jpg', '/home/pi/img/img.jpg')
+ urllib.urlretrieve('http://192.168.0.13:8080/photoaf.jpg', 'img.jpg')
|
Change save path to local path
|
## Code Before:
import urllib
urllib.urlretrieve('http://192.168.0.13:8080/photoaf.jpg', '/home/pi/img/img.jpg')
## Instruction:
Change save path to local path
## Code After:
import urllib
urllib.urlretrieve('http://192.168.0.13:8080/photoaf.jpg', 'img.jpg')
|
import urllib
- urllib.urlretrieve('http://192.168.0.13:8080/photoaf.jpg', '/home/pi/img/img.jpg')
? -------------
+ urllib.urlretrieve('http://192.168.0.13:8080/photoaf.jpg', 'img.jpg')
|
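Editor's note: the record's one-liner is Python 2; under Python 3, urlretrieve lives in urllib.request, and the relative filename still saves into the current working directory. Same camera URL as above.

from urllib.request import urlretrieve

urlretrieve('http://192.168.0.13:8080/photoaf.jpg', 'img.jpg')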
24eb92088115a4cd583a3dc759083ff295db3135
|
website/jdpages/signals.py
|
website/jdpages/signals.py
|
import logging
logger = logging.getLogger(__name__)
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_save
from django.dispatch import receiver
from mezzanine.blog.models import BlogCategory
from website.jdpages.models import ColumnElement
@receiver(post_save)
def post_save_callback(sender, instance, created, **kwargs):
"""
Called after a model instance is saved.
Created related database objects for some
Arguments:
sender -- the model class
instance -- the actual instance being saved
created -- a boolean; True if a new record was created
"""
if not created:
return
if sender == BlogCategory:
if ColumnElement.objects.filter(object_id=instance.id, content_type=ContentType.objects.get_for_model(sender)):
return
blog_category = instance
blog_category_element = ColumnElement()
blog_category_element.title = blog_category.title
blog_category_element.content_type = ContentType.objects.get_for_model(BlogCategory)
blog_category_element.object_id = blog_category.id
blog_category_element.save()
blog_category_element.site_id = instance.site_id
blog_category_element.save(update_site=False)
return
|
import logging
logger = logging.getLogger(__name__)
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from mezzanine.blog.models import BlogCategory
from website.jdpages.models import ColumnElement
@receiver(post_save)
def post_save_callback(sender, instance, created, **kwargs):
"""
Called after a model instance is saved.
Create related models here.
Arguments:
sender -- the model class
instance -- the actual instance being saved
created -- a boolean; True if a new record was created
"""
if not created:
return
if sender == BlogCategory:
if ColumnElement.objects.filter(object_id=instance.id, content_type=ContentType.objects.get_for_model(sender)):
return
blog_category = instance
blog_category_element = ColumnElement()
blog_category_element.title = blog_category.title
blog_category_element.content_type = ContentType.objects.get_for_model(BlogCategory)
blog_category_element.object_id = blog_category.id
blog_category_element.save()
blog_category_element.site_id = instance.site_id
blog_category_element.save(update_site=False)
return
@receiver(pre_delete)
def pre_delete_callback(sender, instance, **kwargs):
"""
Called just before a model is deleted.
Delete related models here.
Arguments:
sender -- the model class
instance -- the actual instance being saved
"""
if sender == BlogCategory:
related_elements = ColumnElement.objects.filter(object_id=instance.id, content_type=ContentType.objects.get_for_model(sender))
if related_elements:
for element in related_elements:
element.delete()
return
|
Delete the related ColumnElement when a BlogCategory is deleted.
|
Delete the related ColumnElement when a BlogCategory is deleted.
|
Python
|
mit
|
jonge-democraten/website,jonge-democraten/website,jonge-democraten/website,jonge-democraten/website
|
import logging
logger = logging.getLogger(__name__)
from django.contrib.contenttypes.models import ContentType
- from django.db.models.signals import post_save
+ from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from mezzanine.blog.models import BlogCategory
from website.jdpages.models import ColumnElement
@receiver(post_save)
def post_save_callback(sender, instance, created, **kwargs):
"""
Called after a model instance is saved.
- Created related database objects for some
+ Create related models here.
Arguments:
sender -- the model class
instance -- the actual instance being saved
created -- a boolean; True if a new record was created
"""
if not created:
return
if sender == BlogCategory:
if ColumnElement.objects.filter(object_id=instance.id, content_type=ContentType.objects.get_for_model(sender)):
return
blog_category = instance
blog_category_element = ColumnElement()
blog_category_element.title = blog_category.title
blog_category_element.content_type = ContentType.objects.get_for_model(BlogCategory)
blog_category_element.object_id = blog_category.id
blog_category_element.save()
blog_category_element.site_id = instance.site_id
blog_category_element.save(update_site=False)
return
+ @receiver(pre_delete)
+ def pre_delete_callback(sender, instance, **kwargs):
+ """
+ Called just before a model is deleted.
+ Delete related models here.
+ Arguments:
+ sender -- the model class
+ instance -- the actual instance being saved
+ """
+ if sender == BlogCategory:
+ related_elements = ColumnElement.objects.filter(object_id=instance.id, content_type=ContentType.objects.get_for_model(sender))
+ if related_elements:
+ for element in related_elements:
+ element.delete()
+ return
+
|
Delete the related ColumnElement when a BlogCategory is deleted.
|
## Code Before:
import logging
logger = logging.getLogger(__name__)
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_save
from django.dispatch import receiver
from mezzanine.blog.models import BlogCategory
from website.jdpages.models import ColumnElement
@receiver(post_save)
def post_save_callback(sender, instance, created, **kwargs):
"""
Called after a model instance is saved.
Created related database objects for some
Arguments:
sender -- the model class
instance -- the actual instance being saved
created -- a boolean; True if a new record was created
"""
if not created:
return
if sender == BlogCategory:
if ColumnElement.objects.filter(object_id=instance.id, content_type=ContentType.objects.get_for_model(sender)):
return
blog_category = instance
blog_category_element = ColumnElement()
blog_category_element.title = blog_category.title
blog_category_element.content_type = ContentType.objects.get_for_model(BlogCategory)
blog_category_element.object_id = blog_category.id
blog_category_element.save()
blog_category_element.site_id = instance.site_id
blog_category_element.save(update_site=False)
return
## Instruction:
Delete the related ColumnElement when a BlogCategory is deleted.
## Code After:
import logging
logger = logging.getLogger(__name__)
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from mezzanine.blog.models import BlogCategory
from website.jdpages.models import ColumnElement
@receiver(post_save)
def post_save_callback(sender, instance, created, **kwargs):
"""
Called after a model instance is saved.
Create related models here.
Arguments:
sender -- the model class
instance -- the actual instance being saved
created -- a boolean; True if a new record was created
"""
if not created:
return
if sender == BlogCategory:
if ColumnElement.objects.filter(object_id=instance.id, content_type=ContentType.objects.get_for_model(sender)):
return
blog_category = instance
blog_category_element = ColumnElement()
blog_category_element.title = blog_category.title
blog_category_element.content_type = ContentType.objects.get_for_model(BlogCategory)
blog_category_element.object_id = blog_category.id
blog_category_element.save()
blog_category_element.site_id = instance.site_id
blog_category_element.save(update_site=False)
return
@receiver(pre_delete)
def pre_delete_callback(sender, instance, **kwargs):
"""
Called just before a model is deleted.
Delete related models here.
Arguments:
sender -- the model class
instance -- the actual instance being saved
"""
if sender == BlogCategory:
related_elements = ColumnElement.objects.filter(object_id=instance.id, content_type=ContentType.objects.get_for_model(sender))
if related_elements:
for element in related_elements:
element.delete()
return
|
import logging
logger = logging.getLogger(__name__)
from django.contrib.contenttypes.models import ContentType
- from django.db.models.signals import post_save
+ from django.db.models.signals import post_save, pre_delete
? ++++++++++++
from django.dispatch import receiver
from mezzanine.blog.models import BlogCategory
from website.jdpages.models import ColumnElement
@receiver(post_save)
def post_save_callback(sender, instance, created, **kwargs):
"""
Called after a model instance is saved.
- Created related database objects for some
+ Create related models here.
Arguments:
sender -- the model class
instance -- the actual instance being saved
created -- a boolean; True if a new record was created
"""
if not created:
return
if sender == BlogCategory:
if ColumnElement.objects.filter(object_id=instance.id, content_type=ContentType.objects.get_for_model(sender)):
return
blog_category = instance
blog_category_element = ColumnElement()
blog_category_element.title = blog_category.title
blog_category_element.content_type = ContentType.objects.get_for_model(BlogCategory)
blog_category_element.object_id = blog_category.id
blog_category_element.save()
blog_category_element.site_id = instance.site_id
blog_category_element.save(update_site=False)
return
+
+ @receiver(pre_delete)
+ def pre_delete_callback(sender, instance, **kwargs):
+ """
+ Called just before a model is deleted.
+ Delete related models here.
+ Arguments:
+ sender -- the model class
+ instance -- the actual instance being saved
+ """
+ if sender == BlogCategory:
+ related_elements = ColumnElement.objects.filter(object_id=instance.id, content_type=ContentType.objects.get_for_model(sender))
+ if related_elements:
+ for element in related_elements:
+ element.delete()
+ return
|
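Editor's note: the same cleanup expressed with two standard Django shortcuts: passing sender= to receiver() replaces the if-check inside the handler, and QuerySet.delete() replaces the manual loop. A sketch under the record's model names, not a drop-in from the commit itself.

from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import pre_delete
from django.dispatch import receiver

from mezzanine.blog.models import BlogCategory
from website.jdpages.models import ColumnElement

@receiver(pre_delete, sender=BlogCategory)  # only fires for BlogCategory
def delete_related_elements(sender, instance, **kwargs):
    ColumnElement.objects.filter(
        object_id=instance.id,
        content_type=ContentType.objects.get_for_model(sender),
    ).delete()                              # bulk delete, no loop needed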
3e28adb3b32e1c88e9295c44e79840ebfe67f83f
|
py/foxgami/db.py
|
py/foxgami/db.py
|
import functools
from sqlalchemy import create_engine
@functools.lru_cache()
def engine():
return create_engine('postgresql://foxgami:foxgami@localhost/foxgami')
def query(sql, args=()):
e = engine()
result = e.execute(sql, tuple(args))
if result:
return list(result)
def query_single(sql, args=()):
rows = list(query(sql, args))
if len(rows) >= 1:
return rows[0]
else:
return None
|
import functools
from sqlalchemy import create_engine
@functools.lru_cache()
def engine():
return create_engine('postgresql://foxgami:foxgami@localhost/foxgami')
def query(sql, args=()):
e = engine()
result = e.execute(sql, tuple(args))
if result.returns_rows:
return list(result)
def query_single(sql, args=()):
rows = list(query(sql, args))
if len(rows) >= 1:
return rows[0]
else:
return None
|
Use .returns_rows to determine if we should return list type
|
Use .returns_rows to determine if we should return list type
|
Python
|
mit
|
flubstep/foxgami.com,flubstep/foxgami.com
|
import functools
from sqlalchemy import create_engine
@functools.lru_cache()
def engine():
return create_engine('postgresql://foxgami:foxgami@localhost/foxgami')
def query(sql, args=()):
e = engine()
result = e.execute(sql, tuple(args))
- if result:
+ if result.returns_rows:
return list(result)
def query_single(sql, args=()):
rows = list(query(sql, args))
if len(rows) >= 1:
return rows[0]
else:
return None
|
Use .returns_rows to determine if we should return list type
|
## Code Before:
import functools
from sqlalchemy import create_engine
@functools.lru_cache()
def engine():
return create_engine('postgresql://foxgami:foxgami@localhost/foxgami')
def query(sql, args=()):
e = engine()
result = e.execute(sql, tuple(args))
if result:
return list(result)
def query_single(sql, args=()):
rows = list(query(sql, args))
if len(rows) >= 1:
return rows[0]
else:
return None
## Instruction:
Use .returns_rows to determine if we should return list type
## Code After:
import functools
from sqlalchemy import create_engine
@functools.lru_cache()
def engine():
return create_engine('postgresql://foxgami:foxgami@localhost/foxgami')
def query(sql, args=()):
e = engine()
result = e.execute(sql, tuple(args))
if result.returns_rows:
return list(result)
def query_single(sql, args=()):
rows = list(query(sql, args))
if len(rows) >= 1:
return rows[0]
else:
return None
|
import functools
from sqlalchemy import create_engine
@functools.lru_cache()
def engine():
return create_engine('postgresql://foxgami:foxgami@localhost/foxgami')
def query(sql, args=()):
e = engine()
result = e.execute(sql, tuple(args))
- if result:
+ if result.returns_rows:
return list(result)
def query_single(sql, args=()):
rows = list(query(sql, args))
if len(rows) >= 1:
return rows[0]
else:
return None
|
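A short sketch of why this fix matters, assuming SQLAlchemy 1.x and the query() helper defined above: a result object is always truthy, so the old `if result:` test could not tell a SELECT from an INSERT, and iterating a row-less result raises ResourceClosedError. The users table below is hypothetical.

rows = query("SELECT 1")                               # .returns_rows is True
assert rows == [(1,)]                                  # rows compare equal to tuples
created = query("INSERT INTO users (name) VALUES (%s)", ("fox",))
assert created is None                                 # .returns_rows is False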
0624417fbac1cf23316ee0a58ae41c0519a390c4
|
go/apps/surveys/definition.py
|
go/apps/surveys/definition.py
|
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
from go.apps.surveys.tasks import export_vxpolls_data
class SendSurveyAction(ConversationAction):
action_name = 'send_survey'
action_display_name = 'Send Survey'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_survey', batch_id=self._conv.batch.key,
msg_options={}, delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
def perform_action(self, action_data):
return export_vxpolls_data.delay(self._conv.user_account.key,
self._conv.key)
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'surveys'
actions = (
SendSurveyAction,
DownloadUserDataAction,
)
|
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
from go.apps.surveys.tasks import export_vxpolls_data
class SendSurveyAction(ConversationAction):
action_name = 'send_survey'
action_display_name = 'Send Survey'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_survey', batch_id=self._conv.batch.key,
msg_options={}, delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
action_display_verb = 'Send CSV via e-mail'
def perform_action(self, action_data):
return export_vxpolls_data.delay(self._conv.user_account.key,
self._conv.key)
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'surveys'
actions = (
SendSurveyAction,
DownloadUserDataAction,
)
|
Change download survey data display verb to 'Send CSV via e-mail'.
|
Change download survey data display verb to 'Send CSV via e-mail'.
|
Python
|
bsd-3-clause
|
praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go
|
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
from go.apps.surveys.tasks import export_vxpolls_data
class SendSurveyAction(ConversationAction):
action_name = 'send_survey'
action_display_name = 'Send Survey'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_survey', batch_id=self._conv.batch.key,
msg_options={}, delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
+ action_display_verb = 'Send CSV via e-mail'
def perform_action(self, action_data):
return export_vxpolls_data.delay(self._conv.user_account.key,
self._conv.key)
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'surveys'
actions = (
SendSurveyAction,
DownloadUserDataAction,
)
|
Change download survey data display verb to 'Send CSV via e-mail'.
|
## Code Before:
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
from go.apps.surveys.tasks import export_vxpolls_data
class SendSurveyAction(ConversationAction):
action_name = 'send_survey'
action_display_name = 'Send Survey'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_survey', batch_id=self._conv.batch.key,
msg_options={}, delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
def perform_action(self, action_data):
return export_vxpolls_data.delay(self._conv.user_account.key,
self._conv.key)
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'surveys'
actions = (
SendSurveyAction,
DownloadUserDataAction,
)
## Instruction:
Change download survey data display verb to 'Send CSV via e-mail'.
## Code After:
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
from go.apps.surveys.tasks import export_vxpolls_data
class SendSurveyAction(ConversationAction):
action_name = 'send_survey'
action_display_name = 'Send Survey'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_survey', batch_id=self._conv.batch.key,
msg_options={}, delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
action_display_verb = 'Send CSV via e-mail'
def perform_action(self, action_data):
return export_vxpolls_data.delay(self._conv.user_account.key,
self._conv.key)
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'surveys'
actions = (
SendSurveyAction,
DownloadUserDataAction,
)
|
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
from go.apps.surveys.tasks import export_vxpolls_data
class SendSurveyAction(ConversationAction):
action_name = 'send_survey'
action_display_name = 'Send Survey'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_survey', batch_id=self._conv.batch.key,
msg_options={}, delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
+ action_display_verb = 'Send CSV via e-mail'
def perform_action(self, action_data):
return export_vxpolls_data.delay(self._conv.user_account.key,
self._conv.key)
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'surveys'
actions = (
SendSurveyAction,
DownloadUserDataAction,
)
|
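For orientation, a hedged sketch of how the needs_* flags and check_disabled() above could gate an action before dispatch; the function below is illustrative, not vumi-go's actual dispatch code, and conv.is_running() / conv.groups are assumed helpers.

def action_allowed(action, conv):
    if action.needs_running and not conv.is_running():   # assumed helper
        return False
    if action.needs_group and not conv.groups.exists():  # assumed helper
        return False
    return action.check_disabled() is None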
4efa9c87264eabb6712f4fb787ab0de42be18de6
|
places/urls.py
|
places/urls.py
|
from django.conf.urls import url
from . import views
app_name = 'places'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'),
]
|
from django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
]
|
Move places urlpatterns to Django 2.0 preferred method
|
Move places urlpatterns to Django 2.0 preferred method
|
Python
|
mit
|
evanepio/dotmanca,evanepio/dotmanca,evanepio/dotmanca
|
- from django.conf.urls import url
+ from django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
- url(r'^$', views.IndexView.as_view(), name='index'),
+ path('', views.IndexView.as_view(), name='index'),
- url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'),
+ path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
]
|
Move places urlpatterns to Django 2.0 preferred method
|
## Code Before:
from django.conf.urls import url
from . import views
app_name = 'places'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'),
]
## Instruction:
Move places urlpatterns to Django 2.0 preferred method
## Code After:
from django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
]
|
- from django.conf.urls import url
+ from django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
- url(r'^$', views.IndexView.as_view(), name='index'),
? ^^^ - --
+ path('', views.IndexView.as_view(), name='index'),
? ^^^^
- url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'),
? ^^^ - ---- ------- -
+ path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
? ^^^^ +++++
]
|
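One subtlety worth noting in the migration above: the slug converter matches [-a-zA-Z0-9_]+, which is slightly narrower than the Unicode-aware [-\w]+ of the old regex, and the include import is unused here. A hedged sketch of keeping regex control where a converter cannot express the pattern:

from django.urls import re_path

# re_path keeps the Django 1.x regex semantics; the extra route and its
# name are illustrative only.
urlpatterns += [
    re_path(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(),
            name='place_unicode'),
]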
9a81879bd4eb01be5ed74acfdaf22acb635a9817
|
pikalang/__init__.py
|
pikalang/__init__.py
|
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
|
from __future__ import print_function
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
|
Add proper printing for py2
|
Add proper printing for py2
|
Python
|
mit
|
groteworld/pikalang,grotewold/pikalang
|
+
+ from __future__ import print_function
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
|
Add proper printing for py2
|
## Code Before:
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
## Instruction:
Add proper printing for py2
## Code After:
from __future__ import print_function
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
|
+
+ from __future__ import print_function
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
|
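The reason the future import is needed, in brief: on Python 2, print is a statement, so print("msg", file=sys.stderr) is a syntax error without it; the import makes print a function on both interpreters. A minimal check:

from __future__ import print_function
import sys

# Python 2 statement form (pre-import):  print >> sys.stderr, "msg"
# Function form, valid on both versions once the import is in place:
print("pikalang: example diagnostic", file=sys.stderr)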
d971fbb4dc3b69e012b212cd54b6e8511571e1f5
|
graphene/core/classtypes/uniontype.py
|
graphene/core/classtypes/uniontype.py
|
import six
from graphql.core.type import GraphQLUnionType
from .base import FieldsClassType, FieldsClassTypeMeta, FieldsOptions
class UnionTypeOptions(FieldsOptions):
def __init__(self, *args, **kwargs):
super(UnionTypeOptions, self).__init__(*args, **kwargs)
self.types = []
class UnionTypeMeta(FieldsClassTypeMeta):
options_class = UnionTypeOptions
def get_options(cls, meta):
return cls.options_class(meta, types=[])
class UnionType(six.with_metaclass(UnionTypeMeta, FieldsClassType)):
class Meta:
abstract = True
@classmethod
def _resolve_type(cls, schema, instance, *args):
return schema.T(instance.__class__)
@classmethod
def internal_type(cls, schema):
if cls._meta.abstract:
raise Exception("Abstract ObjectTypes don't have a specific type.")
return GraphQLUnionType(
cls._meta.type_name,
types=list(map(schema.T, cls._meta.types)),
resolve_type=lambda instance, info: cls._resolve_type(schema, instance, info),
description=cls._meta.description,
)
|
from functools import partial
import six
from graphql.core.type import GraphQLUnionType
from .base import FieldsClassType, FieldsClassTypeMeta, FieldsOptions
class UnionTypeOptions(FieldsOptions):
def __init__(self, *args, **kwargs):
super(UnionTypeOptions, self).__init__(*args, **kwargs)
self.types = []
class UnionTypeMeta(FieldsClassTypeMeta):
options_class = UnionTypeOptions
def get_options(cls, meta):
return cls.options_class(meta, types=[])
class UnionType(six.with_metaclass(UnionTypeMeta, FieldsClassType)):
class Meta:
abstract = True
@classmethod
def _resolve_type(cls, schema, instance, *args):
return schema.T(instance.__class__)
@classmethod
def internal_type(cls, schema):
if cls._meta.abstract:
raise Exception("Abstract ObjectTypes don't have a specific type.")
return GraphQLUnionType(
cls._meta.type_name,
types=list(map(schema.T, cls._meta.types)),
resolve_type=partial(cls._resolve_type, schema),
description=cls._meta.description,
)
|
Update to use partial instead of lambda function
|
Update to use partial instead of lambda function
|
Python
|
mit
|
sjhewitt/graphene,graphql-python/graphene,sjhewitt/graphene,Globegitter/graphene,graphql-python/graphene,Globegitter/graphene
|
+ from functools import partial
+
import six
from graphql.core.type import GraphQLUnionType
from .base import FieldsClassType, FieldsClassTypeMeta, FieldsOptions
class UnionTypeOptions(FieldsOptions):
def __init__(self, *args, **kwargs):
super(UnionTypeOptions, self).__init__(*args, **kwargs)
self.types = []
class UnionTypeMeta(FieldsClassTypeMeta):
options_class = UnionTypeOptions
def get_options(cls, meta):
return cls.options_class(meta, types=[])
class UnionType(six.with_metaclass(UnionTypeMeta, FieldsClassType)):
class Meta:
abstract = True
@classmethod
def _resolve_type(cls, schema, instance, *args):
return schema.T(instance.__class__)
@classmethod
def internal_type(cls, schema):
if cls._meta.abstract:
raise Exception("Abstract ObjectTypes don't have a specific type.")
return GraphQLUnionType(
cls._meta.type_name,
types=list(map(schema.T, cls._meta.types)),
- resolve_type=lambda instance, info: cls._resolve_type(schema, instance, info),
+ resolve_type=partial(cls._resolve_type, schema),
description=cls._meta.description,
)
|
Update to use partial instead of lambda function
|
## Code Before:
import six
from graphql.core.type import GraphQLUnionType
from .base import FieldsClassType, FieldsClassTypeMeta, FieldsOptions
class UnionTypeOptions(FieldsOptions):
def __init__(self, *args, **kwargs):
super(UnionTypeOptions, self).__init__(*args, **kwargs)
self.types = []
class UnionTypeMeta(FieldsClassTypeMeta):
options_class = UnionTypeOptions
def get_options(cls, meta):
return cls.options_class(meta, types=[])
class UnionType(six.with_metaclass(UnionTypeMeta, FieldsClassType)):
class Meta:
abstract = True
@classmethod
def _resolve_type(cls, schema, instance, *args):
return schema.T(instance.__class__)
@classmethod
def internal_type(cls, schema):
if cls._meta.abstract:
raise Exception("Abstract ObjectTypes don't have a specific type.")
return GraphQLUnionType(
cls._meta.type_name,
types=list(map(schema.T, cls._meta.types)),
resolve_type=lambda instance, info: cls._resolve_type(schema, instance, info),
description=cls._meta.description,
)
## Instruction:
Update to use partial instead of lambda function
## Code After:
from functools import partial
import six
from graphql.core.type import GraphQLUnionType
from .base import FieldsClassType, FieldsClassTypeMeta, FieldsOptions
class UnionTypeOptions(FieldsOptions):
def __init__(self, *args, **kwargs):
super(UnionTypeOptions, self).__init__(*args, **kwargs)
self.types = []
class UnionTypeMeta(FieldsClassTypeMeta):
options_class = UnionTypeOptions
def get_options(cls, meta):
return cls.options_class(meta, types=[])
class UnionType(six.with_metaclass(UnionTypeMeta, FieldsClassType)):
class Meta:
abstract = True
@classmethod
def _resolve_type(cls, schema, instance, *args):
return schema.T(instance.__class__)
@classmethod
def internal_type(cls, schema):
if cls._meta.abstract:
raise Exception("Abstract ObjectTypes don't have a specific type.")
return GraphQLUnionType(
cls._meta.type_name,
types=list(map(schema.T, cls._meta.types)),
resolve_type=partial(cls._resolve_type, schema),
description=cls._meta.description,
)
|
+ from functools import partial
+
import six
from graphql.core.type import GraphQLUnionType
from .base import FieldsClassType, FieldsClassTypeMeta, FieldsOptions
class UnionTypeOptions(FieldsOptions):
def __init__(self, *args, **kwargs):
super(UnionTypeOptions, self).__init__(*args, **kwargs)
self.types = []
class UnionTypeMeta(FieldsClassTypeMeta):
options_class = UnionTypeOptions
def get_options(cls, meta):
return cls.options_class(meta, types=[])
class UnionType(six.with_metaclass(UnionTypeMeta, FieldsClassType)):
class Meta:
abstract = True
@classmethod
def _resolve_type(cls, schema, instance, *args):
return schema.T(instance.__class__)
@classmethod
def internal_type(cls, schema):
if cls._meta.abstract:
raise Exception("Abstract ObjectTypes don't have a specific type.")
return GraphQLUnionType(
cls._meta.type_name,
types=list(map(schema.T, cls._meta.types)),
- resolve_type=lambda instance, info: cls._resolve_type(schema, instance, info),
+ resolve_type=partial(cls._resolve_type, schema),
description=cls._meta.description,
)
|
4640b75fdb794e29cb6e7bdc03a6697d8f9f3483
|
emu/processes/wps_ultimate_question.py
|
emu/processes/wps_ultimate_question.py
|
from pywps import Process, LiteralOutput
from pywps.app.Common import Metadata
class UltimateQuestion(Process):
def __init__(self):
inputs = []
outputs = [LiteralOutput('answer', 'Answer to Ultimate Question', data_type='string')]
super(UltimateQuestion, self).__init__(
self._handler,
identifier='ultimate_question',
version='2.0',
title='Answer to the ultimate question',
abstract='This process gives the answer to the ultimate question of "What is the meaning of life?"',
profile='',
metadata=[Metadata('Ultimate Question'), Metadata('What is the meaning of life')],
inputs=inputs,
outputs=outputs,
store_supported=True,
status_supported=True
)
@staticmethod
def _handler(request, response):
import time
response.update_status('PyWPS Process started.', 0)
sleep_delay = .1
time.sleep(sleep_delay)
response.update_status('Thinking...', 20)
time.sleep(sleep_delay)
response.update_status('Thinking...', 40)
time.sleep(sleep_delay)
response.update_status('Thinking...', 60)
time.sleep(sleep_delay)
response.update_status('Thinking...', 80)
response.outputs['answer'].data = '42'
response.update_status('PyWPS Process completed.', 100)
return response
|
from pywps import Process, LiteralOutput
from pywps.app.Common import Metadata
class UltimateQuestion(Process):
def __init__(self):
inputs = []
outputs = [LiteralOutput('answer', 'Answer to Ultimate Question', data_type='string')]
super(UltimateQuestion, self).__init__(
self._handler,
identifier='ultimate_question',
version='2.0',
title='Answer to the ultimate question',
abstract='This process gives the answer to the ultimate question of life, the universe, and everything.',
profile='',
metadata=[Metadata('Ultimate Question'), Metadata('What is the meaning of life')],
inputs=inputs,
outputs=outputs,
store_supported=True,
status_supported=True
)
@staticmethod
def _handler(request, response):
import time
sleep_delay = .1
response.update_status('PyWPS Process started.', 0)
time.sleep(sleep_delay)
response.update_status("Contacting the Deep Thought supercomputer.", 10)
time.sleep(sleep_delay)
response.update_status('Thinking...', 20)
time.sleep(sleep_delay)
response.update_status('Thinking...', 40)
time.sleep(sleep_delay)
response.update_status('Thinking...', 60)
time.sleep(sleep_delay)
response.update_status('Thinking...', 80)
response.outputs['answer'].data = '42'
response.update_status('PyWPS Process completed.', 100)
return response
|
Make ultimate question ever more ultimate
|
Make ultimate question ever more ultimate
|
Python
|
apache-2.0
|
bird-house/emu
|
from pywps import Process, LiteralOutput
from pywps.app.Common import Metadata
class UltimateQuestion(Process):
def __init__(self):
inputs = []
outputs = [LiteralOutput('answer', 'Answer to Ultimate Question', data_type='string')]
super(UltimateQuestion, self).__init__(
self._handler,
identifier='ultimate_question',
version='2.0',
title='Answer to the ultimate question',
- abstract='This process gives the answer to the ultimate question of "What is the meaning of life?"',
+ abstract='This process gives the answer to the ultimate question of life, the universe, and everything.',
profile='',
metadata=[Metadata('Ultimate Question'), Metadata('What is the meaning of life')],
inputs=inputs,
outputs=outputs,
store_supported=True,
status_supported=True
)
@staticmethod
def _handler(request, response):
import time
+ sleep_delay = .1
response.update_status('PyWPS Process started.', 0)
+ time.sleep(sleep_delay)
- sleep_delay = .1
+ response.update_status("Contacting the Deep Thought supercomputer.", 10)
time.sleep(sleep_delay)
response.update_status('Thinking...', 20)
time.sleep(sleep_delay)
response.update_status('Thinking...', 40)
time.sleep(sleep_delay)
response.update_status('Thinking...', 60)
time.sleep(sleep_delay)
response.update_status('Thinking...', 80)
response.outputs['answer'].data = '42'
response.update_status('PyWPS Process completed.', 100)
return response
|
Make ultimate question ever more ultimate
|
## Code Before:
from pywps import Process, LiteralOutput
from pywps.app.Common import Metadata
class UltimateQuestion(Process):
def __init__(self):
inputs = []
outputs = [LiteralOutput('answer', 'Answer to Ultimate Question', data_type='string')]
super(UltimateQuestion, self).__init__(
self._handler,
identifier='ultimate_question',
version='2.0',
title='Answer to the ultimate question',
abstract='This process gives the answer to the ultimate question of "What is the meaning of life?"',
profile='',
metadata=[Metadata('Ultimate Question'), Metadata('What is the meaning of life')],
inputs=inputs,
outputs=outputs,
store_supported=True,
status_supported=True
)
@staticmethod
def _handler(request, response):
import time
response.update_status('PyWPS Process started.', 0)
sleep_delay = .1
time.sleep(sleep_delay)
response.update_status('Thinking...', 20)
time.sleep(sleep_delay)
response.update_status('Thinking...', 40)
time.sleep(sleep_delay)
response.update_status('Thinking...', 60)
time.sleep(sleep_delay)
response.update_status('Thinking...', 80)
response.outputs['answer'].data = '42'
response.update_status('PyWPS Process completed.', 100)
return response
## Instruction:
Make ultimate question ever more ultimate
## Code After:
from pywps import Process, LiteralOutput
from pywps.app.Common import Metadata
class UltimateQuestion(Process):
def __init__(self):
inputs = []
outputs = [LiteralOutput('answer', 'Answer to Ultimate Question', data_type='string')]
super(UltimateQuestion, self).__init__(
self._handler,
identifier='ultimate_question',
version='2.0',
title='Answer to the ultimate question',
abstract='This process gives the answer to the ultimate question of life, the universe, and everything.',
profile='',
metadata=[Metadata('Ultimate Question'), Metadata('What is the meaning of life')],
inputs=inputs,
outputs=outputs,
store_supported=True,
status_supported=True
)
@staticmethod
def _handler(request, response):
import time
sleep_delay = .1
response.update_status('PyWPS Process started.', 0)
time.sleep(sleep_delay)
response.update_status("Contacting the Deep Thought supercomputer.", 10)
time.sleep(sleep_delay)
response.update_status('Thinking...', 20)
time.sleep(sleep_delay)
response.update_status('Thinking...', 40)
time.sleep(sleep_delay)
response.update_status('Thinking...', 60)
time.sleep(sleep_delay)
response.update_status('Thinking...', 80)
response.outputs['answer'].data = '42'
response.update_status('PyWPS Process completed.', 100)
return response
|
from pywps import Process, LiteralOutput
from pywps.app.Common import Metadata
class UltimateQuestion(Process):
def __init__(self):
inputs = []
outputs = [LiteralOutput('answer', 'Answer to Ultimate Question', data_type='string')]
super(UltimateQuestion, self).__init__(
self._handler,
identifier='ultimate_question',
version='2.0',
title='Answer to the ultimate question',
- abstract='This process gives the answer to the ultimate question of "What is the meaning of life?"',
? ^^^^^^ ^ ^ ^^^^^^^^^^
+ abstract='This process gives the answer to the ultimate question of life, the universe, and everything.',
? ^ ^^^ ^^^^ +++++ +++++++++ ^
profile='',
metadata=[Metadata('Ultimate Question'), Metadata('What is the meaning of life')],
inputs=inputs,
outputs=outputs,
store_supported=True,
status_supported=True
)
@staticmethod
def _handler(request, response):
import time
+ sleep_delay = .1
response.update_status('PyWPS Process started.', 0)
+ time.sleep(sleep_delay)
- sleep_delay = .1
+ response.update_status("Contacting the Deep Thought supercomputer.", 10)
time.sleep(sleep_delay)
response.update_status('Thinking...', 20)
time.sleep(sleep_delay)
response.update_status('Thinking...', 40)
time.sleep(sleep_delay)
response.update_status('Thinking...', 60)
time.sleep(sleep_delay)
response.update_status('Thinking...', 80)
response.outputs['answer'].data = '42'
response.update_status('PyWPS Process completed.', 100)
return response
|
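The repeated Thinking... updates above could equally be driven by a loop; a hypothetical compression of those steps using the same PyWPS update_status(message, percent) API:

for pct in (20, 40, 60, 80):
    time.sleep(sleep_delay)
    response.update_status('Thinking...', pct)
response.outputs['answer'].data = '42'
response.update_status('PyWPS Process completed.', 100)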
8e4d77636a9846296225ddbfab872be4c7486261
|
dask_distance/_pycompat.py
|
dask_distance/_pycompat.py
|
try:
irange = xrange
except NameError:
irange = range
|
try:
irange = xrange
except NameError:
irange = range
try:
from itertools import izip
except ImportError:
izip = zip
|
Add izip for Python 2/3 compatibility
|
Add izip for Python 2/3 compatibility
Simply use `izip` from `itertools` on Python 2 and alias `zip` as `izip`
on Python 3. This way an iterable form of `zip` remains available on
both Python 2 and Python 3 under the name `izip`. Should help keep
the performance of the two implementations from diverging too far.
|
Python
|
bsd-3-clause
|
jakirkham/dask-distance
|
try:
irange = xrange
except NameError:
irange = range
+ try:
+ from itertools import izip
+ except ImportError:
+ izip = zip
+
|
Add izip for Python 2/3 compatibility
|
## Code Before:
try:
irange = xrange
except NameError:
irange = range
## Instruction:
Add izip for Python 2/3 compatibility
## Code After:
try:
irange = xrange
except NameError:
irange = range
try:
from itertools import izip
except ImportError:
izip = zip
|
try:
irange = xrange
except NameError:
irange = range
+
+ try:
+ from itertools import izip
+ except ImportError:
+ izip = zip
|
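A brief usage note on the shim above: under both interpreters izip is lazy, so pairing two long sequences never materialises an intermediate list.

pairs = izip(irange(10 ** 6), irange(10 ** 6))
assert next(iter(pairs)) == (0, 0)  # consumes a single pair, not a million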
19a9ccb0b896c87ba04b47081c6b796cb37bd022
|
test/test_cypher.py
|
test/test_cypher.py
|
from neomodel import StructuredNode, StringProperty, CypherException
class User2(StructuredNode):
email = StringProperty()
def test_start_cypher():
jim = User2(email='[email protected]').save()
email = jim.start_cypher("RETURN a.email")[0][0][0]
assert email == '[email protected]'
def test_cypher():
jim = User2(email='[email protected]').save()
email = jim.cypher("START a=node({self}) RETURN a.email")[0][0][0]
assert email == '[email protected]'
def test_cypher_syntax_error():
jim = User2(email='[email protected]').save()
try:
jim.cypher("START a=node({self}) RETURN xx")
except CypherException as e:
assert hasattr(e, 'message')
assert hasattr(e, 'query')
assert hasattr(e, 'query_parameters')
assert hasattr(e, 'java_trace')
assert hasattr(e, 'java_exception')
else:
assert False
|
from neomodel import StructuredNode, StringProperty, CypherException
class User2(StructuredNode):
email = StringProperty()
def test_cypher():
jim = User2(email='[email protected]').save()
email = jim.cypher("START a=node({self}) RETURN a.email")[0][0][0]
assert email == '[email protected]'
def test_cypher_syntax_error():
jim = User2(email='[email protected]').save()
try:
jim.cypher("START a=node({self}) RETURN xx")
except CypherException as e:
assert hasattr(e, 'message')
assert hasattr(e, 'query')
assert hasattr(e, 'query_parameters')
assert hasattr(e, 'java_trace')
assert hasattr(e, 'java_exception')
else:
assert False
|
Remove test of deprecated method
|
Remove test of deprecated method
|
Python
|
mit
|
bleib1dj/neomodel,robinedwards/neomodel,cristigociu/neomodel_dh,fpieper/neomodel,andrefsp/neomodel,wcooley/neomodel,robinedwards/neomodel,bleib1dj/neomodel,pombredanne/neomodel
|
from neomodel import StructuredNode, StringProperty, CypherException
class User2(StructuredNode):
email = StringProperty()
-
-
- def test_start_cypher():
- jim = User2(email='[email protected]').save()
- email = jim.start_cypher("RETURN a.email")[0][0][0]
- assert email == '[email protected]'
def test_cypher():
jim = User2(email='[email protected]').save()
email = jim.cypher("START a=node({self}) RETURN a.email")[0][0][0]
assert email == '[email protected]'
def test_cypher_syntax_error():
jim = User2(email='[email protected]').save()
try:
jim.cypher("START a=node({self}) RETURN xx")
except CypherException as e:
assert hasattr(e, 'message')
assert hasattr(e, 'query')
assert hasattr(e, 'query_parameters')
assert hasattr(e, 'java_trace')
assert hasattr(e, 'java_exception')
else:
assert False
|
Remove test of deprecated method
|
## Code Before:
from neomodel import StructuredNode, StringProperty, CypherException
class User2(StructuredNode):
email = StringProperty()
def test_start_cypher():
jim = User2(email='[email protected]').save()
email = jim.start_cypher("RETURN a.email")[0][0][0]
assert email == '[email protected]'
def test_cypher():
jim = User2(email='[email protected]').save()
email = jim.cypher("START a=node({self}) RETURN a.email")[0][0][0]
assert email == '[email protected]'
def test_cypher_syntax_error():
jim = User2(email='[email protected]').save()
try:
jim.cypher("START a=node({self}) RETURN xx")
except CypherException as e:
assert hasattr(e, 'message')
assert hasattr(e, 'query')
assert hasattr(e, 'query_parameters')
assert hasattr(e, 'java_trace')
assert hasattr(e, 'java_exception')
else:
assert False
## Instruction:
Remove test of deprecated method
## Code After:
from neomodel import StructuredNode, StringProperty, CypherException
class User2(StructuredNode):
email = StringProperty()
def test_cypher():
jim = User2(email='[email protected]').save()
email = jim.cypher("START a=node({self}) RETURN a.email")[0][0][0]
assert email == '[email protected]'
def test_cypher_syntax_error():
jim = User2(email='[email protected]').save()
try:
jim.cypher("START a=node({self}) RETURN xx")
except CypherException as e:
assert hasattr(e, 'message')
assert hasattr(e, 'query')
assert hasattr(e, 'query_parameters')
assert hasattr(e, 'java_trace')
assert hasattr(e, 'java_exception')
else:
assert False
|
from neomodel import StructuredNode, StringProperty, CypherException
class User2(StructuredNode):
email = StringProperty()
-
-
- def test_start_cypher():
- jim = User2(email='[email protected]').save()
- email = jim.start_cypher("RETURN a.email")[0][0][0]
- assert email == '[email protected]'
def test_cypher():
jim = User2(email='[email protected]').save()
email = jim.cypher("START a=node({self}) RETURN a.email")[0][0][0]
assert email == '[email protected]'
def test_cypher_syntax_error():
jim = User2(email='[email protected]').save()
try:
jim.cypher("START a=node({self}) RETURN xx")
except CypherException as e:
assert hasattr(e, 'message')
assert hasattr(e, 'query')
assert hasattr(e, 'query_parameters')
assert hasattr(e, 'java_trace')
assert hasattr(e, 'java_exception')
else:
assert False
|
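For readers of the remaining tests: the triple indexing [0][0][0] suggests cypher() returns a (rows, metadata) pair, with {self} bound to the node the method is called on. A hedged unpacking of the same call under that assumption:

jim = User2(email='[email protected]').save()
rows, meta = jim.cypher("START a=node({self}) RETURN a.email")
assert rows[0][0] == '[email protected]'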
421dbe962dae44cad7aa734a397cb16fe9b1632f
|
reactive/datanode.py
|
reactive/datanode.py
|
from charms.reactive import when, when_not, set_state, remove_state
from charms.hadoop import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
utils.install_ssh_key('hdfs', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
hadoop.open_ports('datanode')
set_state('datanode.started')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
|
from charms.reactive import when, when_not, set_state, remove_state
from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
utils.install_ssh_key('hdfs', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
hadoop.open_ports('datanode')
set_state('datanode.started')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
|
Update charms.hadoop reference to follow convention
|
Update charms.hadoop reference to follow convention
|
Python
|
apache-2.0
|
johnsca/layer-apache-hadoop-datanode,juju-solutions/layer-apache-hadoop-datanode
|
from charms.reactive import when, when_not, set_state, remove_state
- from charms.hadoop import get_hadoop_base
+ from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
utils.install_ssh_key('hdfs', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
hadoop.open_ports('datanode')
set_state('datanode.started')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
|
Update charms.hadoop reference to follow convention
|
## Code Before:
from charms.reactive import when, when_not, set_state, remove_state
from charms.hadoop import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
utils.install_ssh_key('hdfs', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
hadoop.open_ports('datanode')
set_state('datanode.started')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
## Instruction:
Update charms.hadoop reference to follow convention
## Code After:
from charms.reactive import when, when_not, set_state, remove_state
from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
utils.install_ssh_key('hdfs', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
hadoop.open_ports('datanode')
set_state('datanode.started')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
|
from charms.reactive import when, when_not, set_state, remove_state
- from charms.hadoop import get_hadoop_base
+ from charms.layer.hadoop_base import get_hadoop_base
? ++++++ +++++
from jujubigdata.handlers import HDFS
from jujubigdata import utils
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
utils.install_ssh_key('hdfs', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
hadoop.open_ports('datanode')
set_state('datanode.started')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
|
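The convention referenced in the commit message, as far as it can be inferred here: a reactive layer named hadoop-base exposes its Python helpers under the charms.layer namespace, with dashes mapped to underscores.

layer_name = "hadoop-base"
module_path = "charms.layer." + layer_name.replace("-", "_")
assert module_path == "charms.layer.hadoop_base"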
992b3302c4cb690e86436c54c43d0bb2aa406b0d
|
scrapi/harvesters/hacettepe_U_DIM.py
|
scrapi/harvesters/hacettepe_U_DIM.py
|
'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class Hacettepe_u_dimHarvester(OAIHarvester):
short_name = 'hacettepe_U_DIM'
long_name = 'DSpace on LibLiveCD'
url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
property_list = ['date', 'identifier', 'type', 'rights']
timezone_granularity = True
|
'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class HacettepeHarvester(OAIHarvester):
short_name = 'hacettepe'
long_name = 'DSpace on LibLiveCD'
url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
property_list = ['date', 'identifier', 'type', 'rights']
timezone_granularity = True
|
Change shortname and class name
|
Change shortname and class name
|
Python
|
apache-2.0
|
alexgarciac/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,ostwald/scrapi,CenterForOpenScience/scrapi,erinspace/scrapi,jeffreyliu3230/scrapi,mehanig/scrapi,erinspace/scrapi,felliott/scrapi,felliott/scrapi,fabianvf/scrapi
|
'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
- class Hacettepe_u_dimHarvester(OAIHarvester):
+ class HacettepeHarvester(OAIHarvester):
- short_name = 'hacettepe_U_DIM'
+ short_name = 'hacettepe'
long_name = 'DSpace on LibLiveCD'
url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
property_list = ['date', 'identifier', 'type', 'rights']
timezone_granularity = True
|
Change shortname and class name
|
## Code Before:
'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class Hacettepe_u_dimHarvester(OAIHarvester):
short_name = 'hacettepe_U_DIM'
long_name = 'DSpace on LibLiveCD'
url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
property_list = ['date', 'identifier', 'type', 'rights']
timezone_granularity = True
## Instruction:
Change shortname and class name
## Code After:
'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class HacettepeHarvester(OAIHarvester):
short_name = 'hacettepe'
long_name = 'DSpace on LibLiveCD'
url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
property_list = ['date', 'identifier', 'type', 'rights']
timezone_granularity = True
|
'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
- class Hacettepe_u_dimHarvester(OAIHarvester):
? ------
+ class HacettepeHarvester(OAIHarvester):
- short_name = 'hacettepe_U_DIM'
? ------
+ short_name = 'hacettepe'
long_name = 'DSpace on LibLiveCD'
url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
property_list = ['date', 'identifier', 'type', 'rights']
timezone_granularity = True
|
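The example API call in the harvester's docstring is a standard OAI-PMH ListRecords request; a hedged sketch of issuing it directly, assuming the requests library is available:

import requests

resp = requests.get(
    "http://bbytezarsivi.hacettepe.edu.tr/oai/request",
    params={"verb": "ListRecords", "metadataPrefix": "oai_dc"},
)
resp.raise_for_status()  # OAI-PMH responses are XML on success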
ace54e86e9462b25acd1636e0e9905ba6decfe9b
|
admin_tools/dashboard/views.py
|
admin_tools/dashboard/views.py
|
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.contrib import messages
try:
from django.views.decorators.csrf import csrf_exempt
except ImportError:
from django.contrib.csrf.middleware import csrf_exempt
from .forms import DashboardPreferencesForm
from .models import DashboardPreferences
@login_required
@csrf_exempt
def set_preferences(request, dashboard_id):
"""
This view serves and validates a preferences form.
"""
try:
preferences = DashboardPreferences.objects.get(
user=request.user,
dashboard_id=dashboard_id
)
except DashboardPreferences.DoesNotExist:
preferences = None
if request.method == "POST":
form = DashboardPreferencesForm(
user=request.user,
dashboard_id=dashboard_id,
data=request.POST,
instance=preferences
)
if form.is_valid():
preferences = form.save()
if request.is_ajax():
return HttpResponse('true')
messages.success(request, 'Preferences saved')
elif request.is_ajax():
return HttpResponse('false')
else:
form = DashboardPreferencesForm(
user=request.user,
dashboard_id=dashboard_id,
instance=preferences
)
return render_to_response('admin_tools/dashboard/preferences_form.html',
RequestContext(request, {'form': form}))
|
from django.contrib.admin.views.decorators import staff_member_required
from django.http import HttpResponse
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.contrib import messages
try:
from django.views.decorators.csrf import csrf_exempt
except ImportError:
from django.contrib.csrf.middleware import csrf_exempt
from .forms import DashboardPreferencesForm
from .models import DashboardPreferences
@staff_member_required
@csrf_exempt
def set_preferences(request, dashboard_id):
"""
This view serves and validates a preferences form.
"""
try:
preferences = DashboardPreferences.objects.get(
user=request.user,
dashboard_id=dashboard_id
)
except DashboardPreferences.DoesNotExist:
preferences = None
if request.method == "POST":
form = DashboardPreferencesForm(
user=request.user,
dashboard_id=dashboard_id,
data=request.POST,
instance=preferences
)
if form.is_valid():
preferences = form.save()
if request.is_ajax():
return HttpResponse('true')
messages.success(request, 'Preferences saved')
elif request.is_ajax():
return HttpResponse('false')
else:
form = DashboardPreferencesForm(
user=request.user,
dashboard_id=dashboard_id,
instance=preferences
)
return render_to_response('admin_tools/dashboard/preferences_form.html',
RequestContext(request, {'form': form}))
|
Use @staff_member_required decorator for the dashboard view as well
|
Use @staff_member_required decorator for the dashboard view as well
|
Python
|
mit
|
django-admin-tools/django-admin-tools,django-admin-tools/django-admin-tools,django-admin-tools/django-admin-tools
|
- from django.contrib.auth.decorators import login_required
+ from django.contrib.admin.views.decorators import staff_member_required
from django.http import HttpResponse
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.contrib import messages
try:
from django.views.decorators.csrf import csrf_exempt
except ImportError:
from django.contrib.csrf.middleware import csrf_exempt
from .forms import DashboardPreferencesForm
from .models import DashboardPreferences
- @login_required
+ @staff_member_required
@csrf_exempt
def set_preferences(request, dashboard_id):
"""
This view serves and validates a preferences form.
"""
try:
preferences = DashboardPreferences.objects.get(
user=request.user,
dashboard_id=dashboard_id
)
except DashboardPreferences.DoesNotExist:
preferences = None
if request.method == "POST":
form = DashboardPreferencesForm(
user=request.user,
dashboard_id=dashboard_id,
data=request.POST,
instance=preferences
)
if form.is_valid():
preferences = form.save()
if request.is_ajax():
return HttpResponse('true')
messages.success(request, 'Preferences saved')
elif request.is_ajax():
return HttpResponse('false')
else:
form = DashboardPreferencesForm(
user=request.user,
dashboard_id=dashboard_id,
instance=preferences
)
return render_to_response('admin_tools/dashboard/preferences_form.html',
RequestContext(request, {'form': form}))
|
Use @staff_member_required decorator for the dashboard view as well
|
## Code Before:
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.contrib import messages
try:
from django.views.decorators.csrf import csrf_exempt
except ImportError:
from django.contrib.csrf.middleware import csrf_exempt
from .forms import DashboardPreferencesForm
from .models import DashboardPreferences
@login_required
@csrf_exempt
def set_preferences(request, dashboard_id):
"""
This view serves and validates a preferences form.
"""
try:
preferences = DashboardPreferences.objects.get(
user=request.user,
dashboard_id=dashboard_id
)
except DashboardPreferences.DoesNotExist:
preferences = None
if request.method == "POST":
form = DashboardPreferencesForm(
user=request.user,
dashboard_id=dashboard_id,
data=request.POST,
instance=preferences
)
if form.is_valid():
preferences = form.save()
if request.is_ajax():
return HttpResponse('true')
messages.success(request, 'Preferences saved')
elif request.is_ajax():
return HttpResponse('false')
else:
form = DashboardPreferencesForm(
user=request.user,
dashboard_id=dashboard_id,
instance=preferences
)
return render_to_response('admin_tools/dashboard/preferences_form.html',
RequestContext(request, {'form': form}))
## Instruction:
Use @staff_member_required decorator for the dashboard view as well
## Code After:
from django.contrib.admin.views.decorators import staff_member_required
from django.http import HttpResponse
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.contrib import messages
try:
from django.views.decorators.csrf import csrf_exempt
except ImportError:
from django.contrib.csrf.middleware import csrf_exempt
from .forms import DashboardPreferencesForm
from .models import DashboardPreferences
@staff_member_required
@csrf_exempt
def set_preferences(request, dashboard_id):
"""
This view serves and validates a preferences form.
"""
try:
preferences = DashboardPreferences.objects.get(
user=request.user,
dashboard_id=dashboard_id
)
except DashboardPreferences.DoesNotExist:
preferences = None
if request.method == "POST":
form = DashboardPreferencesForm(
user=request.user,
dashboard_id=dashboard_id,
data=request.POST,
instance=preferences
)
if form.is_valid():
preferences = form.save()
if request.is_ajax():
return HttpResponse('true')
messages.success(request, 'Preferences saved')
elif request.is_ajax():
return HttpResponse('false')
else:
form = DashboardPreferencesForm(
user=request.user,
dashboard_id=dashboard_id,
instance=preferences
)
return render_to_response('admin_tools/dashboard/preferences_form.html',
RequestContext(request, {'form': form}))
|
- from django.contrib.auth.decorators import login_required
? ^^^ ^^^^^
+ from django.contrib.admin.views.decorators import staff_member_required
? ^^^^^^^^^^ ^^^^^^^^^^^^
from django.http import HttpResponse
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.contrib import messages
try:
from django.views.decorators.csrf import csrf_exempt
except ImportError:
from django.contrib.csrf.middleware import csrf_exempt
from .forms import DashboardPreferencesForm
from .models import DashboardPreferences
- @login_required
+ @staff_member_required
@csrf_exempt
def set_preferences(request, dashboard_id):
"""
This view serves and validates a preferences form.
"""
try:
preferences = DashboardPreferences.objects.get(
user=request.user,
dashboard_id=dashboard_id
)
except DashboardPreferences.DoesNotExist:
preferences = None
if request.method == "POST":
form = DashboardPreferencesForm(
user=request.user,
dashboard_id=dashboard_id,
data=request.POST,
instance=preferences
)
if form.is_valid():
preferences = form.save()
if request.is_ajax():
return HttpResponse('true')
messages.success(request, 'Preferences saved')
elif request.is_ajax():
return HttpResponse('false')
else:
form = DashboardPreferencesForm(
user=request.user,
dashboard_id=dashboard_id,
instance=preferences
)
return render_to_response('admin_tools/dashboard/preferences_form.html',
RequestContext(request, {'form': form}))
|
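A behavioural sketch of the decorator swap, not Django's exact implementation: login_required accepts any authenticated user, while staff_member_required additionally demands an active staff account and redirects to the admin login page.

from django.contrib.auth.decorators import user_passes_test

# Roughly equivalent gate (the login_url name is illustrative):
staff_required = user_passes_test(
    lambda u: u.is_active and u.is_staff,
    login_url="admin:login",
)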
1958165c7bf3b9fa45972658b980cefe6a742164
|
myhpom/validators.py
|
myhpom/validators.py
|
import re
from django.core.exceptions import ValidationError
from django.core.validators import EmailValidator, RegexValidator
from django.contrib.auth.models import User
# First Name, Last Name: At least one alphanumeric character.
name_validator = RegexValidator(
regex=r'\w',
flags=re.U,
message='Please enter your name'
)
# Email: valid email address
email_validator = EmailValidator()
# Email is not already taken
def email_not_taken_validator(email):
if len(User.objects.filter(email=email)) > 0:
raise ValidationError(u'Email already in use.')
# Password: At least 8 chars total, 1 uppercase, lowercase, digit, special char.
def password_validator(password):
errors = []
if len(password) < 8:
errors.append(u'8 characters total')
if re.search(r"[a-z]", password) is None:
errors.append(u'1 lowercase letter (a-z)')
if re.search(r"[A-Z]", password) is None:
errors.append(u'1 uppercase letter (A-Z)')
if re.search(r"\d", password) is None:
errors.append(u'1 number (0-9)')
if re.search(r"[!\@\#\$\%\^\*\(\)\_\+\-\=]", password) is None:
errors.append(u'1 special character (! @ # $ % ^ * ( ) _ + - =)')
if len(errors) > 0:
raise ValidationError(u'Please enter a password with at least ' + u', '.join(errors))
|
import re
from django.core.exceptions import ValidationError
from django.core.validators import EmailValidator, RegexValidator
# First Name, Last Name: At least one alphanumeric character.
name_validator = RegexValidator(
regex=r'\w',
flags=re.U,
message='Please enter your name'
)
# Email: valid email address
email_validator = EmailValidator()
# Email is not already taken
def email_not_taken_validator(email):
from myhpom.models import User
if len(User.objects.filter(email=email)) > 0:
raise ValidationError(u'Email already in use.')
# Password: At least 8 chars total, 1 uppercase, lowercase, digit, special char.
def password_validator(password):
errors = []
if len(password) < 8:
errors.append(u'8 characters total')
if re.search(r"[a-z]", password) is None:
errors.append(u'1 lowercase letter (a-z)')
if re.search(r"[A-Z]", password) is None:
errors.append(u'1 uppercase letter (A-Z)')
if re.search(r"\d", password) is None:
errors.append(u'1 number (0-9)')
if re.search(r"[!\@\#\$\%\^\*\(\)\_\+\-\=]", password) is None:
errors.append(u'1 special character (! @ # $ % ^ * ( ) _ + - =)')
if len(errors) > 0:
raise ValidationError(u'Please enter a password with at least ' + u', '.join(errors))
|
Revert "[mh-14] "This import is ultimately just from django.contrib.auth.models import User - using that directly would probably address whatever circular import required that this import get put here, and make it clearer which model User is."-Dane"
|
Revert "[mh-14] "This import is ultimately just from django.contrib.auth.models import User - using that directly would probably address whatever circular import required that this import get put here, and make it clearer which model User is."-Dane"
This reverts commit 7350c56339acaef416d03b6d7ae0e818ab8db182.
|
Python
|
bsd-3-clause
|
ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM
|
import re
from django.core.exceptions import ValidationError
from django.core.validators import EmailValidator, RegexValidator
- from django.contrib.auth.models import User
# First Name, Last Name: At least one alphanumeric character.
name_validator = RegexValidator(
regex=r'\w',
flags=re.U,
message='Please enter your name'
)
# Email: valid email address
email_validator = EmailValidator()
# Email is not already taken
def email_not_taken_validator(email):
+ from myhpom.models import User
if len(User.objects.filter(email=email)) > 0:
raise ValidationError(u'Email already in use.')
# Password: At least 8 chars total, 1 uppercase, lowercase, digit, special char.
def password_validator(password):
errors = []
if len(password) < 8:
errors.append(u'8 characters total')
if re.search(r"[a-z]", password) is None:
errors.append(u'1 lowercase letter (a-z)')
if re.search(r"[A-Z]", password) is None:
errors.append(u'1 uppercase letter (A-Z)')
if re.search(r"\d", password) is None:
errors.append(u'1 number (0-9)')
if re.search(r"[!\@\#\$\%\^\*\(\)\_\+\-\=]", password) is None:
errors.append(u'1 special character (! @ # $ % ^ * ( ) _ + - =)')
if len(errors) > 0:
raise ValidationError(u'Please enter a password with at least ' + u', '.join(errors))
|
Revert "[mh-14] "This import is ultimately just from django.contrib.auth.models import User - using that directly would probably address whatever circular import required that this import get put here, and make it clearer which model User is."-Dane"
|
## Code Before:
import re
from django.core.exceptions import ValidationError
from django.core.validators import EmailValidator, RegexValidator
from django.contrib.auth.models import User
# First Name, Last Name: At least one alphanumeric character.
name_validator = RegexValidator(
regex=r'\w',
flags=re.U,
message='Please enter your name'
)
# Email: valid email address
email_validator = EmailValidator()
# Email is not already taken
def email_not_taken_validator(email):
if len(User.objects.filter(email=email)) > 0:
raise ValidationError(u'Email already in use.')
# Password: At least 8 chars total, 1 uppercase, lowercase, digit, special char.
def password_validator(password):
errors = []
if len(password) < 8:
errors.append(u'8 characters total')
if re.search(r"[a-z]", password) is None:
errors.append(u'1 lowercase letter (a-z)')
if re.search(r"[A-Z]", password) is None:
errors.append(u'1 uppercase letter (A-Z)')
if re.search(r"\d", password) is None:
errors.append(u'1 number (0-9)')
if re.search(r"[!\@\#\$\%\^\*\(\)\_\+\-\=]", password) is None:
errors.append(u'1 special character (! @ # $ % ^ * ( ) _ + - =)')
if len(errors) > 0:
raise ValidationError(u'Please enter a password with at least ' + u', '.join(errors))
## Instruction:
Revert "[mh-14] "This import is ultimately just from django.contrib.auth.models import User - using that directly would probably address whatever circular import required that this import get put here, and make it clearer which model User is."-Dane"
## Code After:
import re
from django.core.exceptions import ValidationError
from django.core.validators import EmailValidator, RegexValidator
# First Name, Last Name: At least one alphanumeric character.
name_validator = RegexValidator(
regex=r'\w',
flags=re.U,
message='Please enter your name'
)
# Email: valid email address
email_validator = EmailValidator()
# Email is not already taken
def email_not_taken_validator(email):
from myhpom.models import User
if len(User.objects.filter(email=email)) > 0:
raise ValidationError(u'Email already in use.')
# Password: At least 8 chars total, 1 uppercase, lowercase, digit, special char.
def password_validator(password):
errors = []
if len(password) < 8:
errors.append(u'8 characters total')
if re.search(r"[a-z]", password) is None:
errors.append(u'1 lowercase letter (a-z)')
if re.search(r"[A-Z]", password) is None:
errors.append(u'1 uppercase letter (A-Z)')
if re.search(r"\d", password) is None:
errors.append(u'1 number (0-9)')
if re.search(r"[!\@\#\$\%\^\*\(\)\_\+\-\=]", password) is None:
errors.append(u'1 special character (! @ # $ % ^ * ( ) _ + - =)')
if len(errors) > 0:
raise ValidationError(u'Please enter a password with at least ' + u', '.join(errors))
|
import re
from django.core.exceptions import ValidationError
from django.core.validators import EmailValidator, RegexValidator
- from django.contrib.auth.models import User
# First Name, Last Name: At least one alphanumeric character.
name_validator = RegexValidator(
regex=r'\w',
flags=re.U,
message='Please enter your name'
)
# Email: valid email address
email_validator = EmailValidator()
# Email is not already taken
def email_not_taken_validator(email):
+ from myhpom.models import User
if len(User.objects.filter(email=email)) > 0:
raise ValidationError(u'Email already in use.')
# Password: At least 8 chars total, 1 uppercase, lowercase, digit, special char.
def password_validator(password):
errors = []
if len(password) < 8:
errors.append(u'8 characters total')
if re.search(r"[a-z]", password) is None:
errors.append(u'1 lowercase letter (a-z)')
if re.search(r"[A-Z]", password) is None:
errors.append(u'1 uppercase letter (A-Z)')
if re.search(r"\d", password) is None:
errors.append(u'1 number (0-9)')
if re.search(r"[!\@\#\$\%\^\*\(\)\_\+\-\=]", password) is None:
errors.append(u'1 special character (! @ # $ % ^ * ( ) _ + - =)')
if len(errors) > 0:
raise ValidationError(u'Please enter a password with at least ' + u', '.join(errors))
|
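Aside: a minimal usage sketch for the password validator snapshotted in the record above; the check_password wrapper below is hypothetical, not part of the repository.

from django.core.exceptions import ValidationError

def check_password(candidate):
    # Collects the validator's complaints instead of raising (illustrative only).
    try:
        password_validator(candidate)  # the function defined in the record above
    except ValidationError as exc:
        return exc.messages            # e.g. [u'Please enter a password with at least ...']
    return []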
6dab7ceeb4de601c47b4d370c6184ddcd0110e89
|
doc/conf.py
|
doc/conf.py
|
import os
import sys
import sphinx_rtd_theme
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
|
import os
import sys
import sphinx_rtd_theme
import pkg_resources
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
version = pkg_resources.require('sphinxcontrib-ros')[0].version
release = version
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
|
Use the source version as a doc version
|
Use the source version as a doc version
|
Python
|
bsd-2-clause
|
otamachan/sphinxcontrib-ros,otamachan/sphinxcontrib-ros
|
import os
import sys
import sphinx_rtd_theme
+ import pkg_resources
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
- version = '0.1.0'
- release = '0.1.0'
+ version = pkg_resources.require('sphinxcontrib-ros')[0].version
+ release = version
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
|
Use the source version as a doc version
|
## Code Before:
import os
import sys
import sphinx_rtd_theme
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
## Instruction:
Use the source version as a doc version
## Code After:
import os
import sys
import sphinx_rtd_theme
import pkg_resources
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
version = pkg_resources.require('sphinxcontrib-ros')[0].version
release = version
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
|
import os
import sys
import sphinx_rtd_theme
+ import pkg_resources
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
- version = '0.1.0'
- release = '0.1.0'
+ version = pkg_resources.require('sphinxcontrib-ros')[0].version
+ release = version
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
|
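Aside: on Python 3.8+ the same single-sourcing of the version can be done with the standard library instead of the now-deprecated pkg_resources API; a sketch, not part of the commit.

from importlib.metadata import version as dist_version

version = dist_version('sphinxcontrib-ros')  # reads the installed package metadata
release = version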
a09822a4a8422493c5cb98eb9518ab0112c565d7
|
techism2/service.py
|
techism2/service.py
|
from techism2.models import Event
from datetime import datetime
def get_tags():
# TODO: cache, use django cache which uses GAE memcache
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
|
from techism2.models import Event
from datetime import datetime
from django.core.cache import cache
tags_cache_key = "tags"
def get_tags():
# Note: no synchronization, probably not possible on GAE
tags = cache.get(tags_cache_key)
if tags:
return tags
else:
tags = __fetch_tags()
cache.set(tags_cache_key, tags, 1800) # expire after 30 min
return tags
def __fetch_tags():
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
|
Use Memcache to cache tags
|
Use Memcache to cache tags
|
Python
|
apache-2.0
|
gimler/techism2,gimler/techism2
|
from techism2.models import Event
from datetime import datetime
+ from django.core.cache import cache
+ tags_cache_key = "tags"
def get_tags():
- # TODO: cache, use django cache which uses GAE memcache
+ # Note: no synchronization, probably not possible on GAE
+ tags = cache.get(tags_cache_key)
+
+ if tags:
+ return tags
+ else:
+ tags = __fetch_tags()
+ cache.set(tags_cache_key, tags, 1800) # expire after 30 min
+ return tags
+
+ def __fetch_tags():
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
|
Use Memcache to cache tags
|
## Code Before:
from techism2.models import Event
from datetime import datetime
def get_tags():
# TODO: cache, use django cache which uses GAE memcache
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
## Instruction:
Use Memcache to cache tags
## Code After:
from techism2.models import Event
from datetime import datetime
from django.core.cache import cache
tags_cache_key = "tags"
def get_tags():
# Note: no synchronization, probably not possible on GAE
tags = cache.get(tags_cache_key)
if tags:
return tags
else:
tags = __fetch_tags()
cache.set(tags_cache_key, tags, 1800) # expire after 30 min
return tags
def __fetch_tags():
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
|
from techism2.models import Event
from datetime import datetime
+ from django.core.cache import cache
+ tags_cache_key = "tags"
def get_tags():
- # TODO: cache, use django cache which uses GAE memcache
+ # Note: no synchronization, probably not possible on GAE
+ tags = cache.get(tags_cache_key)
+
+ if tags:
+ return tags
+ else:
+ tags = __fetch_tags()
+ cache.set(tags_cache_key, tags, 1800) # expire after 30 min
+ return tags
+
+ def __fetch_tags():
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
|
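Aside: Django 1.9+ offers cache.get_or_set, which collapses the get/compute/set sequence above into one call; a sketch with the same 30-minute expiry (still unsynchronized, as the record notes).

from django.core.cache import cache

def get_tags():
    # the default may be a callable, so __fetch_tags only runs on a cache miss
    return cache.get_or_set(tags_cache_key, __fetch_tags, 1800)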
90abb9f68ed32fd5affe8200dfd3bb4836f1c69e
|
test/os_win7.py
|
test/os_win7.py
|
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
# Since we don't have mock, let's monkey-patch
def get_mbed_devices_new(self):
return [
('\\DosDevices\\D:', '_??_USBSTOR#Disk&Ven_MBED&Prod_XPRO&Rev_1.00#9&35913356&0&ATML2127031800007973&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'),
]
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
def test_get_mbeds(self):
m = MbedLsToolsWin7()
func_type = type(MbedLsToolsWin7.get_mbed_devices)
m.get_mbed_devices = func_type(get_mbed_devices_new, m, MbedLsToolsWin7)
mbeds = m.get_mbeds()
self.assertIsNotNone(mbeds)
self.assertEqual(1, len(mbeds))
mbed = mbeds[0]
self.assertEqual("D:", mbed[0])
self.assertEqual("ATML2127031800007973", mbed[1])
if __name__ == '__main__':
unittest.main()
|
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
if __name__ == '__main__':
unittest.main()
|
Revert "Add test for mbed parsing"
|
Revert "Add test for mbed parsing"
This reverts commit d37dc009f1c4f6e8855657dd6dbf17df9332f765.
|
Python
|
apache-2.0
|
mtmtech/mbed-ls,mtmtech/mbed-ls,mazimkhan/mbed-ls,jupe/mbed-ls,mazimkhan/mbed-ls,jupe/mbed-ls
|
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
- # Since we don't have mock, let's monkey-patch
-
- def get_mbed_devices_new(self):
- return [
- ('\\DosDevices\\D:', '_??_USBSTOR#Disk&Ven_MBED&Prod_XPRO&Rev_1.00#9&35913356&0&ATML2127031800007973&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'),
- ]
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
-
- def test_get_mbeds(self):
-
- m = MbedLsToolsWin7()
-
- func_type = type(MbedLsToolsWin7.get_mbed_devices)
- m.get_mbed_devices = func_type(get_mbed_devices_new, m, MbedLsToolsWin7)
-
- mbeds = m.get_mbeds()
-
- self.assertIsNotNone(mbeds)
- self.assertEqual(1, len(mbeds))
-
- mbed = mbeds[0]
-
- self.assertEqual("D:", mbed[0])
- self.assertEqual("ATML2127031800007973", mbed[1])
-
if __name__ == '__main__':
unittest.main()
|
Revert "Add test for mbed parsing"
|
## Code Before:
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
# Since we don't have mock, let's monkey-patch
def get_mbed_devices_new(self):
return [
('\\DosDevices\\D:', '_??_USBSTOR#Disk&Ven_MBED&Prod_XPRO&Rev_1.00#9&35913356&0&ATML2127031800007973&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'),
]
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
def test_get_mbeds(self):
m = MbedLsToolsWin7()
func_type = type(MbedLsToolsWin7.get_mbed_devices)
m.get_mbed_devices = func_type(get_mbed_devices_new, m, MbedLsToolsWin7)
mbeds = m.get_mbeds()
self.assertIsNotNone(mbeds)
self.assertEqual(1, len(mbeds))
mbed = mbeds[0]
self.assertEqual("D:", mbed[0])
self.assertEqual("ATML2127031800007973", mbed[1])
if __name__ == '__main__':
unittest.main()
## Instruction:
Revert "Add test for mbed parsing"
## Code After:
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
if __name__ == '__main__':
unittest.main()
|
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
- # Since we don't have mock, let's monkey-patch
-
- def get_mbed_devices_new(self):
- return [
- ('\\DosDevices\\D:', '_??_USBSTOR#Disk&Ven_MBED&Prod_XPRO&Rev_1.00#9&35913356&0&ATML2127031800007973&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'),
- ]
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
-
- def test_get_mbeds(self):
-
- m = MbedLsToolsWin7()
-
- func_type = type(MbedLsToolsWin7.get_mbed_devices)
- m.get_mbed_devices = func_type(get_mbed_devices_new, m, MbedLsToolsWin7)
-
- mbeds = m.get_mbeds()
-
- self.assertIsNotNone(mbeds)
- self.assertEqual(1, len(mbeds))
-
- mbed = mbeds[0]
-
- self.assertEqual("D:", mbed[0])
- self.assertEqual("ATML2127031800007973", mbed[1])
-
if __name__ == '__main__':
unittest.main()
|
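Aside: with unittest.mock available, the reverted test could be restated without monkey-patching roughly as below; this assumes get_mbeds yields (drive, target_id) pairs exactly as the original asserts describe.

import unittest
from unittest import mock  # stdlib on Python 3.3+; the 'mock' backport on 2.x

from mbed_lstools.lstools_win7 import MbedLsToolsWin7

DEVICES = [
    ('\\DosDevices\\D:',
     '_??_USBSTOR#Disk&Ven_MBED&Prod_XPRO&Rev_1.00#9&35913356&0'
     '&ATML2127031800007973&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'),
]

class Win7MockedTestCase(unittest.TestCase):
    def test_get_mbeds(self):
        with mock.patch.object(MbedLsToolsWin7, 'get_mbed_devices',
                               return_value=DEVICES):
            mbeds = MbedLsToolsWin7().get_mbeds()
        self.assertEqual(len(mbeds), 1)
        self.assertEqual(mbeds[0][0], 'D:')
        self.assertEqual(mbeds[0][1], 'ATML2127031800007973')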
bba433a582a96f5acd59eedb3286e284d81f431d
|
src/nodeconductor_openstack/tests/test_backend.py
|
src/nodeconductor_openstack/tests/test_backend.py
|
import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
|
import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
self.session_recover_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
|
Stop patcher in tear down
|
Stop patcher in tear down
- itacloud-7198
|
Python
|
mit
|
opennode/nodeconductor-openstack
|
import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
+ self.session_recover_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
|
Stop patcher in tear down
|
## Code Before:
import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
## Instruction:
Stop patcher in tear down
## Code After:
import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
self.session_recover_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
|
import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
+ self.session_recover_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
|
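Aside: registering each stop with addCleanup right after start makes the pairing impossible to forget and runs even if setUp fails partway; a simplified sketch of the same test base (the Session and recover patching are omitted here).

import mock
from rest_framework import test

class BaseBackendTestCase(test.APITransactionTestCase):
    def setUp(self):
        for target in ('keystoneclient.v2_0.client.Client',
                       'novaclient.v2.client.Client',
                       'cinderclient.v1.client.Client'):
            patcher = mock.patch(target)
            patcher.start()
            self.addCleanup(patcher.stop)  # guaranteed teardown, no tearDown needed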
7e19c3058615f4599ed7339e2bd157b72cd51018
|
test_dimuon.py
|
test_dimuon.py
|
from dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
|
from dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
def test_inv_mass_nonzero_mass_particles():
# shouldn't actually make any difference if masses are non-zero
pos = Particle(1.0, +0.5, 0, pi/2)
neg = Particle(1.0, -0.5, pi, pi/2)
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
|
Test pair mass for non-zero mass particles
|
Test pair mass for non-zero mass particles
|
Python
|
mit
|
benwaugh/dimuon
|
from dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
-
+
+ def test_inv_mass_nonzero_mass_particles():
+ # shouldn't actually make any difference if masses are non-zero
+ pos = Particle(1.0, +0.5, 0, pi/2)
+ neg = Particle(1.0, -0.5, pi, pi/2)
+ assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
+
+
|
Test pair mass for non-zero mass particles
|
## Code Before:
from dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
## Instruction:
Test pair mass for non-zero mass particles
## Code After:
from dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
def test_inv_mass_nonzero_mass_particles():
# shouldn't actually make any difference if masses are non-zero
pos = Particle(1.0, +0.5, 0, pi/2)
neg = Particle(1.0, -0.5, pi, pi/2)
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
|
from dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
-
+
+ def test_inv_mass_nonzero_mass_particles():
+ # shouldn't actually make any difference if masses are non-zero
+ pos = Particle(1.0, +0.5, 0, pi/2)
+ neg = Particle(1.0, -0.5, pi, pi/2)
+ assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
+
|
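Aside: for reference, the expected 2.0 follows from the standard massless-pair invariant-mass formula, assuming the Particle arguments are (pT, charge, phi, polar angle) so both legs sit at eta = 0:

m^{2} = 2\,p_{T1}\,p_{T2}\,\bigl(\cosh\Delta\eta - \cos\Delta\phi\bigr)
      = 2 \cdot 1 \cdot 1 \cdot \bigl(\cosh 0 - \cos\pi\bigr) = 4,
\qquad m = 2.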
ff35b4353fbb47c602d3561c5e6e84201355df14
|
Cryptor.py
|
Cryptor.py
|
from Crypto.Cipher import AES
class Cryptor(object):
def __init__(self, key, iv):
#self.aes = AES.new(key, mode=AES.MODE_CBC, IV=iv) # This resembles stuff from shairtunes
self.aes = AES.new(key, mode=AES.MODE_ECB, IV=iv) # I found this in airtunesd
self.inbuf = ""
self.outbuf = ""
self.lastLen = 0
def decrypt(self, data):
self.inbuf += data
blocksEnd = len(self.inbuf)
blocksEnd -= blocksEnd % AES.block_size
self.outbuf += self.aes.decrypt(self.inbuf[:blocksEnd])
self.inbuf = self.inbuf[blocksEnd:]
res = self.outbuf[:self.lastLen]
self.outbuf = self.outbuf[self.lastLen:]
self.lastLen = len(data)
return res
class EchoCryptor(object):
def decrypt(self, data):
return data
|
from Crypto.Cipher import AES
import Crypto.Util.Counter
class Cryptor(AES.AESCipher):
def __init__(self, key, iv):
self.counter = Crypto.Util.Counter.new(128, initial_value=long(iv.encode("hex"), 16))
AES.AESCipher.__init__(self, key, mode=AES.MODE_CTR, counter=self.counter)
class EchoCryptor(object):
def decrypt(self, data):
return data
|
Use CTR as encryption mode. Works with iOS6.
|
Use CTR as encryption mode. Works with iOS6.
|
Python
|
bsd-2-clause
|
tzwenn/PyOpenAirMirror,tzwenn/PyOpenAirMirror
|
from Crypto.Cipher import AES
+ import Crypto.Util.Counter
- class Cryptor(object):
+ class Cryptor(AES.AESCipher):
def __init__(self, key, iv):
+ self.counter = Crypto.Util.Counter.new(128, initial_value=long(iv.encode("hex"), 16))
+ AES.AESCipher.__init__(self, key, mode=AES.MODE_CTR, counter=self.counter)
- #self.aes = AES.new(key, mode=AES.MODE_CBC, IV=iv) # This resembles stuff from shairtunes
- self.aes = AES.new(key, mode=AES.MODE_ECB, IV=iv) # I found this in airtunesd
- self.inbuf = ""
- self.outbuf = ""
- self.lastLen = 0
-
- def decrypt(self, data):
- self.inbuf += data
- blocksEnd = len(self.inbuf)
- blocksEnd -= blocksEnd % AES.block_size
- self.outbuf += self.aes.decrypt(self.inbuf[:blocksEnd])
- self.inbuf = self.inbuf[blocksEnd:]
-
- res = self.outbuf[:self.lastLen]
- self.outbuf = self.outbuf[self.lastLen:]
-
- self.lastLen = len(data)
- return res
-
class EchoCryptor(object):
def decrypt(self, data):
return data
|
Use CTR as encryption mode. Works with iOS6.
|
## Code Before:
from Crypto.Cipher import AES
class Cryptor(object):
def __init__(self, key, iv):
#self.aes = AES.new(key, mode=AES.MODE_CBC, IV=iv) # This resembles stuff from shairtunes
self.aes = AES.new(key, mode=AES.MODE_ECB, IV=iv) # I found this in airtunesd
self.inbuf = ""
self.outbuf = ""
self.lastLen = 0
def decrypt(self, data):
self.inbuf += data
blocksEnd = len(self.inbuf)
blocksEnd -= blocksEnd % AES.block_size
self.outbuf += self.aes.decrypt(self.inbuf[:blocksEnd])
self.inbuf = self.inbuf[blocksEnd:]
res = self.outbuf[:self.lastLen]
self.outbuf = self.outbuf[self.lastLen:]
self.lastLen = len(data)
return res
class EchoCryptor(object):
def decrypt(self, data):
return data
## Instruction:
Use CTR as encryption mode. Works with iOS6.
## Code After:
from Crypto.Cipher import AES
import Crypto.Util.Counter
class Cryptor(AES.AESCipher):
def __init__(self, key, iv):
self.counter = Crypto.Util.Counter.new(128, initial_value=long(iv.encode("hex"), 16))
AES.AESCipher.__init__(self, key, mode=AES.MODE_CTR, counter=self.counter)
class EchoCryptor(object):
def decrypt(self, data):
return data
|
from Crypto.Cipher import AES
+ import Crypto.Util.Counter
- class Cryptor(object):
+ class Cryptor(AES.AESCipher):
def __init__(self, key, iv):
+ self.counter = Crypto.Util.Counter.new(128, initial_value=long(iv.encode("hex"), 16))
+ AES.AESCipher.__init__(self, key, mode=AES.MODE_CTR, counter=self.counter)
- #self.aes = AES.new(key, mode=AES.MODE_CBC, IV=iv) # This resembles stuff from shairtunes
- self.aes = AES.new(key, mode=AES.MODE_ECB, IV=iv) # I found this in airtunesd
- self.inbuf = ""
- self.outbuf = ""
- self.lastLen = 0
-
- def decrypt(self, data):
- self.inbuf += data
- blocksEnd = len(self.inbuf)
- blocksEnd -= blocksEnd % AES.block_size
- self.outbuf += self.aes.decrypt(self.inbuf[:blocksEnd])
- self.inbuf = self.inbuf[blocksEnd:]
-
- res = self.outbuf[:self.lastLen]
- self.outbuf = self.outbuf[self.lastLen:]
-
- self.lastLen = len(data)
- return res
-
class EchoCryptor(object):
def decrypt(self, data):
return data
|
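Aside: a roundtrip sketch of the CTR construction above (PyCrypto, Python 2); the key and iv here are dummies, not real AirPlay session values.

from Crypto.Cipher import AES
import Crypto.Util.Counter

key = '0123456789abcdef'        # dummy 16-byte AES-128 key
iv = '\x00' * 15 + '\x01'       # dummy 16-byte initial counter block

def make_cipher():
    # CTR keeps stream position in the counter, so build a fresh one per direction
    ctr = Crypto.Util.Counter.new(128, initial_value=long(iv.encode('hex'), 16))
    return AES.new(key, mode=AES.MODE_CTR, counter=ctr)

ct = make_cipher().encrypt('hello mirroring')
assert make_cipher().decrypt(ct) == 'hello mirroring'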
b3850c475e449c0c6182629aa7521f335e86b1e1
|
scrapy_local.py
|
scrapy_local.py
|
import os
# use this for running scrapy directly
# PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
# Use this for deploying to scrapyd, as it would be in stage/production
FILES_STORE = '/var/lib/scrapyd/files'
|
import os
# use this for running scrapy directly
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
|
Fix issue with scrapy local settings
|
Fix issue with scrapy local settings
|
Python
|
mit
|
comsaint/legco-watch,comsaint/legco-watch,comsaint/legco-watch,legco-watch/legco-watch,legco-watch/legco-watch,legco-watch/legco-watch,legco-watch/legco-watch,comsaint/legco-watch
|
import os
# use this for running scrapy directly
- # PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
+ PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
- # FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
+ FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
- # Use this for deploying to scrapyd, as it would be in stage/production
- FILES_STORE = '/var/lib/scrapyd/files'
|
Fix issue with scrapy local settings
|
## Code Before:
import os
# use this for running scrapy directly
# PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
# Use this for deploying to scrapyd, as it would be in stage/production
FILES_STORE = '/var/lib/scrapyd/files'
## Instruction:
Fix issue with scrapy local settings
## Code After:
import os
# use this for running scrapy directly
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
|
import os
# use this for running scrapy directly
- # PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
? --
+ PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
- # FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
? --
+ FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
-
- # Use this for deploying to scrapyd, as it would be in stage/production
- FILES_STORE = '/var/lib/scrapyd/files'
|
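Aside: one way to keep both targets without commenting blocks in and out is an environment toggle; a sketch, where the SCRAPYD_DEPLOY variable is an invented convention.

import os

if os.environ.get('SCRAPYD_DEPLOY'):
    # as it would be in stage/production under scrapyd
    FILES_STORE = '/var/lib/scrapyd/files'
else:
    PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
    FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')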
2d94532e316e9ad563b3b7506d47cfd78ca7f689
|
tests/test_cattery.py
|
tests/test_cattery.py
|
import pytest
from catinabox import cattery
###########################################################################
# add_cats
###########################################################################
def test__add_cats__succeeds():
c = cattery.Cattery()
c.add_cats(["Fluffy", "Snookums"])
assert c.cats == ["Fluffy", "Snookums"]
assert c.num_cats == 2
###########################################################################
# remove_cat
###########################################################################
def test__remove_cat__succeeds():
c = cattery.Cattery()
c.add_cats(["Fluffy", "Junior"])
c.remove_cat("Fluffy")
assert c.cats == ["Junior"]
assert c.num_cats == 1
def test__remove_cat__no_cats__fails():
c = cattery.Cattery()
with pytest.raises(cattery.CatNotFound):
c.remove_cat("Fluffles")
def test__remove_cat__cat_not_in_cattery__fails():
c = cattery.Cattery()
c.add_cats(["Fluffy"])
with pytest.raises(cattery.CatNotFound):
c.remove_cat("Snookums")
|
import pytest
from catinabox import cattery
###########################################################################
# fixtures
###########################################################################
@pytest.fixture
def c():
return cattery.Cattery()
###########################################################################
# add_cats
###########################################################################
def test__add_cats__succeeds(c):
c.add_cats(["Fluffy", "Snookums"])
assert c.cats == ["Fluffy", "Snookums"]
assert c.num_cats == 2
###########################################################################
# remove_cat
###########################################################################
def test__remove_cat__succeeds(c):
c = cattery.Cattery()
c.add_cats(["Fluffy", "Junior"])
c.remove_cat("Fluffy")
assert c.cats == ["Junior"]
assert c.num_cats == 1
def test__remove_cat__no_cats__fails(c):
with pytest.raises(cattery.CatNotFound):
c.remove_cat("Fluffles")
def test__remove_cat__cat_not_in_cattery__fails(c):
c.add_cats(["Fluffy"])
with pytest.raises(cattery.CatNotFound):
c.remove_cat("Snookums")
|
Add fixtures to cattery tests
|
Step_5: Add fixtures to cattery tests
Add a fixture to remove initialisation of the cattery in every test.
Signed-off-by: Meghan Halton <[email protected]>
|
Python
|
mit
|
indexOutOfBound5/catinabox
|
import pytest
from catinabox import cattery
###########################################################################
+ # fixtures
+ ###########################################################################
+
+ @pytest.fixture
+ def c():
+ return cattery.Cattery()
+
+
+ ###########################################################################
# add_cats
###########################################################################
- def test__add_cats__succeeds():
+ def test__add_cats__succeeds(c):
- c = cattery.Cattery()
c.add_cats(["Fluffy", "Snookums"])
assert c.cats == ["Fluffy", "Snookums"]
assert c.num_cats == 2
###########################################################################
# remove_cat
###########################################################################
- def test__remove_cat__succeeds():
+ def test__remove_cat__succeeds(c):
c = cattery.Cattery()
c.add_cats(["Fluffy", "Junior"])
c.remove_cat("Fluffy")
assert c.cats == ["Junior"]
assert c.num_cats == 1
- def test__remove_cat__no_cats__fails():
+ def test__remove_cat__no_cats__fails(c):
- c = cattery.Cattery()
with pytest.raises(cattery.CatNotFound):
c.remove_cat("Fluffles")
- def test__remove_cat__cat_not_in_cattery__fails():
+ def test__remove_cat__cat_not_in_cattery__fails(c):
- c = cattery.Cattery()
c.add_cats(["Fluffy"])
with pytest.raises(cattery.CatNotFound):
c.remove_cat("Snookums")
|
Add fixtures to cattery tests
|
## Code Before:
import pytest
from catinabox import cattery
###########################################################################
# add_cats
###########################################################################
def test__add_cats__succeeds():
c = cattery.Cattery()
c.add_cats(["Fluffy", "Snookums"])
assert c.cats == ["Fluffy", "Snookums"]
assert c.num_cats == 2
###########################################################################
# remove_cat
###########################################################################
def test__remove_cat__succeeds():
c = cattery.Cattery()
c.add_cats(["Fluffy", "Junior"])
c.remove_cat("Fluffy")
assert c.cats == ["Junior"]
assert c.num_cats == 1
def test__remove_cat__no_cats__fails():
c = cattery.Cattery()
with pytest.raises(cattery.CatNotFound):
c.remove_cat("Fluffles")
def test__remove_cat__cat_not_in_cattery__fails():
c = cattery.Cattery()
c.add_cats(["Fluffy"])
with pytest.raises(cattery.CatNotFound):
c.remove_cat("Snookums")
## Instruction:
Add fixtures to cattery tests
## Code After:
import pytest
from catinabox import cattery
###########################################################################
# fixtures
###########################################################################
@pytest.fixture
def c():
return cattery.Cattery()
###########################################################################
# add_cats
###########################################################################
def test__add_cats__succeeds(c):
c.add_cats(["Fluffy", "Snookums"])
assert c.cats == ["Fluffy", "Snookums"]
assert c.num_cats == 2
###########################################################################
# remove_cat
###########################################################################
def test__remove_cat__succeeds(c):
c = cattery.Cattery()
c.add_cats(["Fluffy", "Junior"])
c.remove_cat("Fluffy")
assert c.cats == ["Junior"]
assert c.num_cats == 1
def test__remove_cat__no_cats__fails(c):
with pytest.raises(cattery.CatNotFound):
c.remove_cat("Fluffles")
def test__remove_cat__cat_not_in_cattery__fails(c):
c.add_cats(["Fluffy"])
with pytest.raises(cattery.CatNotFound):
c.remove_cat("Snookums")
|
import pytest
from catinabox import cattery
###########################################################################
+ # fixtures
+ ###########################################################################
+
+ @pytest.fixture
+ def c():
+ return cattery.Cattery()
+
+
+ ###########################################################################
# add_cats
###########################################################################
- def test__add_cats__succeeds():
+ def test__add_cats__succeeds(c):
? +
- c = cattery.Cattery()
c.add_cats(["Fluffy", "Snookums"])
assert c.cats == ["Fluffy", "Snookums"]
assert c.num_cats == 2
###########################################################################
# remove_cat
###########################################################################
- def test__remove_cat__succeeds():
+ def test__remove_cat__succeeds(c):
? +
c = cattery.Cattery()
c.add_cats(["Fluffy", "Junior"])
c.remove_cat("Fluffy")
assert c.cats == ["Junior"]
assert c.num_cats == 1
- def test__remove_cat__no_cats__fails():
+ def test__remove_cat__no_cats__fails(c):
? +
- c = cattery.Cattery()
with pytest.raises(cattery.CatNotFound):
c.remove_cat("Fluffles")
- def test__remove_cat__cat_not_in_cattery__fails():
+ def test__remove_cat__cat_not_in_cattery__fails(c):
? +
- c = cattery.Cattery()
c.add_cats(["Fluffy"])
with pytest.raises(cattery.CatNotFound):
c.remove_cat("Snookums")
|
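Aside: fixtures compose, so the repeated add_cats setup could be lifted the same way; a sketch building on the c fixture from the record above.

import pytest

@pytest.fixture
def populated(c):
    # depends on the plain cattery fixture and pre-loads two cats
    c.add_cats(["Fluffy", "Junior"])
    return c

def test__remove_cat__succeeds(populated):
    populated.remove_cat("Fluffy")
    assert populated.cats == ["Junior"]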
d7bce814c10ce13cf4c228fd87dcbdee75f8d0a1
|
integration-test/1211-fix-null-network.py
|
integration-test/1211-fix-null-network.py
|
from . import OsmFixtureTest
class FixNullNetwork(OsmFixtureTest):
def test_routes_with_no_network(self):
# ref="N 4", route=road, but no network=*
# so we should get something that has no network, but a shield text of
# '4'
self.load_fixtures(['http://www.openstreetmap.org/relation/2307408'])
self.assert_has_feature(
11, 1038, 705, 'roads',
{'kind': 'major_road', 'shield_text': '4', 'network': type(None)})
|
from . import OsmFixtureTest
class FixNullNetwork(OsmFixtureTest):
def test_routes_with_no_network(self):
# ref="N 4", route=road, but no network=*
# so we should get something that has no network, but a shield text of
# '4'
self.load_fixtures(
['http://www.openstreetmap.org/relation/2307408'],
clip=self.tile_bbox(11, 1038, 705))
self.assert_has_feature(
11, 1038, 705, 'roads',
{'kind': 'major_road', 'shield_text': '4', 'network': type(None)})
|
Add clip to reduce fixture size.
|
Add clip to reduce fixture size.
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
from . import OsmFixtureTest
class FixNullNetwork(OsmFixtureTest):
def test_routes_with_no_network(self):
# ref="N 4", route=road, but no network=*
# so we should get something that has no network, but a shield text of
# '4'
+ self.load_fixtures(
- self.load_fixtures(['http://www.openstreetmap.org/relation/2307408'])
+ ['http://www.openstreetmap.org/relation/2307408'],
+ clip=self.tile_bbox(11, 1038, 705))
self.assert_has_feature(
11, 1038, 705, 'roads',
{'kind': 'major_road', 'shield_text': '4', 'network': type(None)})
|
Add clip to reduce fixture size.
|
## Code Before:
from . import OsmFixtureTest
class FixNullNetwork(OsmFixtureTest):
def test_routes_with_no_network(self):
# ref="N 4", route=road, but no network=*
# so we should get something that has no network, but a shield text of
# '4'
self.load_fixtures(['http://www.openstreetmap.org/relation/2307408'])
self.assert_has_feature(
11, 1038, 705, 'roads',
{'kind': 'major_road', 'shield_text': '4', 'network': type(None)})
## Instruction:
Add clip to reduce fixture size.
## Code After:
from . import OsmFixtureTest
class FixNullNetwork(OsmFixtureTest):
def test_routes_with_no_network(self):
# ref="N 4", route=road, but no network=*
# so we should get something that has no network, but a shield text of
# '4'
self.load_fixtures(
['http://www.openstreetmap.org/relation/2307408'],
clip=self.tile_bbox(11, 1038, 705))
self.assert_has_feature(
11, 1038, 705, 'roads',
{'kind': 'major_road', 'shield_text': '4', 'network': type(None)})
|
from . import OsmFixtureTest
class FixNullNetwork(OsmFixtureTest):
def test_routes_with_no_network(self):
# ref="N 4", route=road, but no network=*
# so we should get something that has no network, but a shield text of
# '4'
+ self.load_fixtures(
- self.load_fixtures(['http://www.openstreetmap.org/relation/2307408'])
? ^^^^^^^^^^^^^^^^^^^ ^
+ ['http://www.openstreetmap.org/relation/2307408'],
? ^^^^ ^
+ clip=self.tile_bbox(11, 1038, 705))
self.assert_has_feature(
11, 1038, 705, 'roads',
{'kind': 'major_road', 'shield_text': '4', 'network': type(None)})
|
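Aside: for reference, a slippy-map tile (z, x, y) maps to a lon/lat box with the standard web-mercator math below; the project's tile_bbox helper may use a different projection or ordering.

import math

def tile_bbox(z, x, y):
    n = 2.0 ** z
    def lon(xi):
        return xi / n * 360.0 - 180.0
    def lat(yi):
        return math.degrees(math.atan(math.sinh(math.pi * (1 - 2 * yi / n))))
    return (lon(x), lat(y + 1), lon(x + 1), lat(y))  # west, south, east, north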
48b6bb91537d9daecca2bc112f5e06dc9b530f09
|
scripts/c2s-info.py
|
scripts/c2s-info.py
|
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
from numpy import corrcoef, mean
from c2s import load_data
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
args = parser.parse_args(argv[1:])
# load data
data = load_data(args.dataset)
def prints(left, right):
print('{0:<10} {1}'.format(left, right))
prints('Average sampling rate:', mean([entry['fps'] for entry in data]))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
from numpy import corrcoef, mean, unique
from c2s import load_data
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
args = parser.parse_args(argv[1:])
# load data
data = load_data(args.dataset)
def prints(left, right):
print('{0:<30} {1}'.format(left, right))
num_spikes = 0
length = 0
for entry in data:
length += entry['calcium'].size / float(entry['fps']) # seconds
if 'spike_times' in entry:
num_spikes += entry['spike_times'].size
elif 'spikes' in entry:
num_spikes += entry['spikes'].sum()
if 'cell_num' in data[0]:
num_cells = len(unique([entry['cell_num'] for entry in data]))
else:
num_cells = len(data)
prints('Number of cells:', '{0}'.format(num_cells))
prints('Number of traces:', '{0}'.format(len(data)))
prints('Total length:', '{0} minutes, {1} seconds'.format(int(length) // 60, int(length) % 60))
prints('Total number of spikes:', num_spikes)
prints('Average firing rate:', '{0:.2f} [spike/sec]'.format(num_spikes / length))
prints('Average sampling rate:', '{0:.1f}'.format(mean([entry['fps'] for entry in data])))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Print a little bit more info.
|
Print a little bit more info.
|
Python
|
mit
|
lucastheis/c2s,jonasrauber/c2s
|
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
- from numpy import corrcoef, mean
+ from numpy import corrcoef, mean, unique
from c2s import load_data
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
args = parser.parse_args(argv[1:])
# load data
data = load_data(args.dataset)
def prints(left, right):
- print('{0:<10} {1}'.format(left, right))
+ print('{0:<30} {1}'.format(left, right))
+ num_spikes = 0
+ length = 0
+ for entry in data:
+ length += entry['calcium'].size / float(entry['fps']) # seconds
+ if 'spike_times' in entry:
+ num_spikes += entry['spike_times'].size
+ elif 'spikes' in entry:
+ num_spikes += entry['spikes'].sum()
+
+ if 'cell_num' in data[0]:
+ num_cells = len(unique([entry['cell_num'] for entry in data]))
+ else:
+ num_cells = len(data)
+
+ prints('Number of cells:', '{0}'.format(num_cells))
+ prints('Number of traces:', '{0}'.format(len(data)))
+ prints('Total length:', '{0} minutes, {1} seconds'.format(int(length) // 60, int(length) % 60))
+ prints('Total number of spikes:', num_spikes)
+ prints('Average firing rate:', '{0:.2f} [spike/sec]'.format(num_spikes / length))
- prints('Average sampling rate:', mean([entry['fps'] for entry in data]))
+ prints('Average sampling rate:', '{0:.1f}'.format(mean([entry['fps'] for entry in data])))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Print a little bit more info.
|
## Code Before:
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
from numpy import corrcoef, mean
from c2s import load_data
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
args = parser.parse_args(argv[1:])
# load data
data = load_data(args.dataset)
def prints(left, right):
print('{0:<10} {1}'.format(left, right))
prints('Average sampling rate:', mean([entry['fps'] for entry in data]))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
## Instruction:
Print a little bit more info.
## Code After:
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
from numpy import corrcoef, mean, unique
from c2s import load_data
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
args = parser.parse_args(argv[1:])
# load data
data = load_data(args.dataset)
def prints(left, right):
print('{0:<30} {1}'.format(left, right))
num_spikes = 0
length = 0
for entry in data:
length += entry['calcium'].size / float(entry['fps']) # seconds
if 'spike_times' in entry:
num_spikes += entry['spike_times'].size
elif 'spikes' in entry:
num_spikes += entry['spikes'].sum()
if 'cell_num' in data[0]:
num_cells = len(unique([entry['cell_num'] for entry in data]))
else:
num_cells = len(data)
prints('Number of cells:', '{0}'.format(num_cells))
prints('Number of traces:', '{0}'.format(len(data)))
prints('Total length:', '{0} minutes, {1} seconds'.format(int(length) // 60, int(length) % 60))
prints('Total number of spikes:', num_spikes)
prints('Average firing rate:', '{0:.2f} [spike/sec]'.format(num_spikes / length))
prints('Average sampling rate:', '{0:.1f}'.format(mean([entry['fps'] for entry in data])))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
- from numpy import corrcoef, mean
+ from numpy import corrcoef, mean, unique
? ++++++++
from c2s import load_data
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
args = parser.parse_args(argv[1:])
# load data
data = load_data(args.dataset)
def prints(left, right):
- print('{0:<10} {1}'.format(left, right))
? ^
+ print('{0:<30} {1}'.format(left, right))
? ^
+ num_spikes = 0
+ length = 0
+ for entry in data:
+ length += entry['calcium'].size / float(entry['fps']) # seconds
+ if 'spike_times' in entry:
+ num_spikes += entry['spike_times'].size
+ elif 'spikes' in entry:
+ num_spikes += entry['spikes'].sum()
+
+ if 'cell_num' in data[0]:
+ num_cells = len(unique([entry['cell_num'] for entry in data]))
+ else:
+ num_cells = len(data)
+
+ prints('Number of cells:', '{0}'.format(num_cells))
+ prints('Number of traces:', '{0}'.format(len(data)))
+ prints('Total length:', '{0} minutes, {1} seconds'.format(int(length) // 60, int(length) % 60))
+ prints('Total number of spikes:', num_spikes)
+ prints('Average firing rate:', '{0:.2f} [spike/sec]'.format(num_spikes / length))
- prints('Average sampling rate:', mean([entry['fps'] for entry in data]))
+ prints('Average sampling rate:', '{0:.1f}'.format(mean([entry['fps'] for entry in data])))
? +++++++++++++++++ +
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
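Aside: the '{0:<30}' spec left-aligns the label in a 30-character column, which is what lines the values up; for example:

print('{0:<30} {1}'.format('Total number of spikes:', 1234))
# -> Total number of spikes:        1234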
2c03171b75b6bb4f3a77d3b46ee8fd1e5b022077
|
template_engine/jinja2_filters.py
|
template_engine/jinja2_filters.py
|
from email import utils
import re
import time
import urllib
def digits(s):
if not s:
return ''
return re.sub('[^0-9]', '', s)
def floatformat(num, num_decimals):
return "%.{}f".format(num_decimals) % num
def strftime(datetime, formatstr):
"""
Uses Python's strftime with some tweaks
"""
return datetime.strftime(formatstr).lstrip("0").replace(" 0", " ")
def strip_frc(s):
if not s:
return ''
return s[3:]
def urlencode(s):
return urllib.quote(s.encode('utf8'))
def rfc2822(datetime):
tt = datetime.timetuple()
timestamp = time.mktime(tt)
return utils.formatdate(timestamp)
# def slugify(s):
# """
# Use Django's slugify method
# """
# return defaultfilters.slugify(s)
|
from email import utils
import re
import time
import urllib
def digits(s):
if not s:
return ''
if type(s) is int:
return s
return re.sub('[^0-9]', '', s)
def floatformat(num, num_decimals):
return "%.{}f".format(num_decimals) % num
def strftime(datetime, formatstr):
"""
Uses Python's strftime with some tweaks
"""
return datetime.strftime(formatstr).lstrip("0").replace(" 0", " ")
def strip_frc(s):
if not s:
return ''
return s[3:]
def urlencode(s):
return urllib.quote(s.encode('utf8'))
def rfc2822(datetime):
tt = datetime.timetuple()
timestamp = time.mktime(tt)
return utils.formatdate(timestamp)
# def slugify(s):
# """
# Use Django's slugify method
# """
# return defaultfilters.slugify(s)
|
Fix type error if input is int
|
Fix type error if input is int
|
Python
|
mit
|
bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,fangeugene/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance
|
from email import utils
import re
import time
import urllib
def digits(s):
if not s:
return ''
+ if type(s) is int:
+ return s
return re.sub('[^0-9]', '', s)
def floatformat(num, num_decimals):
return "%.{}f".format(num_decimals) % num
def strftime(datetime, formatstr):
"""
Uses Python's strftime with some tweaks
"""
return datetime.strftime(formatstr).lstrip("0").replace(" 0", " ")
def strip_frc(s):
if not s:
return ''
return s[3:]
def urlencode(s):
return urllib.quote(s.encode('utf8'))
def rfc2822(datetime):
tt = datetime.timetuple()
timestamp = time.mktime(tt)
return utils.formatdate(timestamp)
# def slugify(s):
# """
# Use Django's slugify method
# """
# return defaultfilters.slugify(s)
|
Fix type error if input is int
|
## Code Before:
from email import utils
import re
import time
import urllib
def digits(s):
if not s:
return ''
return re.sub('[^0-9]', '', s)
def floatformat(num, num_decimals):
return "%.{}f".format(num_decimals) % num
def strftime(datetime, formatstr):
"""
Uses Python's strftime with some tweaks
"""
return datetime.strftime(formatstr).lstrip("0").replace(" 0", " ")
def strip_frc(s):
if not s:
return ''
return s[3:]
def urlencode(s):
return urllib.quote(s.encode('utf8'))
def rfc2822(datetime):
tt = datetime.timetuple()
timestamp = time.mktime(tt)
return utils.formatdate(timestamp)
# def slugify(s):
# """
# Use Django's slugify method
# """
# return defaultfilters.slugify(s)
## Instruction:
Fix type error if input is int
## Code After:
from email import utils
import re
import time
import urllib
def digits(s):
if not s:
return ''
if type(s) is int:
return s
return re.sub('[^0-9]', '', s)
def floatformat(num, num_decimals):
return "%.{}f".format(num_decimals) % num
def strftime(datetime, formatstr):
"""
Uses Python's strftime with some tweaks
"""
return datetime.strftime(formatstr).lstrip("0").replace(" 0", " ")
def strip_frc(s):
if not s:
return ''
return s[3:]
def urlencode(s):
return urllib.quote(s.encode('utf8'))
def rfc2822(datetime):
tt = datetime.timetuple()
timestamp = time.mktime(tt)
return utils.formatdate(timestamp)
# def slugify(s):
# """
# Use Django's slugify method
# """
# return defaultfilters.slugify(s)
|
from email import utils
import re
import time
import urllib
def digits(s):
if not s:
return ''
+ if type(s) is int:
+ return s
return re.sub('[^0-9]', '', s)
def floatformat(num, num_decimals):
return "%.{}f".format(num_decimals) % num
def strftime(datetime, formatstr):
"""
Uses Python's strftime with some tweaks
"""
return datetime.strftime(formatstr).lstrip("0").replace(" 0", " ")
def strip_frc(s):
if not s:
return ''
return s[3:]
def urlencode(s):
return urllib.quote(s.encode('utf8'))
def rfc2822(datetime):
tt = datetime.timetuple()
timestamp = time.mktime(tt)
return utils.formatdate(timestamp)
# def slugify(s):
# """
# Use Django's slugify method
# """
# return defaultfilters.slugify(s)
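
The guard returns ints untouched before they reach re.sub, which raises TypeError on non-string input. A minimal standalone sketch of the same fix, using isinstance as a common alternative spelling (the test values are illustrative):

import re

def digits(s):
    # Falsy input ('' or None) short-circuits to an empty string.
    if not s:
        return ''
    # An int has no stray characters to strip, and re.sub would raise
    # TypeError on it, so pass it through unchanged.
    if isinstance(s, int):
        return s
    return re.sub('[^0-9]', '', s)

assert digits('(555) 123-4567') == '5551234567'
assert digits(42) == 42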
|
574fba0650e4c68b7a31533207c26d8d57ed49c2
|
run.py
|
run.py
|
from sys import argv
from examples.connect4.connect4 import Connect4Network
if __name__ == '__main__':
if argv[1] == 'connect4':
Connect4Network().play()
|
from sys import argv
from examples.connect4.connect4 import Connect4Network
if __name__ == '__main__':
if len(argv) > 1:
if argv[1] == 'connect4':
Connect4Network().play()
|
Check length of argv before accessing it
|
Check length of argv before accessing it
|
Python
|
mit
|
tysonzero/py-ann
|
from sys import argv
from examples.connect4.connect4 import Connect4Network
if __name__ == '__main__':
+ if len(argv) > 1:
- if argv[1] == 'connect4':
+ if argv[1] == 'connect4':
- Connect4Network().play()
+ Connect4Network().play()
|
Check length of argv before accessing it
|
## Code Before:
from sys import argv
from examples.connect4.connect4 import Connect4Network
if __name__ == '__main__':
if argv[1] == 'connect4':
Connect4Network().play()
## Instruction:
Check length of argv before accessing it
## Code After:
from sys import argv
from examples.connect4.connect4 import Connect4Network
if __name__ == '__main__':
if len(argv) > 1:
if argv[1] == 'connect4':
Connect4Network().play()
|
from sys import argv
from examples.connect4.connect4 import Connect4Network
if __name__ == '__main__':
+ if len(argv) > 1:
- if argv[1] == 'connect4':
+ if argv[1] == 'connect4':
? ++++
- Connect4Network().play()
+ Connect4Network().play()
? ++++
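
The len(argv) guard prevents an IndexError when the script is launched with no arguments. argparse offers the same protection plus a usage message; a hedged sketch of that variant (only the 'connect4' name comes from the record, the rest is illustrative):

import argparse

parser = argparse.ArgumentParser(description='Run an example network.')
# A missing positional argument becomes a clean usage error here,
# instead of an IndexError from argv[1].
parser.add_argument('example', choices=['connect4'],
                    help='name of the example to run')
args = parser.parse_args()
if args.example == 'connect4':
    print('would start Connect4Network().play() here')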
|
f48eb543c3ae2222a71080592ae8932c227dc605
|
roche/scripts/xml-load.py
|
roche/scripts/xml-load.py
|
import sys
sys.path.append('../../')
import roche.settings
from eulexistdb.db import ExistDB
from roche.settings import EXISTDB_SERVER_URL
#
# Timeout higher?
#
xmldb = ExistDB(timeout=30)
xmldb.createCollection('docker', True)
xmldb.createCollection(u'docker/浙江大學圖書館', True)
with open('../../../dublin-store/db/test_001.xml') as f:
xmldb.load(f, '/docker/001.xml', True)
|
import sys
sys.path.append('.')
import roche.settings
from eulexistdb.db import ExistDB
from roche.settings import EXISTDB_SERVER_URL
#
# Timeout higher?
#
xmldb = ExistDB(timeout=30)
xmldb.createCollection('docker', True)
xmldb.createCollection(u'docker/浙江大學圖書館', True)
with open('../dublin-store/db/test_001.xml') as f:
xmldb.load(f, '/docker/001.xml', True)
|
Fix relative path in relation to app root dir
|
Fix relative path in relation to app root dir
|
Python
|
mit
|
beijingren/roche-website,beijingren/roche-website,beijingren/roche-website,beijingren/roche-website
|
import sys
- sys.path.append('../../')
+ sys.path.append('.')
import roche.settings
from eulexistdb.db import ExistDB
from roche.settings import EXISTDB_SERVER_URL
#
# Timeout higher?
#
xmldb = ExistDB(timeout=30)
xmldb.createCollection('docker', True)
xmldb.createCollection(u'docker/浙江大學圖書館', True)
- with open('../../../dublin-store/db/test_001.xml') as f:
+ with open('../dublin-store/db/test_001.xml') as f:
xmldb.load(f, '/docker/001.xml', True)
|
Fix relative path in relation to app root dir
|
## Code Before:
import sys
sys.path.append('../../')
import roche.settings
from eulexistdb.db import ExistDB
from roche.settings import EXISTDB_SERVER_URL
#
# Timeout higher?
#
xmldb = ExistDB(timeout=30)
xmldb.createCollection('docker', True)
xmldb.createCollection(u'docker/浙江大學圖書館', True)
with open('../../../dublin-store/db/test_001.xml') as f:
xmldb.load(f, '/docker/001.xml', True)
## Instruction:
Fix relative path in relation to app root dir
## Code After:
import sys
sys.path.append('.')
import roche.settings
from eulexistdb.db import ExistDB
from roche.settings import EXISTDB_SERVER_URL
#
# Timeout higher?
#
xmldb = ExistDB(timeout=30)
xmldb.createCollection('docker', True)
xmldb.createCollection(u'docker/浙江大學圖書館', True)
with open('../dublin-store/db/test_001.xml') as f:
xmldb.load(f, '/docker/001.xml', True)
|
import sys
- sys.path.append('../../')
? -----
+ sys.path.append('.')
import roche.settings
from eulexistdb.db import ExistDB
from roche.settings import EXISTDB_SERVER_URL
#
# Timeout higher?
#
xmldb = ExistDB(timeout=30)
xmldb.createCollection('docker', True)
xmldb.createCollection(u'docker/浙江大學圖書館', True)
- with open('../../../dublin-store/db/test_001.xml') as f:
? ------
+ with open('../dublin-store/db/test_001.xml') as f:
xmldb.load(f, '/docker/001.xml', True)
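
Paths written relative to the working directory break as soon as the script runs from somewhere else, which is what this commit adjusts for. Anchoring on the script's own location sidesteps the problem entirely; a small sketch reusing the record's file layout:

import os

# Resolve the data file against this script's directory rather than
# the current working directory, so the path works from anywhere.
HERE = os.path.dirname(os.path.abspath(__file__))
data_path = os.path.normpath(
    os.path.join(HERE, '..', 'dublin-store', 'db', 'test_001.xml'))
print(data_path)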
|
ad97fa93ad50bfb73c29798f9f1f24465c6a3683
|
_lua_paths.py
|
_lua_paths.py
|
import os
import re
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
return os.path.commonprefix(*args).rpartition(sep)[0]
def __getProjectPaths(view):
project_data=view.window().project_data()
if project_data is None:
return []
paths=[]
if "folders" in project_data:
folders=project_data["folders"]
for f in folders:
if "path" in f and os.path.isabs(f["path"]):
paths.append(f["path"])
return paths
def __getViewPath(view):
searchpath=__commonprefix(view.window().folders())
for root, dirs, files in os.walk(searchpath):
for name in files:
if "main.lua"==name:
return root
def getLuaFilesAndPaths(view,followlinks):
luaPaths=[]
paths=__getProjectPaths(view)
paths.append(__getViewPath(view))
for path in paths:
for root, dirs, files in os.walk(path,followlinks=followlinks):
for name in files:
if ".lua" in name:
name=os.path.splitext(name)[0]
relpath=os.path.relpath(os.path.join(root, name),start=path)
luaPaths.append((name,_findBackslash.sub(".",relpath)))
return luaPaths
|
import os
import re
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
return os.path.commonprefix(*args).rpartition(sep)[0]
def __getProjectPaths(view):
project_data=view.window().project_data()
if project_data is None:
return []
paths=[]
if "folders" in project_data:
folders=project_data["folders"]
for f in folders:
if "path" in f and os.path.isabs(f["path"]):
paths.append(f["path"])
return paths
def __getViewPath(view):
searchpath=__commonprefix(view.window().folders())
for root, dirs, files in os.walk(searchpath):
for name in files:
if "main.lua"==name:
return root
def getLuaFilesAndPaths(view,followlinks):
luaPaths=[]
paths=__getProjectPaths(view)
viewPath=__getViewPath(view)
if viewPath is not None:
paths.append(viewPath)
for path in paths:
for root, dirs, files in os.walk(path,followlinks=followlinks):
for name in files:
if ".lua" in name:
name=os.path.splitext(name)[0]
relpath=os.path.relpath(os.path.join(root, name),start=path)
luaPaths.append((name,_findBackslash.sub(".",relpath)))
return luaPaths
|
Fix crash if view returns no valid path
|
Fix crash if view returns no valid path
|
Python
|
mit
|
coronalabs/CoronaSDK-SublimeText,coronalabs/CoronaSDK-SublimeText
|
import os
import re
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
return os.path.commonprefix(*args).rpartition(sep)[0]
def __getProjectPaths(view):
project_data=view.window().project_data()
if project_data is None:
return []
paths=[]
if "folders" in project_data:
folders=project_data["folders"]
for f in folders:
if "path" in f and os.path.isabs(f["path"]):
paths.append(f["path"])
return paths
def __getViewPath(view):
searchpath=__commonprefix(view.window().folders())
for root, dirs, files in os.walk(searchpath):
for name in files:
if "main.lua"==name:
return root
def getLuaFilesAndPaths(view,followlinks):
luaPaths=[]
paths=__getProjectPaths(view)
- paths.append(__getViewPath(view))
-
+ viewPath=__getViewPath(view)
+ if viewPath is not None:
+ paths.append(viewPath)
+
for path in paths:
for root, dirs, files in os.walk(path,followlinks=followlinks):
for name in files:
if ".lua" in name:
name=os.path.splitext(name)[0]
relpath=os.path.relpath(os.path.join(root, name),start=path)
luaPaths.append((name,_findBackslash.sub(".",relpath)))
return luaPaths
|
Fix crash if view returns no valid path
|
## Code Before:
import os
import re
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
return os.path.commonprefix(*args).rpartition(sep)[0]
def __getProjectPaths(view):
project_data=view.window().project_data()
if project_data is None:
return []
paths=[]
if "folders" in project_data:
folders=project_data["folders"]
for f in folders:
if "path" in f and os.path.isabs(f["path"]):
paths.append(f["path"])
return paths
def __getViewPath(view):
searchpath=__commonprefix(view.window().folders())
for root, dirs, files in os.walk(searchpath):
for name in files:
if "main.lua"==name:
return root
def getLuaFilesAndPaths(view,followlinks):
luaPaths=[]
paths=__getProjectPaths(view)
paths.append(__getViewPath(view))
for path in paths:
for root, dirs, files in os.walk(path,followlinks=followlinks):
for name in files:
if ".lua" in name:
name=os.path.splitext(name)[0]
relpath=os.path.relpath(os.path.join(root, name),start=path)
luaPaths.append((name,_findBackslash.sub(".",relpath)))
return luaPaths
## Instruction:
Fix crash if view returns no valid path
## Code After:
import os
import re
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
return os.path.commonprefix(*args).rpartition(sep)[0]
def __getProjectPaths(view):
project_data=view.window().project_data()
if project_data is None:
return []
paths=[]
if "folders" in project_data:
folders=project_data["folders"]
for f in folders:
if "path" in f and os.path.isabs(f["path"]):
paths.append(f["path"])
return paths
def __getViewPath(view):
searchpath=__commonprefix(view.window().folders())
for root, dirs, files in os.walk(searchpath):
for name in files:
if "main.lua"==name:
return root
def getLuaFilesAndPaths(view,followlinks):
luaPaths=[]
paths=__getProjectPaths(view)
viewPath=__getViewPath(view)
if viewPath is not None:
paths.append(viewPath)
for path in paths:
for root, dirs, files in os.walk(path,followlinks=followlinks):
for name in files:
if ".lua" in name:
name=os.path.splitext(name)[0]
relpath=os.path.relpath(os.path.join(root, name),start=path)
luaPaths.append((name,_findBackslash.sub(".",relpath)))
return luaPaths
|
import os
import re
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
return os.path.commonprefix(*args).rpartition(sep)[0]
def __getProjectPaths(view):
project_data=view.window().project_data()
if project_data is None:
return []
paths=[]
if "folders" in project_data:
folders=project_data["folders"]
for f in folders:
if "path" in f and os.path.isabs(f["path"]):
paths.append(f["path"])
return paths
def __getViewPath(view):
searchpath=__commonprefix(view.window().folders())
for root, dirs, files in os.walk(searchpath):
for name in files:
if "main.lua"==name:
return root
def getLuaFilesAndPaths(view,followlinks):
luaPaths=[]
paths=__getProjectPaths(view)
- paths.append(__getViewPath(view))
-
+ viewPath=__getViewPath(view)
+ if viewPath is not None:
+ paths.append(viewPath)
+
for path in paths:
for root, dirs, files in os.walk(path,followlinks=followlinks):
for name in files:
if ".lua" in name:
name=os.path.splitext(name)[0]
relpath=os.path.relpath(os.path.join(root, name),start=path)
luaPaths.append((name,_findBackslash.sub(".",relpath)))
return luaPaths
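
__getViewPath falls off the end and implicitly returns None when no main.lua exists; appending that None to the path list makes the later os.walk and os.path calls misbehave or raise, depending on the Python version, hence the explicit guard. The same pattern in a compact standalone form (all names illustrative):

def find_project_root(folders):
    # Implicitly returns None when nothing matches, like __getViewPath.
    for folder in folders:
        if folder.endswith('/project'):
            return folder

paths = ['/srv/a', '/srv/b']
root = find_project_root(paths)
# Only extend the search list when a real path came back.
if root is not None:
    paths.append(root)
print(paths)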
|
bfe5c6a16bf8515ae6ba49f4633f1a301e445092
|
redcliff/cli.py
|
redcliff/cli.py
|
from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
from .utils import merge
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
|
from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
from .utils import merge, error
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
dest='url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
dest='key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
required = ['url', 'key']
missing = lambda x: bool(merged_conf.get(x))
if not all(map(missing, required)):
error('fatal: base_url and api_key are required')
return 1
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
|
Fix exception when required options are missing
|
Fix exception when required options are missing
Options can be on command line and in config file, so we check in merged
dictionary after getting from both sources.
|
Python
|
mit
|
dmedvinsky/redcliff
|
from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
- from .utils import merge
+ from .utils import merge, error
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
+ dest='url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
+ dest='key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
+ required = ['url', 'key']
+ missing = lambda x: bool(merged_conf.get(x))
+ if not all(map(missing, required)):
+ error('fatal: base_url and api_key are required')
+ return 1
+
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
|
Fix exception when required options are missing
|
## Code Before:
from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
from .utils import merge
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
## Instruction:
Fix exception when required options are missing
## Code After:
from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
from .utils import merge, error
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
dest='url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
dest='key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
required = ['url', 'key']
missing = lambda x: bool(merged_conf.get(x))
if not all(map(missing, required)):
error('fatal: base_url and api_key are required')
return 1
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
|
from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
- from .utils import merge
+ from .utils import merge, error
? +++++++
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
+ dest='url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
+ dest='key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
+ required = ['url', 'key']
+ missing = lambda x: bool(merged_conf.get(x))
+ if not all(map(missing, required)):
+ error('fatal: base_url and api_key are required')
+ return 1
+
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
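
After merging, the run is rejected if either source still leaves url or key empty. One readability wrinkle in the committed code: the helper is named missing yet returns True when the option is present. A sketch of the same check with a clearer shape (all names illustrative):

def check_required(conf, required=('url', 'key')):
    # Collect the keys whose merged value is absent or empty.
    absent = [k for k in required if not conf.get(k)]
    if absent:
        raise SystemExit('fatal: missing required option(s): '
                         + ', '.join(absent))

check_required({'url': 'https://redmine.example.com', 'key': 'abc123'})  # passes
check_required({'url': 'https://redmine.example.com'})  # exits: key is empty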
|
c73572f2a9b63d35daf8b5935c4a1e6a0422c122
|
pinax/documents/receivers.py
|
pinax/documents/receivers.py
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
|
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage, Document
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
# Receive the pre_delete signal and delete the file associated with the model instance.
@receiver(pre_delete, sender=Document)
def document_delete(sender, instance, **kwargs):
# Pass false so FileField doesn't save the model.
instance.file.delete(False)
|
Implement deletion of file object via Document model pre_delete signal.
|
Implement deletion of file object via Document model pre_delete signal.
|
Python
|
mit
|
pinax/pinax-documents
|
- from django.db.models.signals import post_save
+ from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from .conf import settings
- from .models import UserStorage
+ from .models import UserStorage, Document
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
+
+ # Receive the pre_delete signal and delete the file associated with the model instance.
+ @receiver(pre_delete, sender=Document)
+ def document_delete(sender, instance, **kwargs):
+ # Pass false so FileField doesn't save the model.
+ instance.file.delete(False)
|
Implement deletion of file object via Document model pre_delete signal.
|
## Code Before:
from django.db.models.signals import post_save
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
## Instruction:
Implement deletion of file object via Document model pre_save signal.
## Code After:
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage, Document
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
# Receive the pre_delete signal and delete the file associated with the model instance.
@receiver(pre_delete, sender=Document)
def document_delete(sender, instance, **kwargs):
# Pass false so FileField doesn't save the model.
instance.file.delete(False)
|
- from django.db.models.signals import post_save
+ from django.db.models.signals import post_save, pre_delete
? ++++++++++++
from django.dispatch import receiver
from .conf import settings
- from .models import UserStorage
+ from .models import UserStorage, Document
? ++++++++++
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
+
+
+ # Receive the pre_delete signal and delete the file associated with the model instance.
+ @receiver(pre_delete, sender=Document)
+ def document_delete(sender, instance, **kwargs):
+ # Pass false so FileField doesn't save the model.
+ instance.file.delete(False)
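
Django's FileField does not remove the underlying file when its row is deleted, so a pre_delete receiver is the usual place to do it, and delete(False) avoids re-saving a model that is about to disappear. The signal ordering can be imitated without Django in a few lines (stand-in names only):

class Document:
    def __init__(self, path):
        self.path = path

    def delete(self, pre_delete_listeners):
        # Fire listeners before the row goes away, mirroring pre_delete.
        for listener in pre_delete_listeners:
            listener(self)
        print('row for %s deleted' % self.path)

def remove_file(instance):
    print('removing %s from storage' % instance.path)

Document('report.pdf').delete([remove_file])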
|
d1d66c37419a85a4258f37201261d76a8f6a9e03
|
ckeditor/fields.py
|
ckeditor/fields.py
|
from django.db import models
from django import forms
from ckeditor.widgets import CKEditorWidget
class RichTextField(models.TextField):
def __init__(self, *args, **kwargs):
self.config_name = kwargs.pop("config_name", "default")
super(RichTextField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {
'form_class': RichTextFormField,
'config_name': self.config_name,
}
defaults.update(kwargs)
return super(RichTextField, self).formfield(**defaults)
class RichTextFormField(forms.fields.Field):
def __init__(self, config_name='default', *args, **kwargs):
kwargs.update({'widget': CKEditorWidget(config_name=config_name)})
super(RichTextFormField, self).__init__(*args, **kwargs)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^ckeditor\.fields\.RichTextField"])
except:
pass
|
from django.db import models
from django import forms
from ckeditor.widgets import CKEditorWidget
class RichTextField(models.TextField):
def __init__(self, *args, **kwargs):
self.config_name = kwargs.pop("config_name", "default")
super(RichTextField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {
'form_class': RichTextFormField,
'config_name': self.config_name,
}
defaults.update(kwargs)
return super(RichTextField, self).formfield(**defaults)
class RichTextFormField(forms.fields.Field):
def __init__(self,config_name ='default', max_length = None, *args, **kwargs):
kwargs.update({'widget': CKEditorWidget(config_name=config_name)})
super(RichTextFormField, self).__init__(*args, **kwargs)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^ckeditor\.fields\.RichTextField"])
except:
pass
|
Fix: RichTextFormField now receives a new parameter max_length for Django 1.7
|
Fix: RichTextFormField now receives a new parameter max_length for Django 1.7
|
Python
|
bsd-3-clause
|
gian88/django-ckeditor-amazon-s3,gian88/django-ckeditor-amazon-s3,gian88/django-ckeditor-amazon-s3,gian88/django-ckeditor-amazon-s3,gian88/django-ckeditor-amazon-s3,gian88/django-ckeditor-amazon-s3
|
from django.db import models
from django import forms
from ckeditor.widgets import CKEditorWidget
class RichTextField(models.TextField):
def __init__(self, *args, **kwargs):
self.config_name = kwargs.pop("config_name", "default")
super(RichTextField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {
'form_class': RichTextFormField,
'config_name': self.config_name,
}
defaults.update(kwargs)
return super(RichTextField, self).formfield(**defaults)
class RichTextFormField(forms.fields.Field):
- def __init__(self, config_name='default', *args, **kwargs):
+ def __init__(self,config_name ='default', max_length = None, *args, **kwargs):
kwargs.update({'widget': CKEditorWidget(config_name=config_name)})
super(RichTextFormField, self).__init__(*args, **kwargs)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^ckeditor\.fields\.RichTextField"])
except:
pass
|
Fix: RichTextFormField now receives a new parameter max_length for Django 1.7
|
## Code Before:
from django.db import models
from django import forms
from ckeditor.widgets import CKEditorWidget
class RichTextField(models.TextField):
def __init__(self, *args, **kwargs):
self.config_name = kwargs.pop("config_name", "default")
super(RichTextField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {
'form_class': RichTextFormField,
'config_name': self.config_name,
}
defaults.update(kwargs)
return super(RichTextField, self).formfield(**defaults)
class RichTextFormField(forms.fields.Field):
def __init__(self, config_name='default', *args, **kwargs):
kwargs.update({'widget': CKEditorWidget(config_name=config_name)})
super(RichTextFormField, self).__init__(*args, **kwargs)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^ckeditor\.fields\.RichTextField"])
except:
pass
## Instruction:
Fix: RichTextFormField now receives a new parameter max_length for Django 1.7
## Code After:
from django.db import models
from django import forms
from ckeditor.widgets import CKEditorWidget
class RichTextField(models.TextField):
def __init__(self, *args, **kwargs):
self.config_name = kwargs.pop("config_name", "default")
super(RichTextField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {
'form_class': RichTextFormField,
'config_name': self.config_name,
}
defaults.update(kwargs)
return super(RichTextField, self).formfield(**defaults)
class RichTextFormField(forms.fields.Field):
def __init__(self,config_name ='default', max_length = None, *args, **kwargs):
kwargs.update({'widget': CKEditorWidget(config_name=config_name)})
super(RichTextFormField, self).__init__(*args, **kwargs)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^ckeditor\.fields\.RichTextField"])
except:
pass
|
from django.db import models
from django import forms
from ckeditor.widgets import CKEditorWidget
class RichTextField(models.TextField):
def __init__(self, *args, **kwargs):
self.config_name = kwargs.pop("config_name", "default")
super(RichTextField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {
'form_class': RichTextFormField,
'config_name': self.config_name,
}
defaults.update(kwargs)
return super(RichTextField, self).formfield(**defaults)
class RichTextFormField(forms.fields.Field):
- def __init__(self, config_name='default', *args, **kwargs):
? -
+ def __init__(self,config_name ='default', max_length = None, *args, **kwargs):
? + +++++++++++++++++++
kwargs.update({'widget': CKEditorWidget(config_name=config_name)})
super(RichTextFormField, self).__init__(*args, **kwargs)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^ckeditor\.fields\.RichTextField"])
except:
pass
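
Per the commit, Django 1.7 began passing max_length into this form field's constructor, so a signature that does not accept it raises TypeError. Accepting and ignoring the keyword is the minimal compatibility fix; a standalone sketch of that shape (illustrative, not the package's real class):

class RichTextFormFieldSketch(object):
    def __init__(self, config_name='default', max_length=None, *args, **kwargs):
        # max_length is accepted purely for compatibility with newer
        # callers and is intentionally unused here.
        self.config_name = config_name

field = RichTextFormFieldSketch(config_name='awesome', max_length=200)
print(field.config_name)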
|
95421d1b71d2f5847bcea439cde79af2a984eda6
|
src/sentry/api/endpoints/project_releases.py
|
src/sentry/api/endpoints/project_releases.py
|
from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.serializers import serialize
from sentry.models import Release
class ProjectReleasesEndpoint(ProjectEndpoint):
doc_section = DocSection.RELEASES
def get(self, request, project):
"""
List a project's releases
Retrieve a list of releases for a given project.
{method} {path}
"""
queryset = Release.objects.filter(
project=project,
).order_by('-date_added')
return self.paginate(
request=request,
queryset=queryset,
# TODO(dcramer): we want to sort by date_added
order_by='-id',
on_results=lambda x: serialize(x, request.user),
)
|
from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.serializers import serialize
from sentry.models import Release
class ProjectReleasesEndpoint(ProjectEndpoint):
doc_section = DocSection.RELEASES
def get(self, request, project):
"""
List a project's releases
Retrieve a list of releases for a given project.
{method} {path}
"""
queryset = Release.objects.filter(
project=project,
)
return self.paginate(
request=request,
queryset=queryset,
order_by='-id',
on_results=lambda x: serialize(x, request.user),
)
|
Maintain project release sort order
|
Maintain project release sort order
|
Python
|
bsd-3-clause
|
zenefits/sentry,ewdurbin/sentry,fotinakis/sentry,wong2/sentry,alexm92/sentry,gencer/sentry,Natim/sentry,1tush/sentry,hongliang5623/sentry,daevaorn/sentry,BuildingLink/sentry,daevaorn/sentry,ngonzalvez/sentry,zenefits/sentry,JamesMura/sentry,ngonzalvez/sentry,pauloschilling/sentry,argonemyth/sentry,wong2/sentry,JamesMura/sentry,jokey2k/sentry,Kryz/sentry,gg7/sentry,kevinlondon/sentry,mvaled/sentry,TedaLIEz/sentry,daevaorn/sentry,Kryz/sentry,gencer/sentry,korealerts1/sentry,korealerts1/sentry,hongliang5623/sentry,jokey2k/sentry,gencer/sentry,wujuguang/sentry,imankulov/sentry,pauloschilling/sentry,drcapulet/sentry,JackDanger/sentry,kevinastone/sentry,JamesMura/sentry,nicholasserra/sentry,jean/sentry,jokey2k/sentry,alexm92/sentry,beeftornado/sentry,looker/sentry,BayanGroup/sentry,fuziontech/sentry,imankulov/sentry,gg7/sentry,drcapulet/sentry,BuildingLink/sentry,felixbuenemann/sentry,JTCunning/sentry,mvaled/sentry,mitsuhiko/sentry,llonchj/sentry,ifduyue/sentry,ifduyue/sentry,vperron/sentry,Natim/sentry,daevaorn/sentry,ifduyue/sentry,looker/sentry,BayanGroup/sentry,felixbuenemann/sentry,ewdurbin/sentry,1tush/sentry,zenefits/sentry,songyi199111/sentry,BuildingLink/sentry,fotinakis/sentry,TedaLIEz/sentry,vperron/sentry,JackDanger/sentry,llonchj/sentry,mvaled/sentry,wujuguang/sentry,gg7/sentry,1tush/sentry,nicholasserra/sentry,songyi199111/sentry,argonemyth/sentry,kevinlondon/sentry,hongliang5623/sentry,boneyao/sentry,wong2/sentry,BayanGroup/sentry,jean/sentry,BuildingLink/sentry,gencer/sentry,mvaled/sentry,boneyao/sentry,fuziontech/sentry,Natim/sentry,drcapulet/sentry,felixbuenemann/sentry,ifduyue/sentry,ngonzalvez/sentry,TedaLIEz/sentry,kevinastone/sentry,nicholasserra/sentry,boneyao/sentry,kevinlondon/sentry,alexm92/sentry,mvaled/sentry,beeftornado/sentry,JamesMura/sentry,wujuguang/sentry,JTCunning/sentry,fotinakis/sentry,zenefits/sentry,JTCunning/sentry,Kryz/sentry,pauloschilling/sentry,BuildingLink/sentry,looker/sentry,fotinakis/sentry,zenefits/sentry,imankulov/sentry,fuziontech/sentry,looker/sentry,vperron/sentry,looker/sentry,JackDanger/sentry,songyi199111/sentry,gencer/sentry,mvaled/sentry,beeftornado/sentry,jean/sentry,ifduyue/sentry,kevinastone/sentry,llonchj/sentry,jean/sentry,ewdurbin/sentry,mitsuhiko/sentry,jean/sentry,argonemyth/sentry,korealerts1/sentry,JamesMura/sentry
|
from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.serializers import serialize
from sentry.models import Release
class ProjectReleasesEndpoint(ProjectEndpoint):
doc_section = DocSection.RELEASES
def get(self, request, project):
"""
List a project's releases
Retrieve a list of releases for a given project.
{method} {path}
"""
queryset = Release.objects.filter(
project=project,
- ).order_by('-date_added')
+ )
return self.paginate(
request=request,
queryset=queryset,
- # TODO(dcramer): we want to sort by date_added
order_by='-id',
on_results=lambda x: serialize(x, request.user),
)
|
Maintain project release sort order
|
## Code Before:
from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.serializers import serialize
from sentry.models import Release
class ProjectReleasesEndpoint(ProjectEndpoint):
doc_section = DocSection.RELEASES
def get(self, request, project):
"""
List a project's releases
Retrieve a list of releases for a given project.
{method} {path}
"""
queryset = Release.objects.filter(
project=project,
).order_by('-date_added')
return self.paginate(
request=request,
queryset=queryset,
# TODO(dcramer): we want to sort by date_added
order_by='-id',
on_results=lambda x: serialize(x, request.user),
)
## Instruction:
Maintain project release sort order
## Code After:
from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.serializers import serialize
from sentry.models import Release
class ProjectReleasesEndpoint(ProjectEndpoint):
doc_section = DocSection.RELEASES
def get(self, request, project):
"""
List a project's releases
Retrieve a list of releases for a given project.
{method} {path}
"""
queryset = Release.objects.filter(
project=project,
)
return self.paginate(
request=request,
queryset=queryset,
order_by='-id',
on_results=lambda x: serialize(x, request.user),
)
|
from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.serializers import serialize
from sentry.models import Release
class ProjectReleasesEndpoint(ProjectEndpoint):
doc_section = DocSection.RELEASES
def get(self, request, project):
"""
List a project's releases
Retrieve a list of releases for a given project.
{method} {path}
"""
queryset = Release.objects.filter(
project=project,
- ).order_by('-date_added')
+ )
return self.paginate(
request=request,
queryset=queryset,
- # TODO(dcramer): we want to sort by date_added
order_by='-id',
on_results=lambda x: serialize(x, request.user),
)
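
The queryset's order_by('-date_added') appears to be superseded by the paginator's order_by='-id' anyway, so the commit keeps a single sort key. For auto-increment primary keys, descending id approximates newest-first; a Django-free illustration:

releases = [
    {'id': 1, 'version': '0.1'},
    {'id': 3, 'version': '0.3'},
    {'id': 2, 'version': '0.2'},
]
# One explicit sort key, analogous to the paginator's order_by='-id';
# for auto-increment ids this approximates newest-first.
newest_first = sorted(releases, key=lambda r: r['id'], reverse=True)
print([r['version'] for r in newest_first])  # ['0.3', '0.2', '0.1']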
|
ceb88623b55cd572d4ef45ec2fb7d81639e07878
|
fancypages/__init__.py
|
fancypages/__init__.py
|
__version__ = (0, 0, 1, 'alpha', 1)
|
import os
__version__ = (0, 0, 1, 'alpha', 1)
FP_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__))
)
|
Add setting for fancypages base template dir
|
Add setting for fancypages base template dir
|
Python
|
bsd-3-clause
|
socradev/django-fancypages,tangentlabs/django-fancypages,socradev/django-fancypages,tangentlabs/django-fancypages,socradev/django-fancypages,tangentlabs/django-fancypages
|
+ import os
+
__version__ = (0, 0, 1, 'alpha', 1)
+ FP_MAIN_TEMPLATE_DIR = os.path.join(
+ os.path.dirname(os.path.abspath(__file__))
+ )
+
|
Add setting for fancypages base template dir
|
## Code Before:
__version__ = (0, 0, 1, 'alpha', 1)
## Instruction:
Add setting for fancypages base template dir
## Code After:
import os
__version__ = (0, 0, 1, 'alpha', 1)
FP_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__))
)
|
+ import os
+
__version__ = (0, 0, 1, 'alpha', 1)
+
+ FP_MAIN_TEMPLATE_DIR = os.path.join(
+ os.path.dirname(os.path.abspath(__file__))
+ )
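
Note that os.path.join with a single argument is a no-op, so the committed constant is just the package directory; a subdirectory segment was presumably meant to follow. A sketch of the likely intent, where the 'templates' name is an assumption rather than anything in the source:

import os

PACKAGE_DIR = os.path.dirname(os.path.abspath(__file__))
# join only does useful work with a second segment; 'templates' is the
# conventional name for a template directory (assumed here).
FP_MAIN_TEMPLATE_DIR = os.path.join(PACKAGE_DIR, 'templates')
print(FP_MAIN_TEMPLATE_DIR)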
|
cc00cc1c2539eb7dbeed2656e1929c8c53c4dd98
|
pyverdict/pyverdict/datatype_converters/impala_converter.py
|
pyverdict/pyverdict/datatype_converters/impala_converter.py
|
from .converter_base import DatatypeConverterBase
import dateutil
def _str_to_datetime(java_obj, idx):
return dateutil.parser.parse(java_obj.getString(idx))
_typename_to_converter_fxn = {'timestamp': _str_to_datetime}
class ImpalaConverter(DatatypeConverterBase):
@staticmethod
def read_value(result_set, index, col_typename):
if col_typename in _typename_to_converter_fxn:
if result_set.getString(index) is None:
return None
return _typename_to_converter_fxn[col_typename](result_set, index)
else:
return result_set.getValue(index)
|
from .converter_base import DatatypeConverterBase
import dateutil
def _str_to_datetime(java_obj, idx):
return dateutil.parser.parse(java_obj.getString(idx))
_typename_to_converter_fxn = {'timestamp': _str_to_datetime}
class ImpalaConverter(DatatypeConverterBase):
'''
Type conversion rule:
BIGINT => int,
BOOLEAN => bool,
CHAR => str,
DECIMAL => decimal.Decimal,
DOUBLE => float,
FLOAT => float,
REAL => float,
SMALLINT => int,
STRING => str,
TIMESTAMP => datetime.datetime,
TINYINT => int,
VARCHAR => str
'''
@staticmethod
def read_value(result_set, index, col_typename):
if col_typename in _typename_to_converter_fxn:
if result_set.getString(index) is None:
return None
return _typename_to_converter_fxn[col_typename](result_set, index)
else:
return result_set.getValue(index)
|
Add type conversion rule comment
|
Add type conversion rule comment
|
Python
|
apache-2.0
|
mozafari/verdict,mozafari/verdict,mozafari/verdict,mozafari/verdict,mozafari/verdict
|
from .converter_base import DatatypeConverterBase
import dateutil
def _str_to_datetime(java_obj, idx):
return dateutil.parser.parse(java_obj.getString(idx))
_typename_to_converter_fxn = {'timestamp': _str_to_datetime}
class ImpalaConverter(DatatypeConverterBase):
+ '''
+ Type conversion rule:
+
+ BIGINT => int,
+ BOOLEAN => bool,
+ CHAR => str,
+ DECIMAL => decimal.Decimal,
+ DOUBLE => float,
+ FLOAT => float,
+ REAL => float,
+ SMALLINT => int,
+ STRING => str,
+ TIMESTAMP => datetime.datetime,
+ TINYINT => int,
+ VARCHAR => str
+
+ '''
+
@staticmethod
def read_value(result_set, index, col_typename):
if col_typename in _typename_to_converter_fxn:
if result_set.getString(index) is None:
return None
return _typename_to_converter_fxn[col_typename](result_set, index)
else:
return result_set.getValue(index)
|
Add type conversion rule comment
|
## Code Before:
from .converter_base import DatatypeConverterBase
import dateutil
def _str_to_datetime(java_obj, idx):
return dateutil.parser.parse(java_obj.getString(idx))
_typename_to_converter_fxn = {'timestamp': _str_to_datetime}
class ImpalaConverter(DatatypeConverterBase):
@staticmethod
def read_value(result_set, index, col_typename):
if col_typename in _typename_to_converter_fxn:
if result_set.getString(index) is None:
return None
return _typename_to_converter_fxn[col_typename](result_set, index)
else:
return result_set.getValue(index)
## Instruction:
Add type conversion rule comment
## Code After:
from .converter_base import DatatypeConverterBase
import dateutil
def _str_to_datetime(java_obj, idx):
return dateutil.parser.parse(java_obj.getString(idx))
_typename_to_converter_fxn = {'timestamp': _str_to_datetime}
class ImpalaConverter(DatatypeConverterBase):
'''
Type conversion rule:
BIGINT => int,
BOOLEAN => bool,
CHAR => str,
DECIMAL => decimal.Decimal,
DOUBLE => float,
FLOAT => float,
REAL => float,
SMALLINT => int,
STRING => str,
TIMESTAMP => datetime.datetime,
TINYINT => int,
VARCHAR => str
'''
@staticmethod
def read_value(result_set, index, col_typename):
if col_typename in _typename_to_converter_fxn:
if result_set.getString(index) is None:
return None
return _typename_to_converter_fxn[col_typename](result_set, index)
else:
return result_set.getValue(index)
|
from .converter_base import DatatypeConverterBase
import dateutil
def _str_to_datetime(java_obj, idx):
return dateutil.parser.parse(java_obj.getString(idx))
_typename_to_converter_fxn = {'timestamp': _str_to_datetime}
class ImpalaConverter(DatatypeConverterBase):
+ '''
+ Type conversion rule:
+
+ BIGINT => int,
+ BOOLEAN => bool,
+ CHAR => str,
+ DECIMAL => decimal.Decimal,
+ DOUBLE => float,
+ FLOAT => float,
+ REAL => float,
+ SMALLINT => int,
+ STRING => str,
+ TIMESTAMP => datetime.datetime,
+ TINYINT => int,
+ VARCHAR => str
+
+ '''
+
@staticmethod
def read_value(result_set, index, col_typename):
if col_typename in _typename_to_converter_fxn:
if result_set.getString(index) is None:
return None
return _typename_to_converter_fxn[col_typename](result_set, index)
else:
return result_set.getValue(index)
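
The converter maps a column type name to a parsing callable and falls back to the driver's generic getter for everything else. That dispatch-table shape is easy to exercise in isolation (all names below are illustrative):

import datetime

def _to_datetime(raw):
    return datetime.datetime.strptime(raw, '%Y-%m-%d %H:%M:%S')

# Type names map to converter callables; unknown types fall through.
_converters = {'timestamp': _to_datetime}

def read_value(raw, col_typename):
    if raw is None:
        return None
    converter = _converters.get(col_typename)
    return converter(raw) if converter else raw

print(read_value('2019-02-20 14:40:26', 'timestamp'))
print(read_value('42', 'string'))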
|
85220f2830d355245803965ee57886e5c1268833
|
tests/unit/test_twitter.py
|
tests/unit/test_twitter.py
|
from unfurl import Unfurl
import unittest
class TestTwitter(unittest.TestCase):
def test_twitter(self):
""" Test a tyipcal and a unique Discord url """
# unit test for a unique Discord url.
test = Unfurl()
test.add_to_queue(data_type='url', key=None,
value='https://twitter.com/_RyanBenson/status/1098230906194546688')
test.parse_queue()
# test number of nodes
self.assertEqual(len(test.nodes.keys()), 13)
self.assertEqual(test.total_nodes, 13)
# is processing finished empty
self.assertTrue(test.queue.empty())
self.assertEqual(len(test.edges), 0)
if __name__ == '__main__':
unittest.main()
|
from unfurl import Unfurl
import unittest
class TestTwitter(unittest.TestCase):
def test_twitter(self):
""" Test a typical and a unique Twitter url """
test = Unfurl()
test.add_to_queue(
data_type='url', key=None,
value='https://twitter.com/_RyanBenson/status/1098230906194546688')
test.parse_queue()
# check the number of nodes
self.assertEqual(len(test.nodes.keys()), 13)
self.assertEqual(test.total_nodes, 13)
# confirm that snowflake was detected
self.assertIn('Twitter Snowflakes', test.nodes[9].hover)
# embedded timestamp parses correctly
self.assertEqual('2019-02-20 14:40:26.837', test.nodes[13].value)
# make sure the queue finished empty
self.assertTrue(test.queue.empty())
self.assertEqual(len(test.edges), 0)
if __name__ == '__main__':
unittest.main()
|
Update Twitter test to be more robust
|
Update Twitter test to be more robust
|
Python
|
apache-2.0
|
obsidianforensics/unfurl,obsidianforensics/unfurl
|
from unfurl import Unfurl
import unittest
+
class TestTwitter(unittest.TestCase):
def test_twitter(self):
- """ Test a tyipcal and a unique Discord url """
+ """ Test a typical and a unique Twitter url """
+
-
- # unit test for a unique Discord url.
test = Unfurl()
- test.add_to_queue(data_type='url', key=None,
+ test.add_to_queue(
+ data_type='url', key=None,
value='https://twitter.com/_RyanBenson/status/1098230906194546688')
test.parse_queue()
- # test number of nodes
+ # check the number of nodes
self.assertEqual(len(test.nodes.keys()), 13)
self.assertEqual(test.total_nodes, 13)
- # is processing finished empty
+ # confirm that snowflake was detected
+ self.assertIn('Twitter Snowflakes', test.nodes[9].hover)
+
+ # embedded timestamp parses correctly
+ self.assertEqual('2019-02-20 14:40:26.837', test.nodes[13].value)
+
+ # make sure the queue finished empty
self.assertTrue(test.queue.empty())
self.assertEqual(len(test.edges), 0)
+
if __name__ == '__main__':
unittest.main()
+
|
Update Twitter test to be more robust
|
## Code Before:
from unfurl import Unfurl
import unittest
class TestTwitter(unittest.TestCase):
def test_twitter(self):
""" Test a tyipcal and a unique Discord url """
# unit test for a unique Discord url.
test = Unfurl()
test.add_to_queue(data_type='url', key=None,
value='https://twitter.com/_RyanBenson/status/1098230906194546688')
test.parse_queue()
# test number of nodes
self.assertEqual(len(test.nodes.keys()), 13)
self.assertEqual(test.total_nodes, 13)
# is processing finished empty
self.assertTrue(test.queue.empty())
self.assertEqual(len(test.edges), 0)
if __name__ == '__main__':
unittest.main()
## Instruction:
Update Twitter test to be more robust
## Code After:
from unfurl import Unfurl
import unittest
class TestTwitter(unittest.TestCase):
def test_twitter(self):
""" Test a typical and a unique Twitter url """
test = Unfurl()
test.add_to_queue(
data_type='url', key=None,
value='https://twitter.com/_RyanBenson/status/1098230906194546688')
test.parse_queue()
# check the number of nodes
self.assertEqual(len(test.nodes.keys()), 13)
self.assertEqual(test.total_nodes, 13)
# confirm that snowflake was detected
self.assertIn('Twitter Snowflakes', test.nodes[9].hover)
# embedded timestamp parses correctly
self.assertEqual('2019-02-20 14:40:26.837', test.nodes[13].value)
# make sure the queue finished empty
self.assertTrue(test.queue.empty())
self.assertEqual(len(test.edges), 0)
if __name__ == '__main__':
unittest.main()
|
from unfurl import Unfurl
import unittest
+
class TestTwitter(unittest.TestCase):
def test_twitter(self):
- """ Test a tyipcal and a unique Discord url """
? - ^ ^^^ -
+ """ Test a typical and a unique Twitter url """
? + ^^ ^^^
+
-
- # unit test for a unique Discord url.
test = Unfurl()
- test.add_to_queue(data_type='url', key=None,
+ test.add_to_queue(
+ data_type='url', key=None,
value='https://twitter.com/_RyanBenson/status/1098230906194546688')
test.parse_queue()
- # test number of nodes
? --
+ # check the number of nodes
? ++++++ +
self.assertEqual(len(test.nodes.keys()), 13)
self.assertEqual(test.total_nodes, 13)
- # is processing finished empty
+ # confirm that snowflake was detected
+ self.assertIn('Twitter Snowflakes', test.nodes[9].hover)
+
+ # embedded timestamp parses correctly
+ self.assertEqual('2019-02-20 14:40:26.837', test.nodes[13].value)
+
+ # make sure the queue finished empty
self.assertTrue(test.queue.empty())
self.assertEqual(len(test.edges), 0)
+
if __name__ == '__main__':
unittest.main()
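
The new assertions pin down the interesting behavior: a tweet id is a snowflake whose upper bits encode a millisecond timestamp (the low 22 bits hold worker and sequence data) offset from Twitter's 2010-11-04 epoch. A sketch of that decoding, kept exact with integer math:

import datetime

TWITTER_EPOCH_MS = 1288834974657  # 2010-11-04, per the snowflake scheme

def snowflake_to_datetime(snowflake_id):
    # Drop the 22 worker/sequence bits, then add back the custom epoch.
    ms = (snowflake_id >> 22) + TWITTER_EPOCH_MS
    seconds, millis = divmod(ms, 1000)
    return (datetime.datetime.utcfromtimestamp(seconds)
            + datetime.timedelta(milliseconds=millis))

print(snowflake_to_datetime(1098230906194546688))
# 2019-02-20 14:40:26.837000 -- the value the test above expects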
|
00adc1c77d2bcc231a7f8995558ed86bb8071ae7
|
zun/websocket/websocketclient.py
|
zun/websocket/websocketclient.py
|
import logging
import socket
import websocket
from zun.common import exception
LOG = logging.getLogger(__name__)
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
|
import socket
import websocket
from zun.common import exception
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
|
Remove unused LOG in websocket
|
Remove unused LOG in websocket
Change-Id: Ic45e5e4353dd816fd5416b880aa47df8542b2e02
|
Python
|
apache-2.0
|
kevin-zhaoshuai/zun,kevin-zhaoshuai/zun,kevin-zhaoshuai/zun
|
- import logging
import socket
import websocket
from zun.common import exception
-
-
- LOG = logging.getLogger(__name__)
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
|
Remove unused LOG in websocket
|
## Code Before:
import logging
import socket
import websocket
from zun.common import exception
LOG = logging.getLogger(__name__)
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
## Instruction:
Remove unused LOG in websocket
## Code After:
import socket
import websocket
from zun.common import exception
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
|
- import logging
import socket
import websocket
from zun.common import exception
-
-
- LOG = logging.getLogger(__name__)
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
|
3e5e35aa85e656efbdddddf4c4d2accad964a42b
|
members/elections/serializers.py
|
members/elections/serializers.py
|
from rest_framework import serializers
from .models import Election, Candidate
class CandidatePublicSerializer(serializers.ModelSerializer):
organization = serializers.CharField(source='organization.display_name')
class Meta:
model = Candidate
fields = ('candidate_first_name', 'candidate_last_name', 'candidate_job_title',
'biography', 'vision', 'ideas', 'expertise', 'external_url', 'seat_type', 'organization', 'reason')
|
from rest_framework import serializers
from .models import Election, Candidate
class CandidatePublicSerializer(serializers.ModelSerializer):
organization = serializers.CharField(source='organization.display_name')
expertise = serializers.SerializerMethodField()
class Meta:
model = Candidate
fields = ('candidate_first_name', 'candidate_last_name', 'candidate_job_title',
'biography', 'vision', 'ideas', 'expertise', 'expertise_other', 'expertise_expanded',
'external_url', 'seat_type', 'organization', 'reason')
def get_expertise(self, obj):
return ', '.join(obj.get_expertise_items())
|
Update elections with new APIs
|
Update elections with new APIs
|
Python
|
mit
|
ocwc/ocwc-members,ocwc/ocwc-members,ocwc/ocwc-members,ocwc/ocwc-members
|
from rest_framework import serializers
from .models import Election, Candidate
+
class CandidatePublicSerializer(serializers.ModelSerializer):
- organization = serializers.CharField(source='organization.display_name')
+ organization = serializers.CharField(source='organization.display_name')
+ expertise = serializers.SerializerMethodField()
- class Meta:
- model = Candidate
- fields = ('candidate_first_name', 'candidate_last_name', 'candidate_job_title',
- 'biography', 'vision', 'ideas', 'expertise', 'external_url', 'seat_type', 'organization', 'reason')
+ class Meta:
+ model = Candidate
+ fields = ('candidate_first_name', 'candidate_last_name', 'candidate_job_title',
+ 'biography', 'vision', 'ideas', 'expertise', 'expertise_other', 'expertise_expanded',
+ 'external_url', 'seat_type', 'organization', 'reason')
+
+ def get_expertise(self, obj):
+ return ', '.join(obj.get_expertise_items())
+
|
Update elections with new APIs
|
## Code Before:
from rest_framework import serializers
from .models import Election, Candidate
class CandidatePublicSerializer(serializers.ModelSerializer):
organization = serializers.CharField(source='organization.display_name')
class Meta:
model = Candidate
fields = ('candidate_first_name', 'candidate_last_name', 'candidate_job_title',
'biography', 'vision', 'ideas', 'expertise', 'external_url', 'seat_type', 'organization', 'reason')
## Instruction:
Update elections with new APIs
## Code After:
from rest_framework import serializers
from .models import Election, Candidate
class CandidatePublicSerializer(serializers.ModelSerializer):
organization = serializers.CharField(source='organization.display_name')
expertise = serializers.SerializerMethodField()
class Meta:
model = Candidate
fields = ('candidate_first_name', 'candidate_last_name', 'candidate_job_title',
'biography', 'vision', 'ideas', 'expertise', 'expertise_other', 'expertise_expanded',
'external_url', 'seat_type', 'organization', 'reason')
def get_expertise(self, obj):
return ', '.join(obj.get_expertise_items())
|
from rest_framework import serializers
from .models import Election, Candidate
+
class CandidatePublicSerializer(serializers.ModelSerializer):
- organization = serializers.CharField(source='organization.display_name')
? ^
+ organization = serializers.CharField(source='organization.display_name')
? ^^^^
+ expertise = serializers.SerializerMethodField()
+
- class Meta:
? ^
+ class Meta:
? ^^^^
- model = Candidate
? ^^
+ model = Candidate
? ^^^^^^^^
- fields = ('candidate_first_name', 'candidate_last_name', 'candidate_job_title',
? ^^ -
+ fields = ('candidate_first_name', 'candidate_last_name', 'candidate_job_title',
? ^^^^^^^^
- 'biography', 'vision', 'ideas', 'expertise', 'external_url', 'seat_type', 'organization', 'reason')
+ 'biography', 'vision', 'ideas', 'expertise', 'expertise_other', 'expertise_expanded',
+ 'external_url', 'seat_type', 'organization', 'reason')
+
+ def get_expertise(self, obj):
+ return ', '.join(obj.get_expertise_items())
|
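A minimal standalone sketch of the SerializerMethodField pattern this commit leans on: DRF resolves the field by calling get_<field_name>(obj). FakeCandidate and its expertise items are invented stand-ins for the real Candidate model.

class FakeCandidate:
    def get_expertise_items(self):
        return ["Policy", "Open Licensing"]

def get_expertise(obj):
    # Mirrors CandidatePublicSerializer.get_expertise above.
    return ", ".join(obj.get_expertise_items())

print(get_expertise(FakeCandidate()))  # -> Policy, Open Licensing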
a20ffb81801a5f96af47ccf4bf7fe0133e74102b
|
source/views.py
|
source/views.py
|
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
i18n = self.enum_class.i18n[_id]
context.append({
'id': _id,
'i18n': i18n,
})
return Response(context)
|
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
fields = ('i18n', )
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
enum_context = {'id': _id}
for field in self.fields:
enum_context[field] = getattr(self.enum_class, field)[_id]
context.append(enum_context)
return Response(context)
|
Add possibility to set fields
|
Add possibility to set fields
|
Python
|
mit
|
iktw/django-rest-enum-view
|
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
+ fields = ('i18n', )
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
+ context = []
- context = []
for enum in enums:
_id = enum[1]
- i18n = self.enum_class.i18n[_id]
+ enum_context = {'id': _id}
+ for field in self.fields:
+ enum_context[field] = getattr(self.enum_class, field)[_id]
+
- context.append({
+ context.append(enum_context)
- 'id': _id,
- 'i18n': i18n,
- })
return Response(context)
|
Add possibility to set fields
|
## Code Before:
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
i18n = self.enum_class.i18n[_id]
context.append({
'id': _id,
'i18n': i18n,
})
return Response(context)
## Instruction:
Add possibility to set fields
## Code After:
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
fields = ('i18n', )
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
enum_context = {'id': _id}
for field in self.fields:
enum_context[field] = getattr(self.enum_class, field)[_id]
context.append(enum_context)
return Response(context)
|
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
+ fields = ('i18n', )
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
+ context = []
- context = []
for enum in enums:
_id = enum[1]
- i18n = self.enum_class.i18n[_id]
+ enum_context = {'id': _id}
+ for field in self.fields:
+ enum_context[field] = getattr(self.enum_class, field)[_id]
+
- context.append({
? ^
+ context.append(enum_context)
? ^^^^^^^^^^^^^
- 'id': _id,
- 'i18n': i18n,
- })
return Response(context)
|
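A self-contained sketch of the new field lookup: each name in fields is fetched from the enum class with getattr and indexed by the enum id. The Color class and its values are hypothetical.

class Color:
    i18n = {1: "Red", 2: "Blue"}
    css = {1: "#f00", 2: "#00f"}

    @classmethod
    def get_as_tuple_list(cls):
        return [("RED", 1), ("BLUE", 2)]

fields = ("i18n", "css")
context = []
for enum in Color.get_as_tuple_list():
    _id = enum[1]
    enum_context = {"id": _id}
    for field in fields:
        enum_context[field] = getattr(Color, field)[_id]
    context.append(enum_context)

print(context)  # [{'id': 1, 'i18n': 'Red', 'css': '#f00'}, {'id': 2, ...}]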
6d567ad3eb7749692b05a7685ffbd99f74d965cd
|
manage.py
|
manage.py
|
import os
from flask.ext.script import Manager
from flask.ext.migrate import Migrate
from flask.ext.migrate import MigrateCommand
from flask_security.utils import encrypt_password
from service.models import *
from service import app
from service import db
from service import user_datastore
app.config.from_object(os.environ['SETTINGS'])
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.option('-e', '--email', dest='email')
@manager.option('-p', '--password', dest='password')
def create_user(email, password):
if not user_datastore.find_user(email=email):
user_datastore.create_user(email=email,
password=encrypt_password(password))
db.session.commit()
if __name__ == '__main__':
manager.run()
|
import os
from flask.ext.script import Manager
from flask.ext.migrate import Migrate
from flask.ext.migrate import MigrateCommand
from flask_security.utils import encrypt_password
from service.models import *
from service import app
from service import db
from service import user_datastore
app.config.from_object(os.environ['SETTINGS'])
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.option('--email', dest='email')
@manager.option('--password', dest='password')
def create_user(email, password):
if not user_datastore.find_user(email=email):
user_datastore.create_user(email=email,
password=encrypt_password(password))
db.session.commit()
if __name__ == '__main__':
manager.run()
|
Fix create user command to work locally and on heroku
|
Fix create user command to work locally and on heroku
|
Python
|
mit
|
LandRegistry/service-frontend-alpha,LandRegistry/service-frontend-alpha,LandRegistry/service-frontend-alpha,LandRegistry/service-frontend-alpha,LandRegistry/service-frontend-alpha
|
import os
from flask.ext.script import Manager
from flask.ext.migrate import Migrate
from flask.ext.migrate import MigrateCommand
from flask_security.utils import encrypt_password
from service.models import *
from service import app
from service import db
from service import user_datastore
app.config.from_object(os.environ['SETTINGS'])
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
- @manager.option('-e', '--email', dest='email')
+ @manager.option('--email', dest='email')
- @manager.option('-p', '--password', dest='password')
+ @manager.option('--password', dest='password')
def create_user(email, password):
if not user_datastore.find_user(email=email):
user_datastore.create_user(email=email,
password=encrypt_password(password))
db.session.commit()
if __name__ == '__main__':
manager.run()
|
Fix create user command to work locally and on heroku
|
## Code Before:
import os
from flask.ext.script import Manager
from flask.ext.migrate import Migrate
from flask.ext.migrate import MigrateCommand
from flask_security.utils import encrypt_password
from service.models import *
from service import app
from service import db
from service import user_datastore
app.config.from_object(os.environ['SETTINGS'])
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.option('-e', '--email', dest='email')
@manager.option('-p', '--password', dest='password')
def create_user(email, password):
if not user_datastore.find_user(email=email):
user_datastore.create_user(email=email,
password=encrypt_password(password))
db.session.commit()
if __name__ == '__main__':
manager.run()
## Instruction:
Fix create user command to work locally and on heroku
## Code After:
import os
from flask.ext.script import Manager
from flask.ext.migrate import Migrate
from flask.ext.migrate import MigrateCommand
from flask_security.utils import encrypt_password
from service.models import *
from service import app
from service import db
from service import user_datastore
app.config.from_object(os.environ['SETTINGS'])
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.option('--email', dest='email')
@manager.option('--password', dest='password')
def create_user(email, password):
if not user_datastore.find_user(email=email):
user_datastore.create_user(email=email,
password=encrypt_password(password))
db.session.commit()
if __name__ == '__main__':
manager.run()
|
import os
from flask.ext.script import Manager
from flask.ext.migrate import Migrate
from flask.ext.migrate import MigrateCommand
from flask_security.utils import encrypt_password
from service.models import *
from service import app
from service import db
from service import user_datastore
app.config.from_object(os.environ['SETTINGS'])
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
- @manager.option('-e', '--email', dest='email')
? ------
+ @manager.option('--email', dest='email')
- @manager.option('-p', '--password', dest='password')
? ------
+ @manager.option('--password', dest='password')
def create_user(email, password):
if not user_datastore.find_user(email=email):
user_datastore.create_user(email=email,
password=encrypt_password(password))
db.session.commit()
if __name__ == '__main__':
manager.run()
|
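An argparse analogue (not Flask-Script itself) of the long-option-only interface the fixed command exposes; the sample credentials are invented.

import argparse

parser = argparse.ArgumentParser(prog="manage.py create_user")
parser.add_argument("--email", dest="email", required=True)
parser.add_argument("--password", dest="password", required=True)

args = parser.parse_args(["--email", "a@example.com", "--password", "s3cret"])
print(args.email, args.password)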
76600b63940da9322673ce6cd436129a7d65f10d
|
scripts/ec2/terminate_all.py
|
scripts/ec2/terminate_all.py
|
import boto3
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
|
import boto3
import os
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
|
Add import statement for os
|
Add import statement for os
|
Python
|
bsd-2-clause
|
manpen/thrill,manpen/thrill,manpen/thrill,manpen/thrill,manpen/thrill
|
import boto3
+ import os
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
|
Add import statement for os
|
## Code Before:
import boto3
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
## Instruction:
Add import statement for os
## Code After:
import boto3
import os
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
|
import boto3
+ import os
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
|
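A sketch of the filter construction alone, with no AWS calls, showing why the script needed the os import: the EC2_KEY_NAME branch touches os.environ before boto3 is ever used.

import os

filters = [{"Name": "instance-state-name", "Values": ["running"]}]
if "EC2_KEY_NAME" in os.environ:  # raised NameError before "import os" was added
    filters.append({"Name": "key-name", "Values": [os.environ["EC2_KEY_NAME"]]})
print(filters)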
cec7922ad7636f62be864d115f8e341ac511bbc9
|
numba/tests/foreign_call/test_cffi_call.py
|
numba/tests/foreign_call/test_cffi_call.py
|
import os
import ctypes
import doctest
from numba import *
import numba
try:
import cffi
ffi = cffi.FFI()
except ImportError:
ffi = None
# ______________________________________________________________________
def test():
if ffi is not None:
test_cffi_calls()
# ______________________________________________________________________
# Tests
@autojit(nopython=True)
def call_cffi_func(func, value):
return func(value)
def test_cffi_calls():
# Test printf for nopython and no segfault
ffi.cdef("int printf(char *, ...);")
lib = ffi.dlopen(None)
printf = lib.printf
call_cffi_func(printf, "Hello world!\n")
# ______________________________________________________________________
if __name__ == "__main__":
test()
|
import os
import ctypes
import doctest
from numba import *
import numba
try:
import cffi
ffi = cffi.FFI()
except ImportError:
ffi = None
# ______________________________________________________________________
def test():
if ffi is not None:
test_cffi_calls()
# ______________________________________________________________________
# Tests
@autojit(nopython=True)
def call_cffi_func(func, value):
return func(value)
def test_cffi_calls():
# Test printf for nopython and no segfault
ffi.cdef("int printf(char *, ...);", override=True)
lib = ffi.dlopen(None)
printf = lib.printf
call_cffi_func(printf, "Hello world!\n")
# ______________________________________________________________________
if __name__ == "__main__":
test()
|
Fix CFFI test when executed multiple times
|
Fix CFFI test when executed multiple times
|
Python
|
bsd-2-clause
|
stefanseefeld/numba,jriehl/numba,IntelLabs/numba,stonebig/numba,gmarkall/numba,numba/numba,GaZ3ll3/numba,stefanseefeld/numba,numba/numba,IntelLabs/numba,cpcloud/numba,sklam/numba,cpcloud/numba,IntelLabs/numba,shiquanwang/numba,stuartarchibald/numba,sklam/numba,pitrou/numba,gdementen/numba,gmarkall/numba,seibert/numba,ssarangi/numba,gmarkall/numba,ssarangi/numba,ssarangi/numba,numba/numba,pitrou/numba,jriehl/numba,stefanseefeld/numba,stonebig/numba,jriehl/numba,gdementen/numba,cpcloud/numba,sklam/numba,shiquanwang/numba,pombredanne/numba,pombredanne/numba,pombredanne/numba,gmarkall/numba,stuartarchibald/numba,pombredanne/numba,cpcloud/numba,stuartarchibald/numba,gdementen/numba,GaZ3ll3/numba,seibert/numba,gdementen/numba,seibert/numba,numba/numba,stefanseefeld/numba,ssarangi/numba,stuartarchibald/numba,IntelLabs/numba,cpcloud/numba,gmarkall/numba,gdementen/numba,stonebig/numba,stonebig/numba,stuartarchibald/numba,GaZ3ll3/numba,sklam/numba,seibert/numba,jriehl/numba,seibert/numba,GaZ3ll3/numba,jriehl/numba,pitrou/numba,shiquanwang/numba,GaZ3ll3/numba,sklam/numba,stefanseefeld/numba,IntelLabs/numba,numba/numba,pombredanne/numba,pitrou/numba,pitrou/numba,stonebig/numba,ssarangi/numba
|
import os
import ctypes
import doctest
from numba import *
import numba
try:
import cffi
ffi = cffi.FFI()
except ImportError:
ffi = None
# ______________________________________________________________________
def test():
if ffi is not None:
test_cffi_calls()
# ______________________________________________________________________
# Tests
@autojit(nopython=True)
def call_cffi_func(func, value):
return func(value)
def test_cffi_calls():
# Test printf for nopython and no segfault
- ffi.cdef("int printf(char *, ...);")
+ ffi.cdef("int printf(char *, ...);", override=True)
lib = ffi.dlopen(None)
printf = lib.printf
call_cffi_func(printf, "Hello world!\n")
# ______________________________________________________________________
if __name__ == "__main__":
test()
|
Fix CFFI test when executed multiple times
|
## Code Before:
import os
import ctypes
import doctest
from numba import *
import numba
try:
import cffi
ffi = cffi.FFI()
except ImportError:
ffi = None
# ______________________________________________________________________
def test():
if ffi is not None:
test_cffi_calls()
# ______________________________________________________________________
# Tests
@autojit(nopython=True)
def call_cffi_func(func, value):
return func(value)
def test_cffi_calls():
# Test printf for nopython and no segfault
ffi.cdef("int printf(char *, ...);")
lib = ffi.dlopen(None)
printf = lib.printf
call_cffi_func(printf, "Hello world!\n")
# ______________________________________________________________________
if __name__ == "__main__":
test()
## Instruction:
Fix CFFI test when executed multiple times
## Code After:
import os
import ctypes
import doctest
from numba import *
import numba
try:
import cffi
ffi = cffi.FFI()
except ImportError:
ffi = None
# ______________________________________________________________________
def test():
if ffi is not None:
test_cffi_calls()
# ______________________________________________________________________
# Tests
@autojit(nopython=True)
def call_cffi_func(func, value):
return func(value)
def test_cffi_calls():
# Test printf for nopython and no segfault
ffi.cdef("int printf(char *, ...);", override=True)
lib = ffi.dlopen(None)
printf = lib.printf
call_cffi_func(printf, "Hello world!\n")
# ______________________________________________________________________
if __name__ == "__main__":
test()
|
import os
import ctypes
import doctest
from numba import *
import numba
try:
import cffi
ffi = cffi.FFI()
except ImportError:
ffi = None
# ______________________________________________________________________
def test():
if ffi is not None:
test_cffi_calls()
# ______________________________________________________________________
# Tests
@autojit(nopython=True)
def call_cffi_func(func, value):
return func(value)
def test_cffi_calls():
# Test printf for nopython and no segfault
- ffi.cdef("int printf(char *, ...);")
+ ffi.cdef("int printf(char *, ...);", override=True)
? +++++++++++++++
lib = ffi.dlopen(None)
printf = lib.printf
call_cffi_func(printf, "Hello world!\n")
# ______________________________________________________________________
if __name__ == "__main__":
test()
|
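A sketch (requires cffi installed) of the failure mode the override flag addresses: declaring the same function twice on one FFI instance can be rejected, while override=True replaces the earlier declaration.

import cffi

ffi = cffi.FFI()
ffi.cdef("int printf(char *, ...);")
try:
    ffi.cdef("int printf(char *, ...);")  # re-declaration may be rejected
except Exception as exc:  # the exact exception type varies by cffi version
    print("second cdef failed:", exc)

ffi.cdef("int printf(char *, ...);", override=True)  # accepted silently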
61679e3faf44bc1d54388f617554f03809b2eead
|
gpytorch/kernels/periodic_kernel.py
|
gpytorch/kernels/periodic_kernel.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
nn.Parameter(torch.zeros(1, 1)),
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
return res.exp().unsqueeze(1)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
nn.Parameter(torch.zeros(1, 1, 1)),
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
return res.exp()
|
Fix dimensions of periodic kernel parameters
|
Fix dimensions of periodic kernel parameters
|
Python
|
mit
|
jrg365/gpytorch,jrg365/gpytorch,jrg365/gpytorch
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
- nn.Parameter(torch.zeros(1, 1)),
+ nn.Parameter(torch.zeros(1, 1, 1)),
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
- return res.exp().unsqueeze(1)
+ return res.exp()
|
Fix dimensions of periodic kernel parameters
|
## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
nn.Parameter(torch.zeros(1, 1)),
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
return res.exp().unsqueeze(1)
## Instruction:
Fix dimensions of periodic kernel parameters
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
nn.Parameter(torch.zeros(1, 1, 1)),
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
return res.exp()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
- nn.Parameter(torch.zeros(1, 1)),
+ nn.Parameter(torch.zeros(1, 1, 1)),
? +++
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
- return res.exp().unsqueeze(1)
? -------------
+ return res.exp()
|
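A quick shape check (requires torch) of why the parameter gained a batch dimension and the trailing unsqueeze(1) was dropped; the tensor sizes are made up.

import math
import torch

x1 = torch.randn(2, 5, 1)  # (batch, n, d)
x2 = torch.randn(2, 3, 1)  # (batch, m, d)
period_length = torch.zeros(1, 1, 1).exp()  # same shape as the new parameter

diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = -2 * torch.sin(math.pi * diff / period_length).pow(2)
print(res.exp().shape)  # torch.Size([2, 5, 3]); already batched, no unsqueeze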
a0585269f05189fb9ae4f5abe98cd36731ad8a53
|
babel_util/scripts/json_to_pajek.py
|
babel_util/scripts/json_to_pajek.py
|
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
parser.add_argument('--subject', '-s', help="For WoS, subject must include this.")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
if arguments.subject and arguments.subject not in entry["subject"]:
continue
for citation in entry["citations"]:
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
|
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
parser.add_argument('--subject', '-s', help="For WoS, subject must include this. Can be a comma separated list.")
parser.add_argument('--wos-only', help="For WoS, exclude any citations or ids that contain a dot (.)", action="store_true")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
subjects = None
if arguments.subject:
subjects = set(arguments.subject.split(","))
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
if arguments.wos_only and '.' in entry["id"]:
continue
if subjects and not subjects.intersection(entry["subject"]):
continue
for citation in entry["citations"]:
if arguments.wos_only and '.' in citation:
continue
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
|
Support for multiple subjects and filtering out non-wos ids
|
Support for multiple subjects and filtering out non-wos ids
|
Python
|
agpl-3.0
|
jevinw/rec_utilities,jevinw/rec_utilities
|
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
- parser.add_argument('--subject', '-s', help="For WoS, subject must include this.")
+ parser.add_argument('--subject', '-s', help="For WoS, subject must include this. Can be a comma separated list.")
+ parser.add_argument('--wos-only', help="For WoS, exclude any citations or ids that contain a dot (.)", action="store_true")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
+
+ subjects = None
+ if arguments.subject:
+ subjects = set(arguments.subject.split(","))
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
- if arguments.subject and arguments.subject not in entry["subject"]:
+ if arguments.wos_only and '.' in entry["id"]:
+ continue
+
+ if subjects and not subjects.intersection(entry["subject"]):
continue
for citation in entry["citations"]:
+ if arguments.wos_only and '.' in citation:
+ continue
+
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
|
Support for multiple subjects and filtering out non-wos ids
|
## Code Before:
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
parser.add_argument('--subject', '-s', help="For WoS, subject must include this.")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
if arguments.subject and arguments.subject not in entry["subject"]:
continue
for citation in entry["citations"]:
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
## Instruction:
Support for multiple subjects and filtering out non-wos ids
## Code After:
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
parser.add_argument('--subject', '-s', help="For WoS, subject must include this. Can be a comma separated list.")
parser.add_argument('--wos-only', help="For WoS, exclude any citations or ids that contain a dot (.)", action="store_true")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
subjects = None
if arguments.subject:
subjects = set(arguments.subject.split(","))
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
if arguments.wos_only and '.' in entry["id"]:
continue
if subjects and not subjects.intersection(entry["subject"]):
continue
for citation in entry["citations"]:
if arguments.wos_only and '.' in citation:
continue
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
|
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
- parser.add_argument('--subject', '-s', help="For WoS, subject must include this.")
+ parser.add_argument('--subject', '-s', help="For WoS, subject must include this. Can be a comma separated list.")
? +++++++++++++++++++++++++++++++
+ parser.add_argument('--wos-only', help="For WoS, exclude any citations or ids that contain a dot (.)", action="store_true")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
+
+ subjects = None
+ if arguments.subject:
+ subjects = set(arguments.subject.split(","))
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
- if arguments.subject and arguments.subject not in entry["subject"]:
+ if arguments.wos_only and '.' in entry["id"]:
+ continue
+
+ if subjects and not subjects.intersection(entry["subject"]):
continue
for citation in entry["citations"]:
+ if arguments.wos_only and '.' in citation:
+ continue
+
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
|
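A self-contained sketch of the two filters added here; the sample entries and subject list are invented.

subjects = set("Physics,Chemistry".split(","))
wos_only = True

entries = [
    {"id": "WOS:1", "subject": ["Physics"], "citations": ["WOS:2", "10.1000/x"]},
    {"id": "ref.3", "subject": ["Biology"], "citations": ["WOS:4"]},
]

for entry in entries:
    if wos_only and "." in entry["id"]:
        continue  # drops "ref.3"
    if subjects and not subjects.intersection(entry["subject"]):
        continue
    kept = [c for c in entry["citations"] if not (wos_only and "." in c)]
    print(entry["id"], "->", kept)  # WOS:1 -> ['WOS:2']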
936021e0a7b2f23935f4580c5140c1292a37cf82
|
runbot_pylint/__openerp__.py
|
runbot_pylint/__openerp__.py
|
{
'name': 'Runbot Pylint',
'category': 'Website',
'summary': 'Runbot',
'version': '1.0',
'description': "Runbot",
'author': 'OpenERP SA',
'depends': ['runbot'],
'external_dependencies': {
},
'data': [
"view/runbot_pylint_view.xml"
],
'installable': True,
}
|
{
'name': 'Runbot Pylint',
'category': 'Website',
'summary': 'Runbot',
'version': '1.0',
'description': "Runbot",
'author': 'OpenERP SA',
'depends': ['runbot'],
'external_dependencies': {
'bin': ['pylint'],
},
'data': [
"view/runbot_pylint_view.xml"
],
'installable': True,
}
|
Add external dependencies to pylint bin
|
Add external dependencies to pylint bin
|
Python
|
agpl-3.0
|
amoya-dx/runbot-addons
|
{
'name': 'Runbot Pylint',
'category': 'Website',
'summary': 'Runbot',
'version': '1.0',
'description': "Runbot",
'author': 'OpenERP SA',
'depends': ['runbot'],
'external_dependencies': {
+ 'bin': ['pylint'],
},
'data': [
"view/runbot_pylint_view.xml"
],
'installable': True,
}
|
Add external dependencies to pylint bin
|
## Code Before:
{
'name': 'Runbot Pylint',
'category': 'Website',
'summary': 'Runbot',
'version': '1.0',
'description': "Runbot",
'author': 'OpenERP SA',
'depends': ['runbot'],
'external_dependencies': {
},
'data': [
"view/runbot_pylint_view.xml"
],
'installable': True,
}
## Instruction:
Add external dependencies to pylint bin
## Code After:
{
'name': 'Runbot Pylint',
'category': 'Website',
'summary': 'Runbot',
'version': '1.0',
'description': "Runbot",
'author': 'OpenERP SA',
'depends': ['runbot'],
'external_dependencies': {
'bin': ['pylint'],
},
'data': [
"view/runbot_pylint_view.xml"
],
'installable': True,
}
|
{
'name': 'Runbot Pylint',
'category': 'Website',
'summary': 'Runbot',
'version': '1.0',
'description': "Runbot",
'author': 'OpenERP SA',
'depends': ['runbot'],
'external_dependencies': {
+ 'bin': ['pylint'],
},
'data': [
"view/runbot_pylint_view.xml"
],
'installable': True,
}
|
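A rough sketch of what a 'bin' entry in external_dependencies implies (the named executable must be resolvable on PATH), written with shutil.which; this is not OpenERP's actual dependency check.

import shutil

manifest = {"external_dependencies": {"bin": ["pylint"]}}

for binary in manifest["external_dependencies"].get("bin", []):
    path = shutil.which(binary)
    if path is None:
        raise RuntimeError("missing external binary: %s" % binary)
    print(binary, "->", path)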
e9d62c12448822246ad0ed79a90b36dd27429615
|
echidna/demo/server.py
|
echidna/demo/server.py
|
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
self.render("demo.html", api_server="localhost:8888")
|
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
self.render("demo.html",
api_server="localhost:8888",
channels=[
("radio_ga_ga", "Radio Ga Ga"),
("channel_x", "Channel X"),
("major_tom", "Major Tom"),
])
|
Add list of channels to demo.html template context.
|
Add list of channels to demo.html template context.
|
Python
|
bsd-3-clause
|
praekelt/echidna,praekelt/echidna,praekelt/echidna,praekelt/echidna
|
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
- self.render("demo.html", api_server="localhost:8888")
+ self.render("demo.html",
+ api_server="localhost:8888",
+ channels=[
+ ("radio_ga_ga", "Radio Ga Ga"),
+ ("channel_x", "Channel X"),
+ ("major_tom", "Major Tom"),
+ ])
|
Add list of channels to demo.html template context.
|
## Code Before:
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
self.render("demo.html", api_server="localhost:8888")
## Instruction:
Add list of channels to demo.html template context.
## Code After:
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
self.render("demo.html",
api_server="localhost:8888",
channels=[
("radio_ga_ga", "Radio Ga Ga"),
("channel_x", "Channel X"),
("major_tom", "Major Tom"),
])
|
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
- self.render("demo.html", api_server="localhost:8888")
+ self.render("demo.html",
+ api_server="localhost:8888",
+ channels=[
+ ("radio_ga_ga", "Radio Ga Ga"),
+ ("channel_x", "Channel X"),
+ ("major_tom", "Major Tom"),
+ ])
|
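A small sketch of how the template might consume the channels list; the markup is hypothetical and cyclone's template engine is not invoked here.

channels = [
    ("radio_ga_ga", "Radio Ga Ga"),
    ("channel_x", "Channel X"),
    ("major_tom", "Major Tom"),
]

for slug, label in channels:
    print('<option value="%s">%s</option>' % (slug, label))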
35c76035be66516de744cd4266cf705991023cf2
|
logicaldelete/managers.py
|
logicaldelete/managers.py
|
from django.db import models
class LogicalDeletedManager(models.Manager):
"""
A manager that serves as the default manager for `logicaldelete.models.Model`
providing the filtering out of logically deleted objects. In addition, it
provides named querysets for getting the deleted objects.
"""
def get_query_set(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set().filter(
date_removed__isnull=True
)
def all_with_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set()
def only_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set().filter(
date_removed__isnull=False
)
def get(self, *args, **kwargs):
return self.all_with_deleted().get(*args, **kwargs)
def filter(self, *args, **kwargs):
if "pk" in kwargs:
return self.all_with_deleted().filter(*args, **kwargs)
return self.get_query_set().filter(*args, **kwargs)
|
from django.db import models
from logicaldelete.query import LogicalDeleteQuerySet
class LogicalDeletedManager(models.Manager):
"""
A manager that serves as the default manager for `logicaldelete.models.Model`
providing the filtering out of logically deleted objects. In addition, it
provides named querysets for getting the deleted objects.
"""
def get_query_set(self):
if self.model:
return LogicalDeleteQuerySet(self.model, using=self._db).filter(
date_removed__isnull=True
)
def all_with_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set()
def only_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set().filter(
date_removed__isnull=False
)
def get(self, *args, **kwargs):
return self.all_with_deleted().get(*args, **kwargs)
def filter(self, *args, **kwargs):
if "pk" in kwargs:
return self.all_with_deleted().filter(*args, **kwargs)
return self.get_query_set().filter(*args, **kwargs)
|
Make sure QuerySet.delete() operation does not bypass protection
|
Make sure QuerySet.delete() operation does not bypass protection
This fixes #1
|
Python
|
bsd-3-clause
|
angvp/django-logicaldelete,angvp/django-logical-delete,angvp/django-logicaldelete,Ubiwhere/pinax-models,angvp/django-logical-delete,naringas/pinax-models,pombredanne/django-logicaldelete,pinax/pinax-models
|
from django.db import models
+
+ from logicaldelete.query import LogicalDeleteQuerySet
class LogicalDeletedManager(models.Manager):
"""
A manager that serves as the default manager for `logicaldelete.models.Model`
providing the filtering out of logically deleted objects. In addition, it
provides named querysets for getting the deleted objects.
"""
def get_query_set(self):
if self.model:
- return super(LogicalDeletedManager, self).get_query_set().filter(
+ return LogicalDeleteQuerySet(self.model, using=self._db).filter(
date_removed__isnull=True
)
def all_with_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set()
def only_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set().filter(
date_removed__isnull=False
)
def get(self, *args, **kwargs):
return self.all_with_deleted().get(*args, **kwargs)
def filter(self, *args, **kwargs):
if "pk" in kwargs:
return self.all_with_deleted().filter(*args, **kwargs)
return self.get_query_set().filter(*args, **kwargs)
|
Make sure QuerySet.delete() operation does not bypass protection
|
## Code Before:
from django.db import models
class LogicalDeletedManager(models.Manager):
"""
A manager that serves as the default manager for `logicaldelete.models.Model`
providing the filtering out of logically deleted objects. In addition, it
provides named querysets for getting the deleted objects.
"""
def get_query_set(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set().filter(
date_removed__isnull=True
)
def all_with_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set()
def only_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set().filter(
date_removed__isnull=False
)
def get(self, *args, **kwargs):
return self.all_with_deleted().get(*args, **kwargs)
def filter(self, *args, **kwargs):
if "pk" in kwargs:
return self.all_with_deleted().filter(*args, **kwargs)
return self.get_query_set().filter(*args, **kwargs)
## Instruction:
Make sure QuerySet.delete() operation does not bypass protection
## Code After:
from django.db import models
from logicaldelete.query import LogicalDeleteQuerySet
class LogicalDeletedManager(models.Manager):
"""
A manager that serves as the default manager for `logicaldelete.models.Model`
providing the filtering out of logically deleted objects. In addition, it
provides named querysets for getting the deleted objects.
"""
def get_query_set(self):
if self.model:
return LogicalDeleteQuerySet(self.model, using=self._db).filter(
date_removed__isnull=True
)
def all_with_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set()
def only_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set().filter(
date_removed__isnull=False
)
def get(self, *args, **kwargs):
return self.all_with_deleted().get(*args, **kwargs)
def filter(self, *args, **kwargs):
if "pk" in kwargs:
return self.all_with_deleted().filter(*args, **kwargs)
return self.get_query_set().filter(*args, **kwargs)
|
from django.db import models
+
+ from logicaldelete.query import LogicalDeleteQuerySet
class LogicalDeletedManager(models.Manager):
"""
A manager that serves as the default manager for `logicaldelete.models.Model`
providing the filtering out of logically deleted objects. In addition, it
provides named querysets for getting the deleted objects.
"""
def get_query_set(self):
if self.model:
- return super(LogicalDeletedManager, self).get_query_set().filter(
+ return LogicalDeleteQuerySet(self.model, using=self._db).filter(
date_removed__isnull=True
)
def all_with_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set()
def only_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set().filter(
date_removed__isnull=False
)
def get(self, *args, **kwargs):
return self.all_with_deleted().get(*args, **kwargs)
def filter(self, *args, **kwargs):
if "pk" in kwargs:
return self.all_with_deleted().filter(*args, **kwargs)
return self.get_query_set().filter(*args, **kwargs)
|
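A hedged sketch of what logicaldelete.query.LogicalDeleteQuerySet presumably provides so that bulk deletes stop bypassing the soft-delete scheme; the package's real implementation may differ. Requires Django installed.

from django.db.models.query import QuerySet
from django.utils import timezone

class LogicalDeleteQuerySet(QuerySet):
    def delete(self):
        # Stamp date_removed instead of issuing a SQL DELETE, so
        # queryset-level deletes respect the logical-delete convention.
        return self.update(date_removed=timezone.now())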