| Column | Type |
|---|---|
| commit | stringlengths 40-40 |
| old_file | stringlengths 4-234 |
| new_file | stringlengths 4-234 |
| old_contents | stringlengths 10-3.01k |
| new_contents | stringlengths 19-3.38k |
| subject | stringlengths 16-736 |
| message | stringlengths 17-2.63k |
| lang | stringclasses 4 values |
| license | stringclasses 13 values |
| repos | stringlengths 5-82.6k |
| config | stringclasses 4 values |
| content | stringlengths 134-4.41k |
| fuzzy_diff | stringlengths 29-3.44k |
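The table above lists the dataset's columns; the rows that follow are shown verbatim. As a minimal sketch of how these rows could be loaded and inspected (assuming the Hugging Face `datasets` library; the repository id and split name below are placeholders, not part of this card):

```python
# Minimal sketch: load the dataset and peek at a few rows.
# "your-org/your-commit-dataset" and the "train" split are hypothetical --
# substitute the actual repository id and split for this dataset.
from datasets import load_dataset

ds = load_dataset("your-org/your-commit-dataset", split="train")

for row in ds.select(range(3)):
    # Each row pairs a file's contents before and after a commit, along with
    # the commit subject/message, language/license metadata, and a fuzzy diff.
    print(row["commit"], row["old_file"], row["lang"], row["license"])
    print(row["subject"])
    print(row["fuzzy_diff"][:200])
```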
99dd45582cba9f54a5cc9042812d255fe57b1222
|
oauthclientbridge/__init__.py
|
oauthclientbridge/__init__.py
|
from flask import Flask
from werkzeug.contrib.fixers import ProxyFix
__version__ = '1.0.1'
app = Flask(__name__)
app.config.from_object('oauthclientbridge.default_settings')
app.config.from_envvar('OAUTH_SETTINGS', silent=True)
if app.config['OAUTH_NUM_PROXIES']:
wrapper = ProxyFix(app.wsgi_app, app.config['OAUTH_NUM_PROXIES'])
app.wsgi_app = wrapper # type: ignore
try:
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
if app.config['OAUTH_SENTRY_DSN']:
sentry_sdk.init(
dsn=app.config['OAUTH_SENTRY_DSN'],
integrations=[FlaskIntegration()],
)
except ImportError as e:
app.logger.info('Failed to import sentry: %s', e)
import oauthclientbridge.cli
import oauthclientbridge.logging
import oauthclientbridge.views
|
from flask import Flask
try:
from werkzeug.middleware.proxy_fix import ProxyFix
except ImportError:
from werkzeug.contrib.fixers import ProxyFix
__version__ = '1.0.1'
app = Flask(__name__)
app.config.from_object('oauthclientbridge.default_settings')
app.config.from_envvar('OAUTH_SETTINGS', silent=True)
if app.config['OAUTH_NUM_PROXIES']:
wrapper = ProxyFix(app.wsgi_app, app.config['OAUTH_NUM_PROXIES'])
app.wsgi_app = wrapper # type: ignore
try:
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
if app.config['OAUTH_SENTRY_DSN']:
sentry_sdk.init(
dsn=app.config['OAUTH_SENTRY_DSN'],
integrations=[FlaskIntegration()],
)
except ImportError as e:
app.logger.info('Failed to import sentry: %s', e)
import oauthclientbridge.cli
import oauthclientbridge.logging
import oauthclientbridge.views
|
Support new location of ProxyFix helper
|
Support new location of ProxyFix helper
|
Python
|
apache-2.0
|
adamcik/oauthclientbridge
|
python
|
## Code Before:
from flask import Flask
from werkzeug.contrib.fixers import ProxyFix
__version__ = '1.0.1'
app = Flask(__name__)
app.config.from_object('oauthclientbridge.default_settings')
app.config.from_envvar('OAUTH_SETTINGS', silent=True)
if app.config['OAUTH_NUM_PROXIES']:
wrapper = ProxyFix(app.wsgi_app, app.config['OAUTH_NUM_PROXIES'])
app.wsgi_app = wrapper # type: ignore
try:
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
if app.config['OAUTH_SENTRY_DSN']:
sentry_sdk.init(
dsn=app.config['OAUTH_SENTRY_DSN'],
integrations=[FlaskIntegration()],
)
except ImportError as e:
app.logger.info('Failed to import sentry: %s', e)
import oauthclientbridge.cli
import oauthclientbridge.logging
import oauthclientbridge.views
## Instruction:
Support new location of ProxyFix helper
## Code After:
from flask import Flask
try:
from werkzeug.middleware.proxy_fix import ProxyFix
except ImportError:
from werkzeug.contrib.fixers import ProxyFix
__version__ = '1.0.1'
app = Flask(__name__)
app.config.from_object('oauthclientbridge.default_settings')
app.config.from_envvar('OAUTH_SETTINGS', silent=True)
if app.config['OAUTH_NUM_PROXIES']:
wrapper = ProxyFix(app.wsgi_app, app.config['OAUTH_NUM_PROXIES'])
app.wsgi_app = wrapper # type: ignore
try:
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
if app.config['OAUTH_SENTRY_DSN']:
sentry_sdk.init(
dsn=app.config['OAUTH_SENTRY_DSN'],
integrations=[FlaskIntegration()],
)
except ImportError as e:
app.logger.info('Failed to import sentry: %s', e)
import oauthclientbridge.cli
import oauthclientbridge.logging
import oauthclientbridge.views
|
// ... existing code ...
from flask import Flask
try:
from werkzeug.middleware.proxy_fix import ProxyFix
except ImportError:
from werkzeug.contrib.fixers import ProxyFix
__version__ = '1.0.1'
// ... rest of the code ...
|
2baed20067fed71987bf7582fa9c9a5e53a63cb5
|
python/ql/test/experimental/library-tests/frameworks/stdlib/SafeAccessCheck.py
|
python/ql/test/experimental/library-tests/frameworks/stdlib/SafeAccessCheck.py
|
s = "taintedString"
if s.startswith("tainted"): # $checks=s $branch=true
pass
|
s = "taintedString"
if s.startswith("tainted"): # $checks=s $branch=true
pass
sw = s.startswith # $f-:checks=s $f-:branch=true
if sw("safe"):
pass
|
Test false negative from review
|
Python: Test false negative from review
|
Python
|
mit
|
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
|
python
|
## Code Before:
s = "taintedString"
if s.startswith("tainted"): # $checks=s $branch=true
pass
## Instruction:
Python: Test false negative from review
## Code After:
s = "taintedString"
if s.startswith("tainted"): # $checks=s $branch=true
pass
sw = s.startswith # $f-:checks=s $f-:branch=true
if sw("safe"):
pass
|
...
if s.startswith("tainted"): # $checks=s $branch=true
pass
sw = s.startswith # $f-:checks=s $f-:branch=true
if sw("safe"):
pass
...
|
0197553740ff6b542515cb53ce816d629e7b5648
|
tspapi/__init__.py
|
tspapi/__init__.py
|
from __future__ import absolute_import
from tspapi.api_call import _ApiCall
from tspapi.api import API
from tspapi.measurement import Measurement
from tspapi.api_exception import ConnectionError
from tspapi.api_exception import HTTPResponseError
from tspapi.source import Source
from tspapi.event import RawEvent
from tspapi.event import Event
from tspapi.metric import Metric
|
from __future__ import absolute_import
from tspapi.api_exception import ConnectionError
from tspapi.api_exception import HTTPResponseError
from tspapi.measurement import Measurement
from tspapi.source import Source
from tspapi.event import RawEvent
from tspapi.event import Event
from tspapi.metric import Metric
from tspapi.api_call import _ApiCall
from tspapi.api import API
|
Rearrange imports for proper dependencies
|
Rearrange imports for proper dependencies
|
Python
|
apache-2.0
|
jdgwartney/pulse-api-python
|
python
|
## Code Before:
from __future__ import absolute_import
from tspapi.api_call import _ApiCall
from tspapi.api import API
from tspapi.measurement import Measurement
from tspapi.api_exception import ConnectionError
from tspapi.api_exception import HTTPResponseError
from tspapi.source import Source
from tspapi.event import RawEvent
from tspapi.event import Event
from tspapi.metric import Metric
## Instruction:
Rearrange imports for proper dependencies
## Code After:
from __future__ import absolute_import
from tspapi.api_exception import ConnectionError
from tspapi.api_exception import HTTPResponseError
from tspapi.measurement import Measurement
from tspapi.source import Source
from tspapi.event import RawEvent
from tspapi.event import Event
from tspapi.metric import Metric
from tspapi.api_call import _ApiCall
from tspapi.api import API
|
...
from __future__ import absolute_import
from tspapi.api_exception import ConnectionError
from tspapi.api_exception import HTTPResponseError
from tspapi.measurement import Measurement
from tspapi.source import Source
from tspapi.event import RawEvent
from tspapi.event import Event
from tspapi.metric import Metric
from tspapi.api_call import _ApiCall
from tspapi.api import API
...
|
9abbb0b79da1466d2719496b479e43a74e798b97
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="librobinson",
version="0.1",
packages=find_packages(),
scripts=['robinson'],
# Project uses reStructuredText, so ensure that the docutils get
# installed or upgraded on the target machine
#install_requires=['docutils>=0.3'],
package_data={
# If any package contains *.txt or *.md files, include them:
'': ['*.txt', '*.md'],
# And include any *.msg files found in the 'hello' package, too:
#'hello': ['*.msg'],
},
# metadata for upload to PyPI
author="Ulrik Sandborg-Petersen",
author_email="[email protected]",
description="A library to parse and convert the New Testament Greek files of Dr. Maurice A. Robinson",
license="MIT",
keywords="Maurice A. Robinson, New Testament Greek, parse, convert",
url="http://github.com/byztxt/librobinson"
# could also include long_description, download_url, classifiers, etc.
)
|
from setuptools import setup, find_packages
setup(
name="librobinson",
version="0.2.0",
packages=find_packages(),
scripts=[
'robinson/booknames.py',
'robinson/book.py',
'robinson/chapter.py',
'robinson/convert.py',
'robinson/__init__.py',
'robinson/kind.py',
'robinson/reader.py',
'robinson/readwhat.py',
'robinson/robinson.py',
'robinson/robinsontags.py',
'robinson/variant.py',
'robinson/verse.py',
'robinson/word.py',
],
# Project uses reStructuredText, so ensure that the docutils get
# installed or upgraded on the target machine
#install_requires=['docutils>=0.3'],
package_data={
# If any package contains *.txt or *.md files, include them:
'': ['*.txt', '*.md'],
# And include any *.msg files found in the 'hello' package, too:
#'hello': ['*.msg'],
},
# metadata for upload to PyPI
author="Ulrik Sandborg-Petersen",
author_email="[email protected]",
description="A library to parse and convert the New Testament Greek files of Dr. Maurice A. Robinson",
license="MIT",
keywords="Maurice A. Robinson, New Testament Greek, parse, convert",
url="http://github.com/byztxt/librobinson"
# could also include long_description, download_url, classifiers, etc.
)
|
Add all python files explicitly, and bump to version 0.2.0
|
Add all python files explicitly, and bump to version 0.2.0
|
Python
|
mit
|
byztxt/librobinson
|
python
|
## Code Before:
from setuptools import setup, find_packages
setup(
name="librobinson",
version="0.1",
packages=find_packages(),
scripts=['robinson'],
# Project uses reStructuredText, so ensure that the docutils get
# installed or upgraded on the target machine
#install_requires=['docutils>=0.3'],
package_data={
# If any package contains *.txt or *.md files, include them:
'': ['*.txt', '*.md'],
# And include any *.msg files found in the 'hello' package, too:
#'hello': ['*.msg'],
},
# metadata for upload to PyPI
author="Ulrik Sandborg-Petersen",
author_email="[email protected]",
description="A library to parse and convert the New Testament Greek files of Dr. Maurice A. Robinson",
license="MIT",
keywords="Maurice A. Robinson, New Testament Greek, parse, convert",
url="http://github.com/byztxt/librobinson"
# could also include long_description, download_url, classifiers, etc.
)
## Instruction:
Add all python files explicitly, and bump to version 0.2.0
## Code After:
from setuptools import setup, find_packages
setup(
name="librobinson",
version="0.2.0",
packages=find_packages(),
scripts=[
'robinson/booknames.py',
'robinson/book.py',
'robinson/chapter.py',
'robinson/convert.py',
'robinson/__init__.py',
'robinson/kind.py',
'robinson/reader.py',
'robinson/readwhat.py',
'robinson/robinson.py',
'robinson/robinsontags.py',
'robinson/variant.py',
'robinson/verse.py',
'robinson/word.py',
],
# Project uses reStructuredText, so ensure that the docutils get
# installed or upgraded on the target machine
#install_requires=['docutils>=0.3'],
package_data={
# If any package contains *.txt or *.md files, include them:
'': ['*.txt', '*.md'],
# And include any *.msg files found in the 'hello' package, too:
#'hello': ['*.msg'],
},
# metadata for upload to PyPI
author="Ulrik Sandborg-Petersen",
author_email="[email protected]",
description="A library to parse and convert the New Testament Greek files of Dr. Maurice A. Robinson",
license="MIT",
keywords="Maurice A. Robinson, New Testament Greek, parse, convert",
url="http://github.com/byztxt/librobinson"
# could also include long_description, download_url, classifiers, etc.
)
|
// ... existing code ...
from setuptools import setup, find_packages
setup(
name="librobinson",
version="0.2.0",
packages=find_packages(),
scripts=[
'robinson/booknames.py',
'robinson/book.py',
'robinson/chapter.py',
'robinson/convert.py',
'robinson/__init__.py',
'robinson/kind.py',
'robinson/reader.py',
'robinson/readwhat.py',
'robinson/robinson.py',
'robinson/robinsontags.py',
'robinson/variant.py',
'robinson/verse.py',
'robinson/word.py',
],
# Project uses reStructuredText, so ensure that the docutils get
# installed or upgraded on the target machine
// ... rest of the code ...
|
eca911a1b1623368f991dbf47002c0b59abc15db
|
script/lib/config.py
|
script/lib/config.py
|
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '1df8e7cdac8aa74c91c19ae0691ce512d560ab3e'
|
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = 'aa4874a6bcc51fdd87ca7ae0928514ce83645988'
|
Update libchromiumcontent: Suppress CFAllocator warning.
|
Update libchromiumcontent: Suppress CFAllocator warning.
|
Python
|
mit
|
jjz/electron,Ivshti/electron,howmuchcomputer/electron,simonfork/electron,takashi/electron,SufianHassan/electron,micalan/electron,tylergibson/electron,Gerhut/electron,mattdesl/electron,edulan/electron,edulan/electron,d-salas/electron,gerhardberger/electron,micalan/electron,brenca/electron,stevekinney/electron,pandoraui/electron,rreimann/electron,Jacobichou/electron,SufianHassan/electron,bright-sparks/electron,sshiting/electron,dongjoon-hyun/electron,bpasero/electron,neutrous/electron,mattotodd/electron,shennushi/electron,SufianHassan/electron,dahal/electron,DivyaKMenon/electron,dongjoon-hyun/electron,roadev/electron,aichingm/electron,kokdemo/electron,coderhaoxin/electron,JussMee15/electron,Jonekee/electron,faizalpribadi/electron,pirafrank/electron,mjaniszew/electron,shiftkey/electron,mjaniszew/electron,felixrieseberg/electron,deepak1556/atom-shell,aliib/electron,RIAEvangelist/electron,wan-qy/electron,thingsinjars/electron,takashi/electron,vHanda/electron,cqqccqc/electron,kokdemo/electron,kikong/electron,natgolov/electron,darwin/electron,LadyNaggaga/electron,iftekeriba/electron,d-salas/electron,Zagorakiss/electron,jacksondc/electron,bpasero/electron,eric-seekas/electron,Ivshti/electron,ianscrivener/electron,eric-seekas/electron,farmisen/electron,tonyganch/electron,yalexx/electron,michaelchiche/electron,GoooIce/electron,kcrt/electron,DivyaKMenon/electron,aliib/electron,jtburke/electron,cos2004/electron,Neron-X5/electron,shaundunne/electron,systembugtj/electron,brave/muon,kazupon/electron,jacksondc/electron,coderhaoxin/electron,micalan/electron,kenmozi/electron,arusakov/electron,abhishekgahlot/electron,simonfork/electron,jjz/electron,deed02392/electron,natgolov/electron,gbn972/electron,arturts/electron,preco21/electron,JesselJohn/electron,natgolov/electron,mrwizard82d1/electron,kenmozi/electron,subblue/electron,beni55/electron,yalexx/electron,adcentury/electron,biblerule/UMCTelnetHub,mjaniszew/electron,astoilkov/electron,ervinb/electron,aliib/electron,christian-bromann/electron,astoilkov/electron,fffej/electron,seanchas116/electron,Floato/electron,thompsonemerson/electron,etiktin/electron,shennushi/electron,chrisswk/electron,adamjgray/electron,jjz/electron,iftekeriba/electron,thingsinjars/electron,simongregory/electron,christian-bromann/electron,seanchas116/electron,MaxWhere/electron,gabrielPeart/electron,MaxGraey/electron,trigrass2/electron,soulteary/electron,fabien-d/electron,bobwol/electron,vHanda/electron,neutrous/electron,beni55/electron,voidbridge/electron,miniak/electron,John-Lin/electron,saronwei/electron,brave/electron,jhen0409/electron,tomashanacek/electron,BionicClick/electron,matiasinsaurralde/electron,LadyNaggaga/electron,systembugtj/electron,webmechanicx/electron,micalan/electron,subblue/electron,rhencke/electron,abhishekgahlot/electron,leethomas/electron,abhishekgahlot/electron,vHanda/electron,howmuchcomputer/electron,destan/electron,mirrh/electron,trigrass2/electron,nicobot/electron,pombredanne/electron,jonatasfreitasv/electron,JussMee15/electron,iftekeriba/electron,ervinb/electron,neutrous/electron,takashi/electron,gabrielPeart/electron,preco21/electron,ankitaggarwal011/electron,xiruibing/electron,simongregory/electron,gabriel/electron,the-ress/electron,mjaniszew/electron,gamedevsam/electron,felixrieseberg/electron,bright-sparks/electron,nagyistoce/electron-atom-shell,seanchas116/electron,JussMee15/electron,minggo/electron,synaptek/electron,gerhardberger/electron,zhakui/electron,anko/electron,RobertJGabriel/electron,medixdev/electron,bpasero/electron,jjz/electron,lzpfmh/electro
n,pandoraui/electron,nicholasess/electron,evgenyzinoviev/electron,pirafrank/electron,mirrh/electron,vaginessa/electron,kcrt/electron,pandoraui/electron,timruffles/electron,aaron-goshine/electron,beni55/electron,GoooIce/electron,RobertJGabriel/electron,bitemyapp/electron,kazupon/electron,robinvandernoord/electron,eric-seekas/electron,eriser/electron,MaxWhere/electron,yalexx/electron,lrlna/electron,jannishuebl/electron,Evercoder/electron,natgolov/electron,cqqccqc/electron,digideskio/electron,kenmozi/electron,renaesop/electron,lrlna/electron,kostia/electron,tomashanacek/electron,jlord/electron,stevekinney/electron,joaomoreno/atom-shell,the-ress/electron,chriskdon/electron,farmisen/electron,jjz/electron,shaundunne/electron,vipulroxx/electron,jsutcodes/electron,sshiting/electron,christian-bromann/electron,simongregory/electron,miniak/electron,beni55/electron,christian-bromann/electron,bbondy/electron,tonyganch/electron,voidbridge/electron,setzer777/electron,icattlecoder/electron,kokdemo/electron,chriskdon/electron,jtburke/electron,carsonmcdonald/electron,joneit/electron,destan/electron,tonyganch/electron,twolfson/electron,felixrieseberg/electron,Faiz7412/electron,yalexx/electron,wan-qy/electron,aichingm/electron,jannishuebl/electron,leethomas/electron,sircharleswatson/electron,Jonekee/electron,nagyistoce/electron-atom-shell,miniak/electron,Faiz7412/electron,Andrey-Pavlov/electron,dahal/electron,arturts/electron,setzer777/electron,thompsonemerson/electron,d-salas/electron,evgenyzinoviev/electron,destan/electron,electron/electron,arusakov/electron,fabien-d/electron,matiasinsaurralde/electron,fabien-d/electron,Floato/electron,mrwizard82d1/electron,xiruibing/electron,ervinb/electron,mattotodd/electron,ianscrivener/electron,kikong/electron,nekuz0r/electron,gbn972/electron,medixdev/electron,Jacobichou/electron,tonyganch/electron,vHanda/electron,bright-sparks/electron,gerhardberger/electron,bbondy/electron,timruffles/electron,Gerhut/electron,tylergibson/electron,subblue/electron,nicobot/electron,gamedevsam/electron,posix4e/electron,sircharleswatson/electron,yan-foto/electron,nekuz0r/electron,setzer777/electron,bpasero/electron,brave/electron,pandoraui/electron,posix4e/electron,MaxWhere/electron,rreimann/electron,timruffles/electron,subblue/electron,jlhbaseball15/electron,sshiting/electron,twolfson/electron,gamedevsam/electron,ianscrivener/electron,Jonekee/electron,xfstudio/electron,tomashanacek/electron,evgenyzinoviev/electron,pirafrank/electron,nagyistoce/electron-atom-shell,jacksondc/electron,carsonmcdonald/electron,aichingm/electron,vaginessa/electron,bwiggs/electron,adcentury/electron,MaxWhere/electron,aliib/electron,dongjoon-hyun/electron,Faiz7412/electron,wan-qy/electron,mirrh/electron,subblue/electron,shockone/electron,bobwol/electron,electron/electron,roadev/electron,cqqccqc/electron,shiftkey/electron,edulan/electron,jlord/electron,deed02392/electron,John-Lin/electron,GoooIce/electron,noikiy/electron,digideskio/electron,timruffles/electron,MaxWhere/electron,rprichard/electron,trigrass2/electron,chrisswk/electron,ankitaggarwal011/electron,fabien-d/electron,jonatasfreitasv/electron,rhencke/electron,rreimann/electron,RIAEvangelist/electron,jiaz/electron,carsonmcdonald/electron,kokdemo/electron,webmechanicx/electron,lrlna/electron,yan-foto/electron,edulan/electron,wolfflow/electron,jhen0409/electron,the-ress/electron,edulan/electron,miniak/electron,smczk/electron,maxogden/atom-shell,fomojola/electron,brave/muon,RIAEvangelist/electron,jcblw/electron,jcblw/electron,nekuz0r/electron,Evercoder/electron
,icattlecoder/electron,sky7sea/electron,gamedevsam/electron,fomojola/electron,fritx/electron,electron/electron,shockone/electron,bruce/electron,zhakui/electron,gerhardberger/electron,shennushi/electron,systembugtj/electron,adcentury/electron,arturts/electron,LadyNaggaga/electron,icattlecoder/electron,gamedevsam/electron,astoilkov/electron,gbn972/electron,bruce/electron,mattdesl/electron,mrwizard82d1/electron,brave/electron,mattotodd/electron,deepak1556/atom-shell,kcrt/electron,jlord/electron,rajatsingla28/electron,IonicaBizauKitchen/electron,bpasero/electron,tylergibson/electron,wolfflow/electron,pombredanne/electron,baiwyc119/electron,howmuchcomputer/electron,leolujuyi/electron,brenca/electron,Ivshti/electron,jonatasfreitasv/electron,jannishuebl/electron,jlhbaseball15/electron,synaptek/electron,dkfiresky/electron,greyhwndz/electron,bwiggs/electron,Andrey-Pavlov/electron,miniak/electron,iftekeriba/electron,medixdev/electron,bright-sparks/electron,nicobot/electron,noikiy/electron,Evercoder/electron,mubassirhayat/electron,bwiggs/electron,renaesop/electron,gabriel/electron,trankmichael/electron,soulteary/electron,felixrieseberg/electron,oiledCode/electron,mattotodd/electron,the-ress/electron,thingsinjars/electron,felixrieseberg/electron,tylergibson/electron,nicholasess/electron,minggo/electron,icattlecoder/electron,lrlna/electron,fireball-x/atom-shell,synaptek/electron,jsutcodes/electron,jjz/electron,shockone/electron,smczk/electron,fomojola/electron,yalexx/electron,tinydew4/electron,egoist/electron,mirrh/electron,meowlab/electron,wolfflow/electron,dahal/electron,sircharleswatson/electron,tincan24/electron,Gerhut/electron,voidbridge/electron,jlhbaseball15/electron,cos2004/electron,stevekinney/electron,neutrous/electron,preco21/electron,stevemao/electron,meowlab/electron,gabrielPeart/electron,kikong/electron,faizalpribadi/electron,arusakov/electron,Zagorakiss/electron,mattdesl/electron,tylergibson/electron,SufianHassan/electron,yan-foto/electron,dkfiresky/electron,mirrh/electron,LadyNaggaga/electron,cos2004/electron,kenmozi/electron,stevekinney/electron,RIAEvangelist/electron,brenca/electron,fritx/electron,astoilkov/electron,ianscrivener/electron,carsonmcdonald/electron,biblerule/UMCTelnetHub,Faiz7412/electron,farmisen/electron,arturts/electron,leolujuyi/electron,Neron-X5/electron,digideskio/electron,jtburke/electron,deed02392/electron,Rokt33r/electron,JesselJohn/electron,thompsonemerson/electron,neutrous/electron,yalexx/electron,coderhaoxin/electron,xiruibing/electron,bbondy/electron,the-ress/electron,jacksondc/electron,IonicaBizauKitchen/electron,renaesop/electron,BionicClick/electron,ianscrivener/electron,bpasero/electron,baiwyc119/electron,jannishuebl/electron,joneit/electron,thomsonreuters/electron,trankmichael/electron,eriser/electron,gerhardberger/electron,webmechanicx/electron,leftstick/electron,jaanus/electron,maxogden/atom-shell,Rokt33r/electron,MaxGraey/electron,rajatsingla28/electron,joaomoreno/atom-shell,eriser/electron,leolujuyi/electron,JesselJohn/electron,gamedevsam/electron,digideskio/electron,Jacobichou/electron,etiktin/electron,matiasinsaurralde/electron,robinvandernoord/electron,JussMee15/electron,greyhwndz/electron,rhencke/electron,minggo/electron,anko/electron,jsutcodes/electron,fffej/electron,jtburke/electron,deepak1556/atom-shell,jannishuebl/electron,bwiggs/electron,evgenyzinoviev/electron,tomashanacek/electron,noikiy/electron,vaginessa/electron,faizalpribadi/electron,DivyaKMenon/electron,davazp/electron,dongjoon-hyun/electron,davazp/electron,mattotodd/electron,jtburke/e
lectron,deed02392/electron,fireball-x/atom-shell,RobertJGabriel/electron,Andrey-Pavlov/electron,stevekinney/electron,anko/electron,egoist/electron,takashi/electron,astoilkov/electron,sircharleswatson/electron,dahal/electron,mhkeller/electron,nekuz0r/electron,minggo/electron,coderhaoxin/electron,adamjgray/electron,gabrielPeart/electron,JussMee15/electron,aaron-goshine/electron,tincan24/electron,the-ress/electron,aaron-goshine/electron,jaanus/electron,LadyNaggaga/electron,jsutcodes/electron,stevekinney/electron,abhishekgahlot/electron,mattotodd/electron,Gerhut/electron,adamjgray/electron,tincan24/electron,preco21/electron,saronwei/electron,baiwyc119/electron,ankitaggarwal011/electron,wan-qy/electron,JesselJohn/electron,jsutcodes/electron,posix4e/electron,John-Lin/electron,wan-qy/electron,Andrey-Pavlov/electron,posix4e/electron,gstack/infinium-shell,pirafrank/electron,John-Lin/electron,aaron-goshine/electron,RobertJGabriel/electron,ankitaggarwal011/electron,synaptek/electron,electron/electron,arusakov/electron,leethomas/electron,shiftkey/electron,roadev/electron,miniak/electron,chriskdon/electron,shockone/electron,xfstudio/electron,seanchas116/electron,medixdev/electron,cos2004/electron,Jonekee/electron,rajatsingla28/electron,electron/electron,rsvip/electron,RIAEvangelist/electron,rreimann/electron,Evercoder/electron,gabriel/electron,carsonmcdonald/electron,fireball-x/atom-shell,posix4e/electron,Jacobichou/electron,pombredanne/electron,bruce/electron,DivyaKMenon/electron,brave/electron,smczk/electron,minggo/electron,shaundunne/electron,stevemao/electron,jtburke/electron,subblue/electron,jhen0409/electron,jonatasfreitasv/electron,jiaz/electron,fritx/electron,systembugtj/electron,oiledCode/electron,greyhwndz/electron,baiwyc119/electron,cos2004/electron,rsvip/electron,Floato/electron,mjaniszew/electron,jiaz/electron,Neron-X5/electron,hokein/atom-shell,IonicaBizauKitchen/electron,digideskio/electron,tinydew4/electron,adcentury/electron,fireball-x/atom-shell,sky7sea/electron,nicobot/electron,vipulroxx/electron,biblerule/UMCTelnetHub,nicholasess/electron,LadyNaggaga/electron,ervinb/electron,ankitaggarwal011/electron,biblerule/UMCTelnetHub,aliib/electron,darwin/electron,etiktin/electron,chriskdon/electron,webmechanicx/electron,michaelchiche/electron,MaxGraey/electron,lzpfmh/electron,bitemyapp/electron,iftekeriba/electron,MaxGraey/electron,edulan/electron,brenca/electron,baiwyc119/electron,gabriel/electron,Ivshti/electron,bruce/electron,tinydew4/electron,tinydew4/electron,IonicaBizauKitchen/electron,rhencke/electron,eriser/electron,kazupon/electron,kokdemo/electron,fritx/electron,joaomoreno/atom-shell,benweissmann/electron,bruce/electron,thompsonemerson/electron,fireball-x/atom-shell,fomojola/electron,lrlna/electron,voidbridge/electron,kostia/electron,leftstick/electron,mhkeller/electron,pandoraui/electron,ervinb/electron,meowlab/electron,fffej/electron,aecca/electron,webmechanicx/electron,Gerhut/electron,aaron-goshine/electron,nicobot/electron,sky7sea/electron,trankmichael/electron,tylergibson/electron,JussMee15/electron,mubassirhayat/electron,Floato/electron,rajatsingla28/electron,brave/muon,shennushi/electron,ianscrivener/electron,xfstudio/electron,shockone/electron,Gerhut/electron,coderhaoxin/electron,eric-seekas/electron,medixdev/electron,fritx/electron,meowlab/electron,lzpfmh/electron,davazp/electron,cqqccqc/electron,lzpfmh/electron,GoooIce/electron,sircharleswatson/electron,sshiting/electron,faizalpribadi/electron,egoist/electron,bright-sparks/electron,rreimann/electron,christian-bromann/electr
on,synaptek/electron,voidbridge/electron,stevemao/electron,stevemao/electron,yan-foto/electron,gabrielPeart/electron,timruffles/electron,michaelchiche/electron,vaginessa/electron,Zagorakiss/electron,trigrass2/electron,tincan24/electron,shennushi/electron,thomsonreuters/electron,icattlecoder/electron,tinydew4/electron,Rokt33r/electron,destan/electron,roadev/electron,rsvip/electron,xfstudio/electron,gbn972/electron,egoist/electron,jlhbaseball15/electron,systembugtj/electron,anko/electron,d-salas/electron,leolujuyi/electron,Evercoder/electron,kostia/electron,bobwol/electron,aecca/electron,deed02392/electron,GoooIce/electron,jlord/electron,systembugtj/electron,d-salas/electron,jiaz/electron,mjaniszew/electron,lzpfmh/electron,zhakui/electron,Jonekee/electron,rhencke/electron,howmuchcomputer/electron,vHanda/electron,jlhbaseball15/electron,electron/electron,seanchas116/electron,kikong/electron,kazupon/electron,MaxGraey/electron,shiftkey/electron,michaelchiche/electron,simonfork/electron,micalan/electron,leftstick/electron,voidbridge/electron,jiaz/electron,bobwol/electron,kostia/electron,bbondy/electron,GoooIce/electron,tinydew4/electron,cqqccqc/electron,chrisswk/electron,hokein/atom-shell,robinvandernoord/electron,robinvandernoord/electron,maxogden/atom-shell,John-Lin/electron,jaanus/electron,tincan24/electron,shiftkey/electron,natgolov/electron,astoilkov/electron,pandoraui/electron,mirrh/electron,michaelchiche/electron,rreimann/electron,oiledCode/electron,deed02392/electron,mhkeller/electron,thingsinjars/electron,mattdesl/electron,RobertJGabriel/electron,noikiy/electron,brave/electron,BionicClick/electron,beni55/electron,thomsonreuters/electron,renaesop/electron,matiasinsaurralde/electron,gabrielPeart/electron,simongregory/electron,greyhwndz/electron,dahal/electron,nicobot/electron,tomashanacek/electron,jacksondc/electron,Evercoder/electron,bitemyapp/electron,saronwei/electron,jhen0409/electron,brave/electron,vHanda/electron,benweissmann/electron,brenca/electron,wolfflow/electron,tonyganch/electron,Floato/electron,Ivshti/electron,smczk/electron,deepak1556/atom-shell,shiftkey/electron,hokein/atom-shell,mhkeller/electron,ervinb/electron,zhakui/electron,christian-bromann/electron,aecca/electron,jonatasfreitasv/electron,beni55/electron,yan-foto/electron,joneit/electron,adamjgray/electron,twolfson/electron,zhakui/electron,bruce/electron,digideskio/electron,trankmichael/electron,oiledCode/electron,saronwei/electron,Zagorakiss/electron,rajatsingla28/electron,rprichard/electron,seanchas116/electron,Rokt33r/electron,jacksondc/electron,arusakov/electron,nicholasess/electron,mubassirhayat/electron,nekuz0r/electron,shaundunne/electron,icattlecoder/electron,gstack/infinium-shell,fritx/electron,howmuchcomputer/electron,anko/electron,aliib/electron,leftstick/electron,bitemyapp/electron,wolfflow/electron,rprichard/electron,mrwizard82d1/electron,trigrass2/electron,twolfson/electron,dkfiresky/electron,nagyistoce/electron-atom-shell,shennushi/electron,nicholasess/electron,kikong/electron,kostia/electron,maxogden/atom-shell,natgolov/electron,brave/muon,gerhardberger/electron,arusakov/electron,rsvip/electron,benweissmann/electron,Jacobichou/electron,oiledCode/electron,bwiggs/electron,sshiting/electron,fabien-d/electron,soulteary/electron,the-ress/electron,egoist/electron,kcrt/electron,jhen0409/electron,rprichard/electron,d-salas/electron,fffej/electron,adamjgray/electron,meowlab/electron,darwin/electron,bobwol/electron,renaesop/electron,kostia/electron,adcentury/electron,jcblw/electron,etiktin/electron,saronwei/elect
ron,Zagorakiss/electron,joaomoreno/atom-shell,davazp/electron,thomsonreuters/electron,lrlna/electron,posix4e/electron,joneit/electron,simongregory/electron,thompsonemerson/electron,faizalpribadi/electron,dkfiresky/electron,gstack/infinium-shell,jannishuebl/electron,bobwol/electron,pombredanne/electron,jcblw/electron,howmuchcomputer/electron,Jacobichou/electron,gbn972/electron,Neron-X5/electron,oiledCode/electron,wolfflow/electron,brave/muon,destan/electron,Floato/electron,benweissmann/electron,tincan24/electron,soulteary/electron,kcrt/electron,bitemyapp/electron,simonfork/electron,evgenyzinoviev/electron,trigrass2/electron,biblerule/UMCTelnetHub,coderhaoxin/electron,shockone/electron,iftekeriba/electron,thompsonemerson/electron,jaanus/electron,stevemao/electron,Jonekee/electron,farmisen/electron,medixdev/electron,mrwizard82d1/electron,BionicClick/electron,jaanus/electron,adamjgray/electron,baiwyc119/electron,Andrey-Pavlov/electron,matiasinsaurralde/electron,maxogden/atom-shell,setzer777/electron,benweissmann/electron,nicholasess/electron,electron/electron,MaxWhere/electron,twolfson/electron,biblerule/UMCTelnetHub,gabriel/electron,vaginessa/electron,DivyaKMenon/electron,robinvandernoord/electron,jhen0409/electron,dkfiresky/electron,setzer777/electron,leftstick/electron,vaginessa/electron,rajatsingla28/electron,deepak1556/atom-shell,vipulroxx/electron,bitemyapp/electron,aaron-goshine/electron,davazp/electron,jcblw/electron,mattdesl/electron,wan-qy/electron,trankmichael/electron,trankmichael/electron,farmisen/electron,rsvip/electron,vipulroxx/electron,aichingm/electron,eric-seekas/electron,jsutcodes/electron,micalan/electron,aecca/electron,thomsonreuters/electron,davazp/electron,ankitaggarwal011/electron,fffej/electron,mrwizard82d1/electron,webmechanicx/electron,xiruibing/electron,adcentury/electron,synaptek/electron,matiasinsaurralde/electron,lzpfmh/electron,DivyaKMenon/electron,soulteary/electron,kazupon/electron,RobertJGabriel/electron,sky7sea/electron,chrisswk/electron,IonicaBizauKitchen/electron,Neron-X5/electron,nekuz0r/electron,jonatasfreitasv/electron,michaelchiche/electron,gstack/infinium-shell,etiktin/electron,xfstudio/electron,mubassirhayat/electron,Andrey-Pavlov/electron,chrisswk/electron,nagyistoce/electron-atom-shell,fomojola/electron,smczk/electron,stevemao/electron,kenmozi/electron,carsonmcdonald/electron,eric-seekas/electron,neutrous/electron,thomsonreuters/electron,evgenyzinoviev/electron,simonfork/electron,leethomas/electron,vipulroxx/electron,takashi/electron,leethomas/electron,SufianHassan/electron,dahal/electron,JesselJohn/electron,joneit/electron,sky7sea/electron,jaanus/electron,darwin/electron,simonfork/electron,bpasero/electron,xfstudio/electron,greyhwndz/electron,twolfson/electron,leftstick/electron,aecca/electron,arturts/electron,jiaz/electron,chriskdon/electron,mhkeller/electron,roadev/electron,egoist/electron,sircharleswatson/electron,kazupon/electron,noikiy/electron,bbondy/electron,brenca/electron,joneit/electron,fomojola/electron,thingsinjars/electron,dongjoon-hyun/electron,minggo/electron,tomashanacek/electron,Neron-X5/electron,aecca/electron,dongjoon-hyun/electron,pirafrank/electron,zhakui/electron,greyhwndz/electron,yan-foto/electron,Rokt33r/electron,xiruibing/electron,xiruibing/electron,roadev/electron,soulteary/electron,mhkeller/electron,John-Lin/electron,leolujuyi/electron,gstack/infinium-shell,smczk/electron,destan/electron,setzer777/electron,Zagorakiss/electron,bbondy/electron,gerhardberger/electron,preco21/electron,chriskdon/electron,jcblw/electron,brigh
t-sparks/electron,eriser/electron,Rokt33r/electron,gabriel/electron,leolujuyi/electron,fffej/electron,renaesop/electron,tonyganch/electron,meowlab/electron,aichingm/electron,sky7sea/electron,robinvandernoord/electron,sshiting/electron,vipulroxx/electron,leethomas/electron,benweissmann/electron,darwin/electron,thingsinjars/electron,preco21/electron,abhishekgahlot/electron,joaomoreno/atom-shell,mubassirhayat/electron,kokdemo/electron,farmisen/electron,bwiggs/electron,cos2004/electron,joaomoreno/atom-shell,shaundunne/electron,kcrt/electron,hokein/atom-shell,SufianHassan/electron,felixrieseberg/electron,Faiz7412/electron,eriser/electron,aichingm/electron,mattdesl/electron,RIAEvangelist/electron,gbn972/electron,dkfiresky/electron,takashi/electron,noikiy/electron,IonicaBizauKitchen/electron,jlord/electron,shaundunne/electron,JesselJohn/electron,pirafrank/electron,BionicClick/electron,anko/electron,pombredanne/electron,abhishekgahlot/electron,faizalpribadi/electron,simongregory/electron,rhencke/electron,arturts/electron,hokein/atom-shell,saronwei/electron,cqqccqc/electron,brave/muon,kenmozi/electron,etiktin/electron,pombredanne/electron,jlhbaseball15/electron,BionicClick/electron
|
python
|
## Code Before:
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '1df8e7cdac8aa74c91c19ae0691ce512d560ab3e'
## Instruction:
Update libchromiumcontent: Suppress CFAllocator warning.
## Code After:
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = 'aa4874a6bcc51fdd87ca7ae0928514ce83645988'
|
// ... existing code ...
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = 'aa4874a6bcc51fdd87ca7ae0928514ce83645988'
// ... rest of the code ...
|
0b05101696989a3b88a4a630a6886c5db142175b
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import pydle
setup(
name=pydle.__name__,
version=pydle.__version__,
packages=[
'pydle',
'pydle.features',
'pydle.features.rfc1459',
'pydle.features.ircv3_1',
'pydle.features.ircv3_2',
'pydle.utils'
],
requires=['tornado'],
extras_require={
'SASL': 'pure-sasl >=0.1.6' # for pydle.features.sasl
},
entry_points={
'console_scripts': [
'irccat = pydle.utils.irccat:main',
'pydle = pydle.utils.run:main',
'ipydle = pydle.utils.console:main'
]
},
author='Shiz',
author_email='[email protected]',
url='https://github.com/Shizmob/pydle',
keywords='irc library python3 compact flexible',
description='A compact, flexible and standards-abiding IRC library for Python 3.',
license=pydle.__license__,
zip_safe=True,
test_suite='tests'
)
|
from setuptools import setup, find_packages
import pydle
setup(
name=pydle.__name__,
version=pydle.__version__,
packages=[
'pydle',
'pydle.features',
'pydle.features.rfc1459',
'pydle.features.ircv3_1',
'pydle.features.ircv3_2',
'pydle.utils'
],
requires=['tornado'],
extras_require={
'SASL': 'pure-sasl >=0.1.6' # for pydle.features.sasl
},
entry_points={
'console_scripts': [
'pydle = pydle.utils.run:main',
'ipydle = pydle.utils.console:main',
'pydle-irccat = pydle.utils.irccat:main'
]
},
author='Shiz',
author_email='[email protected]',
url='https://github.com/Shizmob/pydle',
keywords='irc library python3 compact flexible',
description='A compact, flexible and standards-abiding IRC library for Python 3.',
license=pydle.__license__,
zip_safe=True,
test_suite='tests'
)
|
Install irccat as pydle-irccat instead.
|
Install irccat as pydle-irccat instead.
|
Python
|
bsd-3-clause
|
Shizmob/pydle
|
python
|
## Code Before:
from setuptools import setup, find_packages
import pydle
setup(
name=pydle.__name__,
version=pydle.__version__,
packages=[
'pydle',
'pydle.features',
'pydle.features.rfc1459',
'pydle.features.ircv3_1',
'pydle.features.ircv3_2',
'pydle.utils'
],
requires=['tornado'],
extras_require={
'SASL': 'pure-sasl >=0.1.6' # for pydle.features.sasl
},
entry_points={
'console_scripts': [
'irccat = pydle.utils.irccat:main',
'pydle = pydle.utils.run:main',
'ipydle = pydle.utils.console:main'
]
},
author='Shiz',
author_email='[email protected]',
url='https://github.com/Shizmob/pydle',
keywords='irc library python3 compact flexible',
description='A compact, flexible and standards-abiding IRC library for Python 3.',
license=pydle.__license__,
zip_safe=True,
test_suite='tests'
)
## Instruction:
Install irccat as pydle-irccat instead.
## Code After:
from setuptools import setup, find_packages
import pydle
setup(
name=pydle.__name__,
version=pydle.__version__,
packages=[
'pydle',
'pydle.features',
'pydle.features.rfc1459',
'pydle.features.ircv3_1',
'pydle.features.ircv3_2',
'pydle.utils'
],
requires=['tornado'],
extras_require={
'SASL': 'pure-sasl >=0.1.6' # for pydle.features.sasl
},
entry_points={
'console_scripts': [
'pydle = pydle.utils.run:main',
'ipydle = pydle.utils.console:main',
'pydle-irccat = pydle.utils.irccat:main'
]
},
author='Shiz',
author_email='[email protected]',
url='https://github.com/Shizmob/pydle',
keywords='irc library python3 compact flexible',
description='A compact, flexible and standards-abiding IRC library for Python 3.',
license=pydle.__license__,
zip_safe=True,
test_suite='tests'
)
|
// ... existing code ...
},
entry_points={
'console_scripts': [
'pydle = pydle.utils.run:main',
'ipydle = pydle.utils.console:main',
'pydle-irccat = pydle.utils.irccat:main'
]
},
// ... rest of the code ...
|
3f7a9d900a1f2cd2f5522735815c999040a920e0
|
pajbot/web/routes/api/users.py
|
pajbot/web/routes/api/users.py
|
from flask_restful import Resource
from pajbot.managers.redis import RedisManager
from pajbot.managers.user import UserManager
from pajbot.streamhelper import StreamHelper
class APIUser(Resource):
@staticmethod
def get(username):
user = UserManager.find_static(username)
if not user:
return {"error": "Not found"}, 404
redis = RedisManager.get()
key = "{streamer}:users:num_lines".format(streamer=StreamHelper.get_streamer())
rank = redis.zrevrank(key, user.username)
if rank is None:
rank = redis.zcard(key)
else:
rank = rank + 1
return user.jsonify()
def init(api):
api.add_resource(APIUser, "/users/<username>")
|
from flask_restful import Resource
from pajbot.managers.redis import RedisManager
from pajbot.managers.user import UserManager
from pajbot.streamhelper import StreamHelper
class APIUser(Resource):
@staticmethod
def get(username):
user = UserManager.find_static(username)
if not user:
return {"error": "Not found"}, 404
return user.jsonify()
def init(api):
api.add_resource(APIUser, "/users/<username>")
|
Remove dead code in get user API endpoint
|
Remove dead code in get user API endpoint
|
Python
|
mit
|
pajlada/tyggbot,pajlada/tyggbot,pajlada/pajbot,pajlada/pajbot,pajlada/pajbot,pajlada/tyggbot,pajlada/pajbot,pajlada/tyggbot
|
python
|
## Code Before:
from flask_restful import Resource
from pajbot.managers.redis import RedisManager
from pajbot.managers.user import UserManager
from pajbot.streamhelper import StreamHelper
class APIUser(Resource):
@staticmethod
def get(username):
user = UserManager.find_static(username)
if not user:
return {"error": "Not found"}, 404
redis = RedisManager.get()
key = "{streamer}:users:num_lines".format(streamer=StreamHelper.get_streamer())
rank = redis.zrevrank(key, user.username)
if rank is None:
rank = redis.zcard(key)
else:
rank = rank + 1
return user.jsonify()
def init(api):
api.add_resource(APIUser, "/users/<username>")
## Instruction:
Remove dead code in get user API endpoint
## Code After:
from flask_restful import Resource
from pajbot.managers.redis import RedisManager
from pajbot.managers.user import UserManager
from pajbot.streamhelper import StreamHelper
class APIUser(Resource):
@staticmethod
def get(username):
user = UserManager.find_static(username)
if not user:
return {"error": "Not found"}, 404
return user.jsonify()
def init(api):
api.add_resource(APIUser, "/users/<username>")
|
# ... existing code ...
if not user:
return {"error": "Not found"}, 404
return user.jsonify()
# ... rest of the code ...
|
7777019a00df754e5e61f8d26af86014a02f8f92
|
src/pbkdf/pbkdf2/pbkdf2.h
|
src/pbkdf/pbkdf2/pbkdf2.h
|
/*
* PBKDF2
* (C) 1999-2007 Jack Lloyd
*
* Distributed under the terms of the Botan license
*/
#ifndef BOTAN_PBKDF2_H__
#define BOTAN_PBKDF2_H__
#include <botan/pbkdf.h>
#include <botan/mac.h>
namespace Botan {
/**
* PKCS #5 PBKDF2
*/
class BOTAN_DLL PKCS5_PBKDF2 : public PBKDF
{
public:
std::string name() const
{
return "PBKDF2(" + mac->name() + ")";
}
PBKDF* clone() const
{
return new PKCS5_PBKDF2(mac->clone());
}
OctetString derive_key(u32bit output_len,
const std::string& passphrase,
const byte salt[], u32bit salt_len,
u32bit iterations) const;
/**
* Create a PKCS #5 instance using the specified message auth code
* @param mac the MAC to use
*/
PKCS5_PBKDF2(MessageAuthenticationCode* m) : mac(m) {}
/**
* Destructor
*/
~PKCS5_PBKDF2() { delete mac; }
private:
MessageAuthenticationCode* mac;
};
}
#endif
|
/*
* PBKDF2
* (C) 1999-2007 Jack Lloyd
*
* Distributed under the terms of the Botan license
*/
#ifndef BOTAN_PBKDF2_H__
#define BOTAN_PBKDF2_H__
#include <botan/pbkdf.h>
#include <botan/mac.h>
namespace Botan {
/**
* PKCS #5 PBKDF2
*/
class BOTAN_DLL PKCS5_PBKDF2 : public PBKDF
{
public:
std::string name() const
{
return "PBKDF2(" + mac->name() + ")";
}
PBKDF* clone() const
{
return new PKCS5_PBKDF2(mac->clone());
}
OctetString derive_key(u32bit output_len,
const std::string& passphrase,
const byte salt[], u32bit salt_len,
u32bit iterations) const;
/**
* Create a PKCS #5 instance using the specified message auth code
* @param mac_fn the MAC to use
*/
PKCS5_PBKDF2(MessageAuthenticationCode* mac_fn) : mac(mac_fn) {}
/**
* Destructor
*/
~PKCS5_PBKDF2() { delete mac; }
private:
MessageAuthenticationCode* mac;
};
}
#endif
|
Fix Doxygen comment in PBKDF2 constructor
|
Fix Doxygen comment in PBKDF2 constructor
|
C
|
bsd-2-clause
|
Rohde-Schwarz-Cybersecurity/botan,webmaster128/botan,Rohde-Schwarz-Cybersecurity/botan,webmaster128/botan,webmaster128/botan,Rohde-Schwarz-Cybersecurity/botan,randombit/botan,randombit/botan,webmaster128/botan,randombit/botan,Rohde-Schwarz-Cybersecurity/botan,randombit/botan,webmaster128/botan,Rohde-Schwarz-Cybersecurity/botan,Rohde-Schwarz-Cybersecurity/botan,randombit/botan
|
c
|
## Code Before:
/*
* PBKDF2
* (C) 1999-2007 Jack Lloyd
*
* Distributed under the terms of the Botan license
*/
#ifndef BOTAN_PBKDF2_H__
#define BOTAN_PBKDF2_H__
#include <botan/pbkdf.h>
#include <botan/mac.h>
namespace Botan {
/**
* PKCS #5 PBKDF2
*/
class BOTAN_DLL PKCS5_PBKDF2 : public PBKDF
{
public:
std::string name() const
{
return "PBKDF2(" + mac->name() + ")";
}
PBKDF* clone() const
{
return new PKCS5_PBKDF2(mac->clone());
}
OctetString derive_key(u32bit output_len,
const std::string& passphrase,
const byte salt[], u32bit salt_len,
u32bit iterations) const;
/**
* Create a PKCS #5 instance using the specified message auth code
* @param mac the MAC to use
*/
PKCS5_PBKDF2(MessageAuthenticationCode* m) : mac(m) {}
/**
* Destructor
*/
~PKCS5_PBKDF2() { delete mac; }
private:
MessageAuthenticationCode* mac;
};
}
#endif
## Instruction:
Fix Doxygen comment in PBKDF2 constructor
## Code After:
/*
* PBKDF2
* (C) 1999-2007 Jack Lloyd
*
* Distributed under the terms of the Botan license
*/
#ifndef BOTAN_PBKDF2_H__
#define BOTAN_PBKDF2_H__
#include <botan/pbkdf.h>
#include <botan/mac.h>
namespace Botan {
/**
* PKCS #5 PBKDF2
*/
class BOTAN_DLL PKCS5_PBKDF2 : public PBKDF
{
public:
std::string name() const
{
return "PBKDF2(" + mac->name() + ")";
}
PBKDF* clone() const
{
return new PKCS5_PBKDF2(mac->clone());
}
OctetString derive_key(u32bit output_len,
const std::string& passphrase,
const byte salt[], u32bit salt_len,
u32bit iterations) const;
/**
* Create a PKCS #5 instance using the specified message auth code
* @param mac_fn the MAC to use
*/
PKCS5_PBKDF2(MessageAuthenticationCode* mac_fn) : mac(mac_fn) {}
/**
* Destructor
*/
~PKCS5_PBKDF2() { delete mac; }
private:
MessageAuthenticationCode* mac;
};
}
#endif
|
// ... existing code ...
/**
* Create a PKCS #5 instance using the specified message auth code
* @param mac_fn the MAC to use
*/
PKCS5_PBKDF2(MessageAuthenticationCode* mac_fn) : mac(mac_fn) {}
/**
* Destructor
// ... rest of the code ...
|
42e0da4a0ce867dbc186665754418f5bce98301f
|
DcpmPkg/cli/NvmDimmCli.h
|
DcpmPkg/cli/NvmDimmCli.h
|
/*
* Copyright (c) 2018, Intel Corporation.
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <CommandParser.h>
#if defined(__LINUX__) || defined(__ESX__)
#define EXE_NAME L"ipmctl"
#elif defined(_MSC_VER)
#define EXE_NAME L"ipmctl.exe"
#else
#define EXE_NAME L"ipmctl.efi"
#endif
#define APP_DESCRIPTION L"Command Line Interface"
#define DRIVER_API_DESCRIPTION L"Driver API"
extern EFI_HANDLE gNvmDimmCliHiiHandle;
//
// This is the generated String package data for all .UNI files.
// This data array is ready to be used as input of HiiAddPackages() to
// create a packagelist (which contains Form packages, String packages, etc).
//
extern unsigned char ipmctlStrings[];
extern int g_basic_commands;
/**
Register commands on the commands list
@retval a return code from called functions
**/
EFI_STATUS
RegisterCommands(
);
/**
Register basic commands on the commands list for non-root users
@retval a return code from called functions
**/
EFI_STATUS
RegisterNonAdminUserCommands(
);
/**
Print the CLI application help
**/
EFI_STATUS showHelp(struct Command *pCmd);
|
/*
* Copyright (c) 2018, Intel Corporation.
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <CommandParser.h>
#if defined(__LINUX__) || defined(__ESX__)
#define EXE_NAME L"ipmctl"
#elif defined(_MSC_VER) && defined(OS_BUILD)
#define EXE_NAME L"ipmctl.exe"
#else
#define EXE_NAME L"ipmctl.efi"
#endif
#define APP_DESCRIPTION L"Command Line Interface"
#define DRIVER_API_DESCRIPTION L"Driver API"
extern EFI_HANDLE gNvmDimmCliHiiHandle;
//
// This is the generated String package data for all .UNI files.
// This data array is ready to be used as input of HiiAddPackages() to
// create a packagelist (which contains Form packages, String packages, etc).
//
extern unsigned char ipmctlStrings[];
extern int g_basic_commands;
/**
Register commands on the commands list
@retval a return code from called functions
**/
EFI_STATUS
RegisterCommands(
);
/**
Register basic commands on the commands list for non-root users
@retval a return code from called functions
**/
EFI_STATUS
RegisterNonAdminUserCommands(
);
/**
Print the CLI application help
**/
EFI_STATUS showHelp(struct Command *pCmd);
|
Fix incorrect help message for UEFI
|
Fix incorrect help message for UEFI
Signed-off-by: Shilpa Nanja <[email protected]>
|
C
|
bsd-3-clause
|
intel/ipmctl,intel/ipmctl,intel/ipmctl,intel/ipmctl
|
c
|
## Code Before:
/*
* Copyright (c) 2018, Intel Corporation.
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <CommandParser.h>
#if defined(__LINUX__) || defined(__ESX__)
#define EXE_NAME L"ipmctl"
#elif defined(_MSC_VER)
#define EXE_NAME L"ipmctl.exe"
#else
#define EXE_NAME L"ipmctl.efi"
#endif
#define APP_DESCRIPTION L"Command Line Interface"
#define DRIVER_API_DESCRIPTION L"Driver API"
extern EFI_HANDLE gNvmDimmCliHiiHandle;
//
// This is the generated String package data for all .UNI files.
// This data array is ready to be used as input of HiiAddPackages() to
// create a packagelist (which contains Form packages, String packages, etc).
//
extern unsigned char ipmctlStrings[];
extern int g_basic_commands;
/**
Register commands on the commands list
@retval a return code from called functions
**/
EFI_STATUS
RegisterCommands(
);
/**
Register basic commands on the commands list for non-root users
@retval a return code from called functions
**/
EFI_STATUS
RegisterNonAdminUserCommands(
);
/**
Print the CLI application help
**/
EFI_STATUS showHelp(struct Command *pCmd);
## Instruction:
Fix incorrect help message for UEFI
Signed-off-by: Shilpa Nanja <[email protected]>
## Code After:
/*
* Copyright (c) 2018, Intel Corporation.
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <CommandParser.h>
#if defined(__LINUX__) || defined(__ESX__)
#define EXE_NAME L"ipmctl"
#elif defined(_MSC_VER) && defined(OS_BUILD)
#define EXE_NAME L"ipmctl.exe"
#else
#define EXE_NAME L"ipmctl.efi"
#endif
#define APP_DESCRIPTION L"Command Line Interface"
#define DRIVER_API_DESCRIPTION L"Driver API"
extern EFI_HANDLE gNvmDimmCliHiiHandle;
//
// This is the generated String package data for all .UNI files.
// This data array is ready to be used as input of HiiAddPackages() to
// create a packagelist (which contains Form packages, String packages, etc).
//
extern unsigned char ipmctlStrings[];
extern int g_basic_commands;
/**
Register commands on the commands list
@retval a return code from called functions
**/
EFI_STATUS
RegisterCommands(
);
/**
Register basic commands on the commands list for non-root users
@retval a return code from called functions
**/
EFI_STATUS
RegisterNonAdminUserCommands(
);
/**
Print the CLI application help
**/
EFI_STATUS showHelp(struct Command *pCmd);
|
...
#if defined(__LINUX__) || defined(__ESX__)
#define EXE_NAME L"ipmctl"
#elif defined(_MSC_VER) && defined(OS_BUILD)
#define EXE_NAME L"ipmctl.exe"
#else
#define EXE_NAME L"ipmctl.efi"
...
|
1f250c6113ed69dc3373afbc40a93bdc7d8e7894
|
pages_scrape.py
|
pages_scrape.py
|
import logging
import requests
def scrape(url, extractor):
"""
Function to request and parse a given URL. Returns only the "relevant"
text.
Parameters
----------
url : String.
URL to request and parse.
extractor : Goose class instance.
An instance of Goose that allows for parsing of content.
Returns
-------
text : String.
Parsed text from the specified website.
meta : String.
Parsed meta description of an article. Usually equivalent to the
lede.
"""
logger = logging.getLogger('scraper_log')
try:
headers = {'User-Agent': "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36"}
page = requests.get(url, headers=headers)
try:
article = extractor.extract(raw_html=page.content)
text = article.cleaned_text
meta = article.meta_description
return text, meta
#Generic error catching is bad
except Exception, e:
print 'There was an error. Check the log file for more information.'
logger.warning('Problem scraping URL: {}. {}.'.format(url, e))
except Exception, e:
print 'There was an error. Check the log file for more information.'
logger.warning('Problem requesting url: {}. {}'.format(url, e))
|
import logging
import requests
def scrape(url, extractor):
"""
Function to request and parse a given URL. Returns only the "relevant"
text.
Parameters
----------
url : String.
URL to request and parse.
extractor : Goose class instance.
An instance of Goose that allows for parsing of content.
Returns
-------
text : String.
Parsed text from the specified website.
meta : String.
Parsed meta description of an article. Usually equivalent to the
lede.
"""
logger = logging.getLogger('scraper_log')
try:
headers = {'User-Agent': "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36"}
page = requests.get(url, headers=headers)
try:
try:
article = extractor.extract(raw_html=page.content)
except UnicodeDecodeError:
article = extractor.extract(raw_html=page.content.decode('utf-8',
errors='replace'))
text = article.cleaned_text
meta = article.meta_description
return text, meta
#Generic error catching is bad
except Exception, e:
print 'There was an error. Check the log file for more information.'
logger.warning('Problem scraping URL: {}. {}.'.format(url, e))
except Exception, e:
print 'There was an error. Check the log file for more information.'
logger.warning('Problem requesting url: {}. {}'.format(url, e))
|
Handle UTF errors with invalid bytes.
|
Handle UTF errors with invalid bytes.
|
Python
|
mit
|
openeventdata/scraper,chilland/scraper
|
python
|
## Code Before:
import logging
import requests
def scrape(url, extractor):
"""
Function to request and parse a given URL. Returns only the "relevant"
text.
Parameters
----------
url : String.
URL to request and parse.
extractor : Goose class instance.
An instance of Goose that allows for parsing of content.
Returns
-------
text : String.
Parsed text from the specified website.
meta : String.
Parsed meta description of an article. Usually equivalent to the
lede.
"""
logger = logging.getLogger('scraper_log')
try:
headers = {'User-Agent': "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36"}
page = requests.get(url, headers=headers)
try:
article = extractor.extract(raw_html=page.content)
text = article.cleaned_text
meta = article.meta_description
return text, meta
#Generic error catching is bad
except Exception, e:
print 'There was an error. Check the log file for more information.'
logger.warning('Problem scraping URL: {}. {}.'.format(url, e))
except Exception, e:
print 'There was an error. Check the log file for more information.'
logger.warning('Problem requesting url: {}. {}'.format(url, e))
## Instruction:
Handle UTF errors with invalid bytes.
## Code After:
import logging
import requests
def scrape(url, extractor):
"""
Function to request and parse a given URL. Returns only the "relevant"
text.
Parameters
----------
url : String.
URL to request and parse.
extractor : Goose class instance.
An instance of Goose that allows for parsing of content.
Returns
-------
text : String.
Parsed text from the specified website.
meta : String.
Parsed meta description of an article. Usually equivalent to the
lede.
"""
logger = logging.getLogger('scraper_log')
try:
headers = {'User-Agent': "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36"}
page = requests.get(url, headers=headers)
try:
try:
article = extractor.extract(raw_html=page.content)
except UnicodeDecodeError:
article = extractor.extract(raw_html=page.content.decode('utf-8',
errors='replace'))
text = article.cleaned_text
meta = article.meta_description
return text, meta
#Generic error catching is bad
except Exception, e:
print 'There was an error. Check the log file for more information.'
logger.warning('Problem scraping URL: {}. {}.'.format(url, e))
except Exception, e:
print 'There was an error. Check the log file for more information.'
logger.warning('Problem requesting url: {}. {}'.format(url, e))
|
# ... existing code ...
page = requests.get(url, headers=headers)
try:
try:
article = extractor.extract(raw_html=page.content)
except UnicodeDecodeError:
article = extractor.extract(raw_html=page.content.decode('utf-8',
errors='replace'))
text = article.cleaned_text
meta = article.meta_description
return text, meta
# ... rest of the code ...
|
9d3750881eaa215f6d06087e6d0f7b6d223c3cd1
|
feincms3/plugins/richtext.py
|
feincms3/plugins/richtext.py
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
from content_editor.admin import ContentEditorInline
from feincms3.cleanse import CleansedRichTextField
__all__ = ('CleansedRichTextField', 'RichText', 'RichTextInline')
@python_2_unicode_compatible
class RichText(models.Model):
text = CleansedRichTextField(_('text'), config_name='richtext-plugin')
class Meta:
abstract = True
verbose_name = _('rich text')
verbose_name_plural = _('rich texts')
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=' ...')
class RichTextInline(ContentEditorInline):
class Media:
js = ('feincms3/plugin_ckeditor.js',)
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
from content_editor.admin import ContentEditorInline
from feincms3.cleanse import CleansedRichTextField
__all__ = ('RichText', 'RichTextInline')
@python_2_unicode_compatible
class RichText(models.Model):
"""
Rich text plugin
Usage::
class Page(...):
# ...
PagePlugin = create_plugin_base(Page)
class RichText(plugins.RichText, PagePlugin):
pass
To use this, a django-ckeditor_ configuration named ``richtext-plugin`` is
required. See the section :mod:`HTML cleansing <feincms3.cleanse>` for the
recommended configuration.
"""
text = CleansedRichTextField(_('text'), config_name='richtext-plugin')
class Meta:
abstract = True
verbose_name = _('rich text')
verbose_name_plural = _('rich texts')
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=' ...')
class RichTextInline(ContentEditorInline):
"""
The only difference with the standard ``ContentEditorInline`` is that this
inline adds the ``feincms3/plugin_ckeditor.js`` file which handles the
CKEditor widget activation and deactivation inside the content editor.
"""
class Media:
js = ('feincms3/plugin_ckeditor.js',)
|
Document the rich text plugin
|
Document the rich text plugin
|
Python
|
bsd-3-clause
|
matthiask/feincms3,matthiask/feincms3,matthiask/feincms3
|
python
|
## Code Before:
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
from content_editor.admin import ContentEditorInline
from feincms3.cleanse import CleansedRichTextField
__all__ = ('CleansedRichTextField', 'RichText', 'RichTextInline')
@python_2_unicode_compatible
class RichText(models.Model):
text = CleansedRichTextField(_('text'), config_name='richtext-plugin')
class Meta:
abstract = True
verbose_name = _('rich text')
verbose_name_plural = _('rich texts')
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=' ...')
class RichTextInline(ContentEditorInline):
class Media:
js = ('feincms3/plugin_ckeditor.js',)
## Instruction:
Document the rich text plugin
## Code After:
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
from content_editor.admin import ContentEditorInline
from feincms3.cleanse import CleansedRichTextField
__all__ = ('RichText', 'RichTextInline')
@python_2_unicode_compatible
class RichText(models.Model):
"""
Rich text plugin
Usage::
class Page(...):
# ...
PagePlugin = create_plugin_base(Page)
class RichText(plugins.RichText, PagePlugin):
pass
To use this, a django-ckeditor_ configuration named ``richtext-plugin`` is
required. See the section :mod:`HTML cleansing <feincms3.cleanse>` for the
recommended configuration.
"""
text = CleansedRichTextField(_('text'), config_name='richtext-plugin')
class Meta:
abstract = True
verbose_name = _('rich text')
verbose_name_plural = _('rich texts')
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=' ...')
class RichTextInline(ContentEditorInline):
"""
The only difference with the standard ``ContentEditorInline`` is that this
inline adds the ``feincms3/plugin_ckeditor.js`` file which handles the
CKEditor widget activation and deactivation inside the content editor.
"""
class Media:
js = ('feincms3/plugin_ckeditor.js',)
|
...
from feincms3.cleanse import CleansedRichTextField
__all__ = ('RichText', 'RichTextInline')
@python_2_unicode_compatible
class RichText(models.Model):
"""
Rich text plugin
Usage::
class Page(...):
# ...
PagePlugin = create_plugin_base(Page)
class RichText(plugins.RichText, PagePlugin):
pass
To use this, a django-ckeditor_ configuration named ``richtext-plugin`` is
required. See the section :mod:`HTML cleansing <feincms3.cleanse>` for the
recommended configuration.
"""
text = CleansedRichTextField(_('text'), config_name='richtext-plugin')
class Meta:
...
class RichTextInline(ContentEditorInline):
"""
The only difference with the standard ``ContentEditorInline`` is that this
inline adds the ``feincms3/plugin_ckeditor.js`` file which handles the
CKEditor widget activation and deactivation inside the content editor.
"""
class Media:
js = ('feincms3/plugin_ckeditor.js',)
...
|
da663dd03d71768b4b58c0ac2c1ec7b7513b3ef8
|
test/NotifierTest.h
|
test/NotifierTest.h
|
class NotifierTest : public ::testing::Test {
public:
NotifierTest() {
};
~NotifierTest() {
}
protected:
virtual void SetUp() {
};
virtual void TearDown() {
FileIO::RemoveFileAsRoot(notifierIPCPath);
FileIO::RemoveFileAsRoot(handshakeIPCPath);
zctx_interrupted = false;
};
private:
const std::string notifierIPCPath = "/tmp/RestartServicesQueue.ipc";
const std::string handshakeIPCPath = "/tmp/RestartServicesHandshakeQueue.ipc";
};
|
class NotifierTest : public ::testing::Test {
public:
NotifierTest() {
};
~NotifierTest() {
}
protected:
virtual void SetUp() {
};
virtual void TearDown() {
FileIO::RemoveFile(notifierIPCPath);
FileIO::RemoveFile(handshakeIPCPath);
zctx_interrupted = false;
};
private:
const std::string notifierIPCPath = "/tmp/RestartServicesQueue.ipc";
const std::string handshakeIPCPath = "/tmp/RestartServicesHandshakeQueue.ipc";
};
|
Change RemoveFileAsRoot to RemoveFile per change in FileIO.
|
Change RemoveFileAsRoot to RemoveFile per change in FileIO.
|
C
|
mit
|
LogRhythm/QueueNado,LogRhythm/QueueNado,weberr13/QueueNado,weberr13/QueueNado,john-gress/QueueNado,john-gress/QueueNado
|
c
|
## Code Before:
class NotifierTest : public ::testing::Test {
public:
NotifierTest() {
};
~NotifierTest() {
}
protected:
virtual void SetUp() {
};
virtual void TearDown() {
FileIO::RemoveFileAsRoot(notifierIPCPath);
FileIO::RemoveFileAsRoot(handshakeIPCPath);
zctx_interrupted = false;
};
private:
const std::string notifierIPCPath = "/tmp/RestartServicesQueue.ipc";
const std::string handshakeIPCPath = "/tmp/RestartServicesHandshakeQueue.ipc";
};
## Instruction:
Change RemoveFileAsRoot to RemoveFile per change in FileIO.
## Code After:
class NotifierTest : public ::testing::Test {
public:
NotifierTest() {
};
~NotifierTest() {
}
protected:
virtual void SetUp() {
};
virtual void TearDown() {
FileIO::RemoveFile(notifierIPCPath);
FileIO::RemoveFile(handshakeIPCPath);
zctx_interrupted = false;
};
private:
const std::string notifierIPCPath = "/tmp/RestartServicesQueue.ipc";
const std::string handshakeIPCPath = "/tmp/RestartServicesHandshakeQueue.ipc";
};
|
// ... existing code ...
};
virtual void TearDown() {
FileIO::RemoveFile(notifierIPCPath);
FileIO::RemoveFile(handshakeIPCPath);
zctx_interrupted = false;
};
// ... rest of the code ...
|
698732f1276f92a94143b0531906caf37e885c28
|
trello_notifications.py
|
trello_notifications.py
|
try:
from trello import TrelloCommand
from output import Output
except ImportError:
from .trello import TrelloCommand
from .output import Output
class TrelloNotificationsCommand(TrelloCommand):
def work(self, connection):
self.options = [
{ 'name': "Unread", 'action': self.show_unread },
{ 'name': "Read all", 'action': self.read_all },
{ 'name': "Exit", 'action': self.noop }
]
self.show_quick_panel(self.items(), self.callback)
def items(self):
return [option['name'] for option in self.options]
def callback(self, index):
option = self.options[index]
if not option is None:
option['action']()
def show_unread(self):
self.view.run_command("trello_unread_notifications")
def read_all():
pass
def noop():
pass
class TrelloUnreadNotificationsCommand(TrelloCommand):
def work(self, connection):
member = connection.me
output = Output.notifications(member.unread_notifications())
self.show_output_panel(output)
|
try:
from trello import TrelloCommand
from output import Output
except ImportError:
from .trello import TrelloCommand
from .output import Output
class TrelloNotificationsCommand(TrelloCommand):
def work(self, connection):
self.options = [
{ 'name': "Unread", 'action': self.show_unread },
{ 'name': "Read all", 'action': self.read_all },
{ 'name': "Exit", 'action': self.noop }
]
self.show_quick_panel(self.items(), self.callback)
self.connection = connection
def items(self):
return [option['name'] for option in self.options]
def callback(self, index):
option = self.options[index]
if not option is None:
option['action']()
def show_unread(self):
self.view.run_command("trello_unread_notifications")
def read_all(self):
pass
def noop(self):
pass
class TrelloUnreadNotificationsCommand(TrelloCommand):
def work(self, connection):
member = connection.me
output = Output.notifications(member.unread_notifications())
self.show_output_panel(output)
|
Store connection and missing self
|
Store connection and missing self
|
Python
|
mit
|
NicoSantangelo/sublime-text-trello
|
python
|
## Code Before:
try:
from trello import TrelloCommand
from output import Output
except ImportError:
from .trello import TrelloCommand
from .output import Output
class TrelloNotificationsCommand(TrelloCommand):
def work(self, connection):
self.options = [
{ 'name': "Unread", 'action': self.show_unread },
{ 'name': "Read all", 'action': self.read_all },
{ 'name': "Exit", 'action': self.noop }
]
self.show_quick_panel(self.items(), self.callback)
def items(self):
return [option['name'] for option in self.options]
def callback(self, index):
option = self.options[index]
if not option is None:
option['action']()
def show_unread(self):
self.view.run_command("trello_unread_notifications")
def read_all():
pass
def noop():
pass
class TrelloUnreadNotificationsCommand(TrelloCommand):
def work(self, connection):
member = connection.me
output = Output.notifications(member.unread_notifications())
self.show_output_panel(output)
## Instruction:
Store connection and missing self
## Code After:
try:
from trello import TrelloCommand
from output import Output
except ImportError:
from .trello import TrelloCommand
from .output import Output
class TrelloNotificationsCommand(TrelloCommand):
def work(self, connection):
self.options = [
{ 'name': "Unread", 'action': self.show_unread },
{ 'name': "Read all", 'action': self.read_all },
{ 'name': "Exit", 'action': self.noop }
]
self.show_quick_panel(self.items(), self.callback)
self.connection = connection
def items(self):
return [option['name'] for option in self.options]
def callback(self, index):
option = self.options[index]
if not option is None:
option['action']()
def show_unread(self):
self.view.run_command("trello_unread_notifications")
def read_all(self):
pass
def noop(self):
pass
class TrelloUnreadNotificationsCommand(TrelloCommand):
def work(self, connection):
member = connection.me
output = Output.notifications(member.unread_notifications())
self.show_output_panel(output)
|
# ... existing code ...
{ 'name': "Exit", 'action': self.noop }
]
self.show_quick_panel(self.items(), self.callback)
self.connection = connection
def items(self):
return [option['name'] for option in self.options]
# ... modified code ...
def show_unread(self):
self.view.run_command("trello_unread_notifications")
def read_all(self):
pass
def noop(self):
pass
class TrelloUnreadNotificationsCommand(TrelloCommand):
# ... rest of the code ...
|
04d6348536a393439c36a7433c4fbf89c52ded32
|
src/qt/clientmodel.h
|
src/qt/clientmodel.h
|
class OptionsModel;
class AddressTableModel;
class TransactionTableModel;
class CWallet;
QT_BEGIN_NAMESPACE
class QDateTime;
QT_END_NAMESPACE
// Interface to Bitcoin network client
class ClientModel : public QObject
{
Q_OBJECT
public:
// The only reason that this constructor takes a wallet is because
// the global client settings are stored in the main wallet.
explicit ClientModel(OptionsModel *optionsModel, QObject *parent = 0);
OptionsModel *getOptionsModel();
int getNumConnections() const;
int getNumBlocks() const;
QDateTime getLastBlockDate() const;
// Return true if client connected to testnet
bool isTestNet() const;
// Return true if core is doing initial block download
bool inInitialBlockDownload() const;
// Return conservative estimate of total number of blocks, or 0 if unknown
int getTotalBlocksEstimate() const;
QString formatFullVersion() const;
private:
OptionsModel *optionsModel;
int cachedNumConnections;
int cachedNumBlocks;
signals:
void numConnectionsChanged(int count);
void numBlocksChanged(int count);
// Asynchronous error notification
void error(const QString &title, const QString &message);
public slots:
private slots:
void update();
};
#endif // CLIENTMODEL_H
|
class OptionsModel;
class AddressTableModel;
class TransactionTableModel;
class CWallet;
QT_BEGIN_NAMESPACE
class QDateTime;
QT_END_NAMESPACE
// Interface to Bitcoin network client
class ClientModel : public QObject
{
Q_OBJECT
public:
explicit ClientModel(OptionsModel *optionsModel, QObject *parent = 0);
OptionsModel *getOptionsModel();
int getNumConnections() const;
int getNumBlocks() const;
QDateTime getLastBlockDate() const;
// Return true if client connected to testnet
bool isTestNet() const;
// Return true if core is doing initial block download
bool inInitialBlockDownload() const;
// Return conservative estimate of total number of blocks, or 0 if unknown
int getTotalBlocksEstimate() const;
QString formatFullVersion() const;
private:
OptionsModel *optionsModel;
int cachedNumConnections;
int cachedNumBlocks;
signals:
void numConnectionsChanged(int count);
void numBlocksChanged(int count);
// Asynchronous error notification
void error(const QString &title, const QString &message);
public slots:
private slots:
void update();
};
#endif // CLIENTMODEL_H
|
Remove no longer valid comment
|
Remove no longer valid comment
|
C
|
mit
|
MidasPaymentLTD/midascoin,MidasPaymentLTD/midascoin,MidasPaymentLTD/midascoin,MidasPaymentLTD/midascoin,MidasPaymentLTD/midascoin
|
c
|
## Code Before:
class OptionsModel;
class AddressTableModel;
class TransactionTableModel;
class CWallet;
QT_BEGIN_NAMESPACE
class QDateTime;
QT_END_NAMESPACE
// Interface to Bitcoin network client
class ClientModel : public QObject
{
Q_OBJECT
public:
// The only reason that this constructor takes a wallet is because
// the global client settings are stored in the main wallet.
explicit ClientModel(OptionsModel *optionsModel, QObject *parent = 0);
OptionsModel *getOptionsModel();
int getNumConnections() const;
int getNumBlocks() const;
QDateTime getLastBlockDate() const;
// Return true if client connected to testnet
bool isTestNet() const;
// Return true if core is doing initial block download
bool inInitialBlockDownload() const;
// Return conservative estimate of total number of blocks, or 0 if unknown
int getTotalBlocksEstimate() const;
QString formatFullVersion() const;
private:
OptionsModel *optionsModel;
int cachedNumConnections;
int cachedNumBlocks;
signals:
void numConnectionsChanged(int count);
void numBlocksChanged(int count);
// Asynchronous error notification
void error(const QString &title, const QString &message);
public slots:
private slots:
void update();
};
#endif // CLIENTMODEL_H
## Instruction:
Remove no longer valid comment
## Code After:
class OptionsModel;
class AddressTableModel;
class TransactionTableModel;
class CWallet;
QT_BEGIN_NAMESPACE
class QDateTime;
QT_END_NAMESPACE
// Interface to Bitcoin network client
class ClientModel : public QObject
{
Q_OBJECT
public:
explicit ClientModel(OptionsModel *optionsModel, QObject *parent = 0);
OptionsModel *getOptionsModel();
int getNumConnections() const;
int getNumBlocks() const;
QDateTime getLastBlockDate() const;
// Return true if client connected to testnet
bool isTestNet() const;
// Return true if core is doing initial block download
bool inInitialBlockDownload() const;
// Return conservative estimate of total number of blocks, or 0 if unknown
int getTotalBlocksEstimate() const;
QString formatFullVersion() const;
private:
OptionsModel *optionsModel;
int cachedNumConnections;
int cachedNumBlocks;
signals:
void numConnectionsChanged(int count);
void numBlocksChanged(int count);
// Asynchronous error notification
void error(const QString &title, const QString &message);
public slots:
private slots:
void update();
};
#endif // CLIENTMODEL_H
|
# ... existing code ...
{
Q_OBJECT
public:
explicit ClientModel(OptionsModel *optionsModel, QObject *parent = 0);
OptionsModel *getOptionsModel();
# ... rest of the code ...
|
e7f12e4b9236c29b226c074577368e94269380c2
|
spring-boot/hazelcast/hazelcast-embeded-cache/src/main/java/io/refectoring/cache/embedded/HazelcastNode.java
|
spring-boot/hazelcast/hazelcast-embeded-cache/src/main/java/io/refectoring/cache/embedded/HazelcastNode.java
|
package io.refectoring.cache.embedded;
import com.hazelcast.config.Config;
import com.hazelcast.config.MapConfig;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.map.IMap;
import io.refectoring.cache.embedded.rest.Car;
import org.springframework.stereotype.Component;
@Component
public class HazelcastNode {
public static final String CARS = "cars";
private final HazelcastInstance hzInstance = Hazelcast.newHazelcastInstance(createConfig());
public String put(String number, Car car){
IMap<Object, Object> map = hzInstance.getMap(CARS);
return (String) map.put(number, car);
}
public Car get(String key){
IMap<Object, Object> map = hzInstance.getMap(CARS);
return (Car) map.get(key);
}
public Config createConfig() {
Config config = new Config();
MapConfig mapConfig = new MapConfig(CARS);
mapConfig.setTimeToLiveSeconds(20);
mapConfig.setMaxIdleSeconds(360);
config.addMapConfig(mapConfig);
return config;
}
}
|
package io.refectoring.cache.embedded;
import com.hazelcast.config.*;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.map.IMap;
import io.refectoring.cache.embedded.rest.Car;
import org.springframework.stereotype.Component;
@Component
public class HazelcastNode {
public static final String CARS = "cars";
private final HazelcastInstance hzInstance = Hazelcast.newHazelcastInstance(createConfig());
public String put(String number, Car car){
IMap<Object, Object> map = hzInstance.getMap(CARS);
return (String) map.putIfAbsent(number, car);
}
public Car get(String key){
IMap<Object, Object> map = hzInstance.getMap(CARS);
return (Car) map.get(key);
}
public Config createConfig() {
Config config = new Config();
config.addMapConfig(mapConfig());
return config;
}
private MapConfig mapConfig() {
MapConfig mapConfig = new MapConfig(CARS);
mapConfig.setEvictionConfig(evictionConfig());
mapConfig.setTimeToLiveSeconds(20);
mapConfig.setMaxIdleSeconds(360);
return mapConfig;
}
private EvictionConfig evictionConfig() {
EvictionConfig evictionConfig = new EvictionConfig();
evictionConfig.setEvictionPolicy(EvictionPolicy.LFU);
evictionConfig.setSize(2000);
return evictionConfig;
}
}
|
Add eviction configuration for the Map.
|
Add eviction configuration for the Map.
|
Java
|
mit
|
thombergs/code-examples,thombergs/code-examples,thombergs/code-examples,thombergs/code-examples,thombergs/code-examples,thombergs/code-examples
|
java
|
## Code Before:
package io.refectoring.cache.embedded;
import com.hazelcast.config.Config;
import com.hazelcast.config.MapConfig;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.map.IMap;
import io.refectoring.cache.embedded.rest.Car;
import org.springframework.stereotype.Component;
@Component
public class HazelcastNode {
public static final String CARS = "cars";
private final HazelcastInstance hzInstance = Hazelcast.newHazelcastInstance(createConfig());
public String put(String number, Car car){
IMap<Object, Object> map = hzInstance.getMap(CARS);
return (String) map.put(number, car);
}
public Car get(String key){
IMap<Object, Object> map = hzInstance.getMap(CARS);
return (Car) map.get(key);
}
public Config createConfig() {
Config config = new Config();
MapConfig mapConfig = new MapConfig(CARS);
mapConfig.setTimeToLiveSeconds(20);
mapConfig.setMaxIdleSeconds(360);
config.addMapConfig(mapConfig);
return config;
}
}
## Instruction:
Add eviction configuration for the Map.
## Code After:
package io.refectoring.cache.embedded;
import com.hazelcast.config.*;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.map.IMap;
import io.refectoring.cache.embedded.rest.Car;
import org.springframework.stereotype.Component;
@Component
public class HazelcastNode {
public static final String CARS = "cars";
private final HazelcastInstance hzInstance = Hazelcast.newHazelcastInstance(createConfig());
public String put(String number, Car car){
IMap<Object, Object> map = hzInstance.getMap(CARS);
return (String) map.putIfAbsent(number, car);
}
public Car get(String key){
IMap<Object, Object> map = hzInstance.getMap(CARS);
return (Car) map.get(key);
}
public Config createConfig() {
Config config = new Config();
config.addMapConfig(mapConfig());
return config;
}
private MapConfig mapConfig() {
MapConfig mapConfig = new MapConfig(CARS);
mapConfig.setEvictionConfig(evictionConfig());
mapConfig.setTimeToLiveSeconds(20);
mapConfig.setMaxIdleSeconds(360);
return mapConfig;
}
private EvictionConfig evictionConfig() {
EvictionConfig evictionConfig = new EvictionConfig();
evictionConfig.setEvictionPolicy(EvictionPolicy.LFU);
evictionConfig.setSize(2000);
return evictionConfig;
}
}
|
// ... existing code ...
package io.refectoring.cache.embedded;
import com.hazelcast.config.*;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.map.IMap;
// ... modified code ...
public String put(String number, Car car){
IMap<Object, Object> map = hzInstance.getMap(CARS);
return (String) map.putIfAbsent(number, car);
}
public Car get(String key){
...
public Config createConfig() {
Config config = new Config();
config.addMapConfig(mapConfig());
return config;
}
private MapConfig mapConfig() {
MapConfig mapConfig = new MapConfig(CARS);
mapConfig.setEvictionConfig(evictionConfig());
mapConfig.setTimeToLiveSeconds(20);
mapConfig.setMaxIdleSeconds(360);
return mapConfig;
}
private EvictionConfig evictionConfig() {
EvictionConfig evictionConfig = new EvictionConfig();
evictionConfig.setEvictionPolicy(EvictionPolicy.LFU);
evictionConfig.setSize(2000);
return evictionConfig;
}
}
// ... rest of the code ...
|
adc79737e1932724fa38533ecf67a65bf77a6dc8
|
setup.py
|
setup.py
|
from distutils.core import setup, Extension
import numpy.distutils
setup(
name='Libact',
version='0.1.0',
description='Active learning package',
long_description='Active learning package',
author='LSC',
author_email='[email protected]',
url='http://www.csie.ntu.edu.tw/~htlin/',
packages=[
'libact.base',
'libact.models',
'libact.labelers',
'libact.query_strategies',
],
package_dir={
'libact.base': 'libact/base',
'libact.models': 'libact/models',
'libact.labelers': 'libact/labelers',
'libact.query_strategies': 'libact/query_strategies',
},
ext_modules=[
Extension(
"libact.query_strategies._variance_reduction",
["libact/query_strategies/variance_reduction.c"],
extra_link_args=['-llapacke -llapack -lblas'],
extra_compile_args=['-std=c11'],
include_dirs=numpy.distutils.misc_util.get_numpy_include_dirs(),
),
],
)
|
from distutils.core import setup, Extension
import numpy.distutils
import sys
if sys.platform == 'darwin':
print("Platform Detection: Mac OS X. Link to openblas...")
extra_link_args = ['-L/usr/local/opt/openblas/lib -lopenblas']
include_dirs = (numpy.distutils.misc_util.get_numpy_include_dirs() +
['/usr/local/opt/openblas/include'])
else:
# assume linux otherwise, unless we support Windows in the future...
print("Platform Detection: Linux. Link to liblapacke...")
extra_link_args = ['-llapacke -llapack -lblas']
include_dirs = numpy.distutils.misc_util.get_numpy_include_dirs()
setup(
name='Libact',
version='0.1.0',
description='Active learning package',
long_description='Active learning package',
author='LSC',
author_email='[email protected]',
url='http://www.csie.ntu.edu.tw/~htlin/',
packages=[
'libact.base',
'libact.models',
'libact.labelers',
'libact.query_strategies',
],
package_dir={
'libact.base': 'libact/base',
'libact.models': 'libact/models',
'libact.labelers': 'libact/labelers',
'libact.query_strategies': 'libact/query_strategies',
},
ext_modules=[
Extension(
"libact.query_strategies._variance_reduction",
["libact/query_strategies/variance_reduction.c"],
extra_link_args=extra_link_args,
extra_compile_args=['-std=c11'],
include_dirs=include_dirs,
),
],
)
|
Fix compiling flags for darwin.
|
Fix compiling flags for darwin.
The OpenBLAS formula is keg-only, which means it was not symlinked into
/usr/local. Thus, we need to add the build variables manually.
Also, the library is named openblas, which means `-llapack` and `-llapacke`
will cause a library-not-found error.
|
Python
|
bsd-2-clause
|
ntucllab/libact,ntucllab/libact,ntucllab/libact
|
python
|
## Code Before:
from distutils.core import setup, Extension
import numpy.distutils
setup(
name='Libact',
version='0.1.0',
description='Active learning package',
long_description='Active learning package',
author='LSC',
author_email='[email protected]',
url='http://www.csie.ntu.edu.tw/~htlin/',
packages=[
'libact.base',
'libact.models',
'libact.labelers',
'libact.query_strategies',
],
package_dir={
'libact.base': 'libact/base',
'libact.models': 'libact/models',
'libact.labelers': 'libact/labelers',
'libact.query_strategies': 'libact/query_strategies',
},
ext_modules=[
Extension(
"libact.query_strategies._variance_reduction",
["libact/query_strategies/variance_reduction.c"],
extra_link_args=['-llapacke -llapack -lblas'],
extra_compile_args=['-std=c11'],
include_dirs=numpy.distutils.misc_util.get_numpy_include_dirs(),
),
],
)
## Instruction:
Fix compiling flags for darwin.
The OpenBLAS formula is keg-only, which means it was not symlinked into
/usr/local. Thus, we need to add the build variables manually.
Also, the library is named as openblas, which means `-llapack` and `-llapacke`
will cause library not found error.
## Code After:
from distutils.core import setup, Extension
import numpy.distutils
import sys
if sys.platform == 'darwin':
print("Platform Detection: Mac OS X. Link to openblas...")
extra_link_args = ['-L/usr/local/opt/openblas/lib -lopenblas']
include_dirs = (numpy.distutils.misc_util.get_numpy_include_dirs() +
['/usr/local/opt/openblas/include'])
else:
# assume linux otherwise, unless we support Windows in the future...
print("Platform Detection: Linux. Link to liblapacke...")
extra_link_args = ['-llapacke -llapack -lblas']
include_dirs = numpy.distutils.misc_util.get_numpy_include_dirs()
setup(
name='Libact',
version='0.1.0',
description='Active learning package',
long_description='Active learning package',
author='LSC',
author_email='[email protected]',
url='http://www.csie.ntu.edu.tw/~htlin/',
packages=[
'libact.base',
'libact.models',
'libact.labelers',
'libact.query_strategies',
],
package_dir={
'libact.base': 'libact/base',
'libact.models': 'libact/models',
'libact.labelers': 'libact/labelers',
'libact.query_strategies': 'libact/query_strategies',
},
ext_modules=[
Extension(
"libact.query_strategies._variance_reduction",
["libact/query_strategies/variance_reduction.c"],
extra_link_args=extra_link_args,
extra_compile_args=['-std=c11'],
include_dirs=include_dirs,
),
],
)
|
// ... existing code ...
from distutils.core import setup, Extension
import numpy.distutils
import sys
if sys.platform == 'darwin':
print("Platform Detection: Mac OS X. Link to openblas...")
extra_link_args = ['-L/usr/local/opt/openblas/lib -lopenblas']
include_dirs = (numpy.distutils.misc_util.get_numpy_include_dirs() +
['/usr/local/opt/openblas/include'])
else:
# assume linux otherwise, unless we support Windows in the future...
print("Platform Detection: Linux. Link to liblapacke...")
extra_link_args = ['-llapacke -llapack -lblas']
include_dirs = numpy.distutils.misc_util.get_numpy_include_dirs()
setup(
name='Libact',
// ... modified code ...
Extension(
"libact.query_strategies._variance_reduction",
["libact/query_strategies/variance_reduction.c"],
extra_link_args=extra_link_args,
extra_compile_args=['-std=c11'],
include_dirs=include_dirs,
),
],
)
// ... rest of the code ...
|
dbba6f10c867e64031ae07adb3d21becfe4a4e5a
|
law/contrib/cms/__init__.py
|
law/contrib/cms/__init__.py
|
__all__ = ["CMSJobDashboard", "BundleCMSSW"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
|
__all__ = ["CMSJobDashboard", "BundleCMSSW", "Site", "lfn_to_pfn"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
from law.contrib.cms.util import Site, lfn_to_pfn
|
Load utils in cms contrib package.
|
Load utils in cms contrib package.
|
Python
|
bsd-3-clause
|
riga/law,riga/law
|
python
|
## Code Before:
__all__ = ["CMSJobDashboard", "BundleCMSSW"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
## Instruction:
Load utils in cms contrib package.
## Code After:
__all__ = ["CMSJobDashboard", "BundleCMSSW", "Site", "lfn_to_pfn"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
from law.contrib.cms.util import Site, lfn_to_pfn
|
...
__all__ = ["CMSJobDashboard", "BundleCMSSW", "Site", "lfn_to_pfn"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
from law.contrib.cms.util import Site, lfn_to_pfn
...
|
2cb406cac1a6faf1f2f79c1376ceac39871fb96e
|
pony_barn/build-django.py
|
pony_barn/build-django.py
|
import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, '../tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests', run_cwd='django')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
|
import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, 'tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
|
Make it so that django build actually uses its own code.
|
Make it so that django build actually uses its own code.
|
Python
|
mit
|
ericholscher/pony_barn,ericholscher/pony_barn
|
python
|
## Code Before:
import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, '../tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests', run_cwd='django')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
## Instruction:
Make it so that django build actually uses its own code.
## Code After:
import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, 'tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
|
// ... existing code ...
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, 'tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests')
]
def setup(self):
// ... rest of the code ...
|
4d1a50b1765cd5da46ac9633b3c91eb5e0a72f3d
|
tests/toolchains/test_atmel_studio.py
|
tests/toolchains/test_atmel_studio.py
|
import sys
import unittest
from asninja.parser import AtmelStudioProject
from asninja.toolchains.atmel_studio import *
class TestAtmelStudioGccToolchain(unittest.TestCase):
def test_constructor(self):
tc = AtmelStudioGccToolchain('arm-')
self.assertEqual('arm-', tc.path)
self.assertEqual('arm', tc.tool_type)
@unittest.skipUnless(sys.platform.startswith("win"), "requires Windows")
def test_from_project(self):
asp = AtmelStudioProject('Korsar3.cproj', 'Korsar3')
tc = AtmelStudioGccToolchain.from_project(asp)
self.assertEqual('C:\\Program Files (x86)\\Atmel\\Atmel Studio 6.2\\..\\Atmel Toolchain\\ARM GCC\\Native\\'
'4.8.1437\\arm-gnu-toolchain\\bin', tc.path)
self.assertEqual('arm', tc.tool_type)
@unittest.skipUnless(sys.platform.startswith("win"), "requires Windows")
def test_read_reg(self):
pass
if __name__ == '__main__':
unittest.main()
|
import sys
import unittest
from unittest.mock import patch
from asninja.parser import AtmelStudioProject
from asninja.toolchains.atmel_studio import *
class TestAtmelStudioGccToolchain(unittest.TestCase):
def test_constructor(self):
tc = AtmelStudioGccToolchain('arm-')
self.assertEqual('arm-', tc.path)
self.assertEqual('arm', tc.tool_type)
@patch.object(AtmelStudioGccToolchain, 'read_reg', return_value = 'DUMMY_PATH')
def test_from_project(self, mock_method):
asp = AtmelStudioProject('Korsar3.cproj', 'Korsar3')
tc = AtmelStudioGccToolchain.from_project(asp)
self.assertEqual('DUMMY_PATH\\..\\Atmel Toolchain\\ARM GCC\\Native\\4.8.1437\\arm-gnu-toolchain\\bin', tc.path)
self.assertEqual('arm', tc.tool_type)
@unittest.skipUnless(sys.platform.startswith("win"), "requires Windows")
def test_read_reg(self):
pass
if __name__ == '__main__':
unittest.main()
|
Use unittest.mock to mock Windows Registry read.
|
tests: Use unittest.mock to mock Windows Registry read.
|
Python
|
mit
|
alunegov/AtmelStudioToNinja
|
python
|
## Code Before:
import sys
import unittest
from asninja.parser import AtmelStudioProject
from asninja.toolchains.atmel_studio import *
class TestAtmelStudioGccToolchain(unittest.TestCase):
def test_constructor(self):
tc = AtmelStudioGccToolchain('arm-')
self.assertEqual('arm-', tc.path)
self.assertEqual('arm', tc.tool_type)
@unittest.skipUnless(sys.platform.startswith("win"), "requires Windows")
def test_from_project(self):
asp = AtmelStudioProject('Korsar3.cproj', 'Korsar3')
tc = AtmelStudioGccToolchain.from_project(asp)
self.assertEqual('C:\\Program Files (x86)\\Atmel\\Atmel Studio 6.2\\..\\Atmel Toolchain\\ARM GCC\\Native\\'
'4.8.1437\\arm-gnu-toolchain\\bin', tc.path)
self.assertEqual('arm', tc.tool_type)
@unittest.skipUnless(sys.platform.startswith("win"), "requires Windows")
def test_read_reg(self):
pass
if __name__ == '__main__':
unittest.main()
## Instruction:
tests: Use unittest.mock to mock Windows Registry read.
## Code After:
import sys
import unittest
from unittest.mock import patch
from asninja.parser import AtmelStudioProject
from asninja.toolchains.atmel_studio import *
class TestAtmelStudioGccToolchain(unittest.TestCase):
def test_constructor(self):
tc = AtmelStudioGccToolchain('arm-')
self.assertEqual('arm-', tc.path)
self.assertEqual('arm', tc.tool_type)
@patch.object(AtmelStudioGccToolchain, 'read_reg', return_value = 'DUMMY_PATH')
def test_from_project(self, mock_method):
asp = AtmelStudioProject('Korsar3.cproj', 'Korsar3')
tc = AtmelStudioGccToolchain.from_project(asp)
self.assertEqual('DUMMY_PATH\\..\\Atmel Toolchain\\ARM GCC\\Native\\4.8.1437\\arm-gnu-toolchain\\bin', tc.path)
self.assertEqual('arm', tc.tool_type)
@unittest.skipUnless(sys.platform.startswith("win"), "requires Windows")
def test_read_reg(self):
pass
if __name__ == '__main__':
unittest.main()
|
...
import sys
import unittest
from unittest.mock import patch
from asninja.parser import AtmelStudioProject
from asninja.toolchains.atmel_studio import *
...
self.assertEqual('arm-', tc.path)
self.assertEqual('arm', tc.tool_type)
@patch.object(AtmelStudioGccToolchain, 'read_reg', return_value = 'DUMMY_PATH')
def test_from_project(self, mock_method):
asp = AtmelStudioProject('Korsar3.cproj', 'Korsar3')
tc = AtmelStudioGccToolchain.from_project(asp)
self.assertEqual('DUMMY_PATH\\..\\Atmel Toolchain\\ARM GCC\\Native\\4.8.1437\\arm-gnu-toolchain\\bin', tc.path)
self.assertEqual('arm', tc.tool_type)
@unittest.skipUnless(sys.platform.startswith("win"), "requires Windows")
...
|
e4b516f612d60eac8cb278d1c8675b3fdbad8652
|
windmill/server/__init__.py
|
windmill/server/__init__.py
|
import wsgi, convergence
forwarding_conditions = [lambda e : 'google.com/safebrowsing/downloads' not in e['reconstructed_url']]
def add_forward_condition(condition):
forwarding_conditions.append(condition)
def remove_forward_condition(condition):
while condition in forwarding_conditions:
forwarding_conditions.remove(condition)
|
import wsgi, convergence
forwarding_conditions = [
lambda e : 'google.com/safebrowsing/downloads' not in e['reconstructed_url'],
lambda e : 'mozilla.org/en-US/firefox/livebookmarks.html' not in e['reconstructed_url'],
]
def add_forward_condition(condition):
forwarding_conditions.append(condition)
def remove_forward_condition(condition):
while condition in forwarding_conditions:
forwarding_conditions.remove(condition)
|
Fix for [ticket:281]. Do not forward livebookmarks request.
|
Fix for [ticket:281]. Do not forward livebookmarks request.
git-svn-id: 87d19257dd11500985d055ec4730e446075a5f07@1261 78c7df6f-8922-0410-bcd3-9426b1ad491b
|
Python
|
apache-2.0
|
ept/windmill,ept/windmill,ept/windmill
|
python
|
## Code Before:
import wsgi, convergence
forwarding_conditions = [lambda e : 'google.com/safebrowsing/downloads' not in e['reconstructed_url']]
def add_forward_condition(condition):
forwarding_conditions.append(condition)
def remove_forward_condition(condition):
while condition in forwarding_conditions:
forwarding_conditions.remove(condition)
## Instruction:
Fix for [ticket:281]. Do not forward livebookmarks request.
git-svn-id: 87d19257dd11500985d055ec4730e446075a5f07@1261 78c7df6f-8922-0410-bcd3-9426b1ad491b
## Code After:
import wsgi, convergence
forwarding_conditions = [
lambda e : 'google.com/safebrowsing/downloads' not in e['reconstructed_url'],
lambda e : 'mozilla.org/en-US/firefox/livebookmarks.html' not in e['reconstructed_url'],
]
def add_forward_condition(condition):
forwarding_conditions.append(condition)
def remove_forward_condition(condition):
while condition in forwarding_conditions:
forwarding_conditions.remove(condition)
|
...
import wsgi, convergence
forwarding_conditions = [
lambda e : 'google.com/safebrowsing/downloads' not in e['reconstructed_url'],
lambda e : 'mozilla.org/en-US/firefox/livebookmarks.html' not in e['reconstructed_url'],
]
def add_forward_condition(condition):
forwarding_conditions.append(condition)
...
|
3fb9e3a841f15e5c84a860f5a3836fce628adfea
|
genlib/delput.c
|
genlib/delput.c
|
void
delput(float x, float *a, int *l)
{
/* put value in delay line. See delset. x is float */
*(a + (*l)++) = x;
if(*(l) >= *(l+1)) *l -= *(l+1);
}
|
/* put value in delay line. See delset. x is float */
void
delput(float x, float *a, int *l)
{
int index = l[0];
a[index] = x;
l[0]++;
if (l[0] >= l[2])
l[0] -= l[2];
}
|
Update to new bookkeeping array format.
|
Update to new bookkeeping array format.
|
C
|
apache-2.0
|
RTcmix/RTcmix,RTcmix/RTcmix,RTcmix/RTcmix,RTcmix/RTcmix,RTcmix/RTcmix,RTcmix/RTcmix
|
c
|
## Code Before:
void
delput(float x, float *a, int *l)
{
/* put value in delay line. See delset. x is float */
*(a + (*l)++) = x;
if(*(l) >= *(l+1)) *l -= *(l+1);
}
## Instruction:
Update to new bookkeeping array format.
## Code After:
/* put value in delay line. See delset. x is float */
void
delput(float x, float *a, int *l)
{
int index = l[0];
a[index] = x;
l[0]++;
if (l[0] >= l[2])
l[0] -= l[2];
}
|
// ... existing code ...
/* put value in delay line. See delset. x is float */
void
delput(float x, float *a, int *l)
{
int index = l[0];
a[index] = x;
l[0]++;
if (l[0] >= l[2])
l[0] -= l[2];
}
// ... rest of the code ...
|
044a051c637f256613ff307caf3ae0126d09b049
|
backend/unichat/views.py
|
backend/unichat/views.py
|
from django.http import JsonResponse, HttpResponse, HttpResponseBadRequest
from django.views.decorators.csrf import csrf_exempt
import json
from helpers import get_school_list, check_signup_email
def get_schools(request):
resp = JsonResponse({'schools': get_school_list()})
resp['Access-Control-Allow-Origin'] = 'http://localhost:3000'
return resp
@csrf_exempt
def signup(request):
if request.method == "POST":
signup_parameters = json.loads(request.body.decode('utf-8'))
if check_signup_email(signup_parameters['email']):
resp = HttpResponse('Signup OK')
else:
resp = HttpResponseBadRequest('Invalid univesity email')
elif request.method == "OPTIONS":
resp = HttpResponse('')
resp['Access-Control-Allow-Headers'] = 'Content-Type'
else:
resp = HttpResponseBadRequest('')
resp['Access-Control-Allow-Origin'] = 'http://localhost:3000'
return resp
|
from django.http import JsonResponse, HttpResponse, HttpResponseBadRequest
from django.views.decorators.csrf import csrf_exempt
import json
from helpers import get_school_list, check_signup_email
def get_schools(request):
resp = JsonResponse({'schools': get_school_list()})
resp['Access-Control-Allow-Origin'] = 'http://localhost:3000'
return resp
@csrf_exempt
def signup(request):
if request.method == "POST":
signup_parameters = json.loads(request.body.decode('utf-8'))
if check_signup_email(signup_parameters['email']):
resp = HttpResponse('Signup OK')
else:
resp = HttpResponseBadRequest('Invalid univesity email')
elif request.method == "OPTIONS":
resp = HttpResponse('')
resp['Access-Control-Allow-Headers'] = 'Content-Type'
else:
resp = HttpResponseBadRequest('Invalid request method')
resp['Access-Control-Allow-Origin'] = 'http://localhost:3000'
return resp
|
Add error message to BadRequest signup response for invalid method
|
Add error message to BadRequest signup response for invalid method
|
Python
|
mit
|
dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet
|
python
|
## Code Before:
from django.http import JsonResponse, HttpResponse, HttpResponseBadRequest
from django.views.decorators.csrf import csrf_exempt
import json
from helpers import get_school_list, check_signup_email
def get_schools(request):
resp = JsonResponse({'schools': get_school_list()})
resp['Access-Control-Allow-Origin'] = 'http://localhost:3000'
return resp
@csrf_exempt
def signup(request):
if request.method == "POST":
signup_parameters = json.loads(request.body.decode('utf-8'))
if check_signup_email(signup_parameters['email']):
resp = HttpResponse('Signup OK')
else:
resp = HttpResponseBadRequest('Invalid univesity email')
elif request.method == "OPTIONS":
resp = HttpResponse('')
resp['Access-Control-Allow-Headers'] = 'Content-Type'
else:
resp = HttpResponseBadRequest('')
resp['Access-Control-Allow-Origin'] = 'http://localhost:3000'
return resp
## Instruction:
Add error message to BadRequest signup response for invalid method
## Code After:
from django.http import JsonResponse, HttpResponse, HttpResponseBadRequest
from django.views.decorators.csrf import csrf_exempt
import json
from helpers import get_school_list, check_signup_email
def get_schools(request):
resp = JsonResponse({'schools': get_school_list()})
resp['Access-Control-Allow-Origin'] = 'http://localhost:3000'
return resp
@csrf_exempt
def signup(request):
if request.method == "POST":
signup_parameters = json.loads(request.body.decode('utf-8'))
if check_signup_email(signup_parameters['email']):
resp = HttpResponse('Signup OK')
else:
resp = HttpResponseBadRequest('Invalid univesity email')
elif request.method == "OPTIONS":
resp = HttpResponse('')
resp['Access-Control-Allow-Headers'] = 'Content-Type'
else:
resp = HttpResponseBadRequest('Invalid request method')
resp['Access-Control-Allow-Origin'] = 'http://localhost:3000'
return resp
|
...
resp = HttpResponse('')
resp['Access-Control-Allow-Headers'] = 'Content-Type'
else:
resp = HttpResponseBadRequest('Invalid request method')
resp['Access-Control-Allow-Origin'] = 'http://localhost:3000'
return resp
...
|
b1bb08a8ee246774b43e521e8f754cdcc88c418b
|
gasistafelice/gas/management.py
|
gasistafelice/gas/management.py
|
from django.db.models.signals import post_syncdb
from gasistafelice.gas.workflow_data import workflow_dict
def init_workflows(app, created_models, verbosity, **kwargs):
app_label = app.__name__.split('.')[-2]
if app_label == 'workflows' and created_models: # `worklows` app was syncronized for the first time
# now that all necessary tables are in the DB, we can register our workflows
for name, w in workflow_dict.items():
w.register_workflow()
if verbosity == 2:
# give some feedback to the user
print "Workflow %s was successfully registered." % name
return
post_syncdb.connect(init_workflows)
|
from django.db.models.signals import post_syncdb
from gasistafelice.gas.workflow_data import workflow_dict
def init_workflows(app, created_models, verbosity, **kwargs):
app_label = app.__name__.split('.')[-2]
if app_label == 'workflows' and "Workflow" in created_models: # `worklows` app was syncronized for the first time
# now that all necessary tables are in the DB, we can register our workflows
for name, w in workflow_dict.items():
w.register_workflow()
if verbosity == 2:
# give some feedback to the user
print "Workflow %s was successfully registered." % name
return
post_syncdb.connect(init_workflows)
|
Fix in post_syncdb workflow registration
|
Fix in post_syncdb workflow registration
|
Python
|
agpl-3.0
|
michelesr/gasistafelice,befair/gasistafelice,matteo88/gasistafelice,matteo88/gasistafelice,OrlyMar/gasistafelice,kobe25/gasistafelice,michelesr/gasistafelice,kobe25/gasistafelice,befair/gasistafelice,michelesr/gasistafelice,matteo88/gasistafelice,kobe25/gasistafelice,feroda/gasistafelice,OrlyMar/gasistafelice,michelesr/gasistafelice,OrlyMar/gasistafelice,feroda/gasistafelice,OrlyMar/gasistafelice,kobe25/gasistafelice,matteo88/gasistafelice,befair/gasistafelice,feroda/gasistafelice,befair/gasistafelice,feroda/gasistafelice
|
python
|
## Code Before:
from django.db.models.signals import post_syncdb
from gasistafelice.gas.workflow_data import workflow_dict
def init_workflows(app, created_models, verbosity, **kwargs):
app_label = app.__name__.split('.')[-2]
if app_label == 'workflows' and created_models: # `worklows` app was syncronized for the first time
# now that all necessary tables are in the DB, we can register our workflows
for name, w in workflow_dict.items():
w.register_workflow()
if verbosity == 2:
# give some feedback to the user
print "Workflow %s was successfully registered." % name
return
post_syncdb.connect(init_workflows)
## Instruction:
Fix in post_syncdb workflow registration
## Code After:
from django.db.models.signals import post_syncdb
from gasistafelice.gas.workflow_data import workflow_dict
def init_workflows(app, created_models, verbosity, **kwargs):
app_label = app.__name__.split('.')[-2]
if app_label == 'workflows' and "Workflow" in created_models: # `worklows` app was syncronized for the first time
# now that all necessary tables are in the DB, we can register our workflows
for name, w in workflow_dict.items():
w.register_workflow()
if verbosity == 2:
# give some feedback to the user
print "Workflow %s was successfully registered." % name
return
post_syncdb.connect(init_workflows)
|
...
def init_workflows(app, created_models, verbosity, **kwargs):
app_label = app.__name__.split('.')[-2]
if app_label == 'workflows' and "Workflow" in created_models: # `worklows` app was syncronized for the first time
# now that all necessary tables are in the DB, we can register our workflows
for name, w in workflow_dict.items():
w.register_workflow()
...
|
b3845ae85ebef5e2ad4136ca8a1d3cc6149f4098
|
CriminalIntent/app/src/main/java/com/bignerdranch/android/criminalintent/CrimeListFragment.java
|
CriminalIntent/app/src/main/java/com/bignerdranch/android/criminalintent/CrimeListFragment.java
|
package com.bignerdranch.android.criminalintent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
public class CrimeListFragment extends Fragment {
private RecyclerView mRecyclerView;
@Nullable @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_crime_list, container, false);
mRecyclerView = (RecyclerView) view.findViewById(R.id.crime_recycler_view);
mRecyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));
return view;
}
}
|
package com.bignerdranch.android.criminalintent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
public class CrimeListFragment extends Fragment {
private RecyclerView mRecyclerView;
@Nullable @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_crime_list, container, false);
mRecyclerView = (RecyclerView) view.findViewById(R.id.crime_recycler_view);
mRecyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));
return view;
}
private class CrimeHolder extends RecyclerView.ViewHolder {
public TextView mTitleTextView;
public CrimeHolder(View itemView) {
super (itemView);
mTitleTextView = (TextView) itemView;
}
}
}
|
Create new ViewHolder for Crime
|
Create new ViewHolder for Crime
|
Java
|
apache-2.0
|
dcordero/BigNerdRanch-Android
|
java
|
## Code Before:
package com.bignerdranch.android.criminalintent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
public class CrimeListFragment extends Fragment {
private RecyclerView mRecyclerView;
@Nullable @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_crime_list, container, false);
mRecyclerView = (RecyclerView) view.findViewById(R.id.crime_recycler_view);
mRecyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));
return view;
}
}
## Instruction:
Create new ViewHolder for Crime
## Code After:
package com.bignerdranch.android.criminalintent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
public class CrimeListFragment extends Fragment {
private RecyclerView mRecyclerView;
@Nullable @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_crime_list, container, false);
mRecyclerView = (RecyclerView) view.findViewById(R.id.crime_recycler_view);
mRecyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));
return view;
}
private class CrimeHolder extends RecyclerView.ViewHolder {
public TextView mTitleTextView;
public CrimeHolder(View itemView) {
super (itemView);
mTitleTextView = (TextView) itemView;
}
}
}
|
# ... existing code ...
return view;
}
private class CrimeHolder extends RecyclerView.ViewHolder {
public TextView mTitleTextView;
public CrimeHolder(View itemView) {
super (itemView);
mTitleTextView = (TextView) itemView;
}
}
}
# ... rest of the code ...
|
403250e91905079c7480bb8ea54cf2d2a301022f
|
moto/s3/urls.py
|
moto/s3/urls.py
|
from .responses import S3ResponseInstance
url_bases = [
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]
url_paths = {
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>[a-zA-Z0-9\-_.]+)': S3ResponseInstance.key_response,
}
|
from .responses import S3ResponseInstance
url_bases = [
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]
url_paths = {
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>.+)': S3ResponseInstance.key_response,
}
|
Fix S3 URL Regex to allow slashes in key names.
|
Fix S3 URL Regex to allow slashes in key names.
|
Python
|
apache-2.0
|
heddle317/moto,gjtempleton/moto,rocky4570/moto,jszwedko/moto,dbfr3qs/moto,okomestudio/moto,whummer/moto,2rs2ts/moto,Brett55/moto,ZuluPro/moto,whummer/moto,kefo/moto,ZuluPro/moto,botify-labs/moto,rocky4570/moto,rocky4570/moto,whummer/moto,spulec/moto,Brett55/moto,william-richard/moto,2rs2ts/moto,rocky4570/moto,jrydberg/moto,gjtempleton/moto,Affirm/moto,botify-labs/moto,heddle317/moto,kennethd/moto,botify-labs/moto,kefo/moto,kefo/moto,Affirm/moto,im-auld/moto,ZuluPro/moto,botify-labs/moto,dbfr3qs/moto,okomestudio/moto,william-richard/moto,rocky4570/moto,Brett55/moto,pior/moto,okomestudio/moto,Affirm/moto,Brett55/moto,DataDog/moto,whummer/moto,IlyaSukhanov/moto,2rs2ts/moto,spulec/moto,spulec/moto,Affirm/moto,Brett55/moto,Brett55/moto,whummer/moto,whummer/moto,2rs2ts/moto,heddle317/moto,gjtempleton/moto,okomestudio/moto,heddle317/moto,zonk1024/moto,spulec/moto,behanceops/moto,dbfr3qs/moto,dbfr3qs/moto,dbfr3qs/moto,ZuluPro/moto,mrucci/moto,kefo/moto,alexdebrie/moto,rouge8/moto,dbfr3qs/moto,Affirm/moto,2mf/moto,gjtempleton/moto,ZuluPro/moto,spulec/moto,ZuluPro/moto,heddle317/moto,okomestudio/moto,Affirm/moto,kefo/moto,william-richard/moto,william-richard/moto,ImmobilienScout24/moto,william-richard/moto,rocky4570/moto,braintreeps/moto,okomestudio/moto,andresriancho/moto,araines/moto,2rs2ts/moto,ludia/moto,spulec/moto,silveregg/moto,gjtempleton/moto,riccardomc/moto,jotes/moto,tootedom/moto,botify-labs/moto,EarthmanT/moto,botify-labs/moto,william-richard/moto
|
python
|
## Code Before:
from .responses import S3ResponseInstance
url_bases = [
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]
url_paths = {
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>[a-zA-Z0-9\-_.]+)': S3ResponseInstance.key_response,
}
## Instruction:
Fix S3 URL Regex to allow slashes in key names.
## Code After:
from .responses import S3ResponseInstance
url_bases = [
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]
url_paths = {
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>.+)': S3ResponseInstance.key_response,
}
|
# ... existing code ...
url_paths = {
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>.+)': S3ResponseInstance.key_response,
}
# ... rest of the code ...
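A minimal, runnable check of the regex change above, using only Python's re module; the nested key name is invented for illustration and the pattern is isolated from the '{0}/' route prefix, so this is a sketch rather than part of the original commit:
import re
OLD_KEY = re.compile(r'(?P<key_name>[a-zA-Z0-9\-_.]+)$')   # original character class, no '/'
NEW_KEY = re.compile(r'(?P<key_name>.+)$')                  # relaxed pattern from the fix
key_with_slash = 'folder/subfolder/file.txt'                # hypothetical nested S3 key
assert OLD_KEY.match(key_with_slash) is None                # old pattern cannot cross the first '/'
assert NEW_KEY.match(key_with_slash).group('key_name') == key_with_slash
print('nested keys now match:', key_with_slash)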
|
f8c8c14e0ca6f8e3174a14f519b395a4e0bfe043
|
setup.py
|
setup.py
|
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
def readme():
with open('README.pandoc') as f:
return f.read()
setup( name = 'pyop'
, version = '0.0.1'
, description = 'Matrix free linear transformations'
, long_description = readme()
, keywords = [ 'Linear Algebra', 'Linear Transformations']
, author = 'Daniel Hensley and Ryan Orendorff'
, author_email = '[email protected]'
, license = 'BSD'
, classifiers =
[ 'Development Status :: 1 - Planning'
, 'Intended Audience :: Science/Research'
, 'License :: OSI Approved :: BSD License'
, 'Programming Language :: Python'
, 'Topic :: Scientific/Engineering :: Mathematics'
, 'Topic :: Software Development :: Libraries :: Python Modules'
]
, packages = ['pyop']
, install_requires = ['six >= 1.6', 'numpy >= 1.8']
, zip_safe = False
, tests_require = ['pytest']
, cmdclass = {'test': PyTest}
)
|
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
def readme():
with open('README.pandoc') as f:
return f.read()
setup( name = 'pyop'
, version = '0.0.1'
, description = 'Matrix free linear transformations'
, long_description = readme()
, keywords = [ 'Linear Algebra', 'Linear Transformations']
, author = 'Daniel Hensley and Ryan Orendorff'
, author_email = '[email protected]'
, license = 'BSD'
, classifiers =
[ 'Development Status :: 1 - Planning'
, 'Intended Audience :: Science/Research'
, 'License :: OSI Approved :: BSD License'
, 'Programming Language :: Python'
, 'Topic :: Scientific/Engineering :: Mathematics'
, 'Topic :: Software Development :: Libraries :: Python Modules'
]
, packages = ['pyop']
, install_requires =
[ 'six >= 1.6'
, 'numpy >= 1.8'
, 'scipy >= 0.14.0'
]
, zip_safe = False
, tests_require = ['pytest']
, cmdclass = {'test': PyTest}
)
|
Install requirements now include SciPy.
|
Install requirements now include SciPy.
Used in the operators subpackage, and will likely be used elsewhere due
to the sparse package being inside scipy.
|
Python
|
bsd-3-clause
|
ryanorendorff/pyop
|
python
|
## Code Before:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
def readme():
with open('README.pandoc') as f:
return f.read()
setup( name = 'pyop'
, version = '0.0.1'
, description = 'Matrix free linear transformations'
, long_description = readme()
, keywords = [ 'Linear Algebra', 'Linear Transformations']
, author = 'Daniel Hensley and Ryan Orendorff'
, author_email = '[email protected]'
, license = 'BSD'
, classifiers =
[ 'Development Status :: 1 - Planning'
, 'Intended Audience :: Science/Research'
, 'License :: OSI Approved :: BSD License'
, 'Programming Language :: Python'
, 'Topic :: Scientific/Engineering :: Mathematics'
, 'Topic :: Software Development :: Libraries :: Python Modules'
]
, packages = ['pyop']
, install_requires = ['six >= 1.6', 'numpy >= 1.8']
, zip_safe = False
, tests_require = ['pytest']
, cmdclass = {'test': PyTest}
)
## Instruction:
Install requirements now include SciPy.
Used in the operators subpackage, and will likely be used elsewhere due
to the sparse package being inside scipy.
## Code After:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
def readme():
with open('README.pandoc') as f:
return f.read()
setup( name = 'pyop'
, version = '0.0.1'
, description = 'Matrix free linear transformations'
, long_description = readme()
, keywords = [ 'Linear Algebra', 'Linear Transformations']
, author = 'Daniel Hensley and Ryan Orendorff'
, author_email = '[email protected]'
, license = 'BSD'
, classifiers =
[ 'Development Status :: 1 - Planning'
, 'Intended Audience :: Science/Research'
, 'License :: OSI Approved :: BSD License'
, 'Programming Language :: Python'
, 'Topic :: Scientific/Engineering :: Mathematics'
, 'Topic :: Software Development :: Libraries :: Python Modules'
]
, packages = ['pyop']
, install_requires =
[ 'six >= 1.6'
, 'numpy >= 1.8'
, 'scipy >= 0.14.0'
]
, zip_safe = False
, tests_require = ['pytest']
, cmdclass = {'test': PyTest}
)
|
# ... existing code ...
, 'Topic :: Software Development :: Libraries :: Python Modules'
]
, packages = ['pyop']
, install_requires =
[ 'six >= 1.6'
, 'numpy >= 1.8'
, 'scipy >= 0.14.0'
]
, zip_safe = False
, tests_require = ['pytest']
, cmdclass = {'test': PyTest}
# ... rest of the code ...
|
c297b882cbe5139062672dbb295c2b42adfc1ed8
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='bwapi',
version='3.2.0',
description='A software development kit for the Brandwatch API',
url='https://github.com/BrandwatchLtd/api_sdk',
author='Amy Barker, Jamie Lebovics, Paul Siegel and Jessica Bowden',
author_email='[email protected], [email protected], [email protected]',
license='License :: OSI Approved :: MIT License',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'
],
py_modules=['bwproject', 'bwresources', 'bwdata', 'filters'],
install_requires=['requests'],
tests_require=['responses']
)
|
from setuptools import setup
setup(
name='bwapi',
version='3.2.0',
description='A software development kit for the Brandwatch API',
url='https://github.com/BrandwatchLtd/api_sdk',
author='Amy Barker, Jamie Lebovics, Paul Siegel and Jessica Bowden',
author_email='[email protected], [email protected], [email protected]',
license='License :: OSI Approved :: MIT License',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'
],
py_modules=['bwproject', 'bwresources', 'bwdata', 'filters'],
scripts=['authenticate.py'],
install_requires=['requests'],
tests_require=['responses']
)
|
Add authenticate.py to installed scripts
|
Add authenticate.py to installed scripts
|
Python
|
mit
|
anthonybu/api_sdk,BrandwatchLtd/api_sdk
|
python
|
## Code Before:
from setuptools import setup
setup(
name='bwapi',
version='3.2.0',
description='A software development kit for the Brandwatch API',
url='https://github.com/BrandwatchLtd/api_sdk',
author='Amy Barker, Jamie Lebovics, Paul Siegel and Jessica Bowden',
author_email='[email protected], [email protected], [email protected]',
license='License :: OSI Approved :: MIT License',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'
],
py_modules=['bwproject', 'bwresources', 'bwdata', 'filters'],
install_requires=['requests'],
tests_require=['responses']
)
## Instruction:
Add authenticate.py to installed scripts
## Code After:
from setuptools import setup
setup(
name='bwapi',
version='3.2.0',
description='A software development kit for the Brandwatch API',
url='https://github.com/BrandwatchLtd/api_sdk',
author='Amy Barker, Jamie Lebovics, Paul Siegel and Jessica Bowden',
author_email='[email protected], [email protected], [email protected]',
license='License :: OSI Approved :: MIT License',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'
],
py_modules=['bwproject', 'bwresources', 'bwdata', 'filters'],
scripts=['authenticate.py'],
install_requires=['requests'],
tests_require=['responses']
)
|
...
py_modules=['bwproject', 'bwresources', 'bwdata', 'filters'],
scripts=['authenticate.py'],
install_requires=['requests'],
tests_require=['responses']
...
|
0d7921b4dcf5e3b511fdb54fc30ebc0547b14d47
|
django_dzenlog/urls.py
|
django_dzenlog/urls.py
|
from django.conf.urls.defaults import *
from models import GeneralPost
from feeds import LatestPosts
post_list = {
'queryset': GeneralPost.objects.all(),
}
feeds = {
'all': LatestPosts,
}
urlpatterns = patterns('django.views.generic',
(r'^(?P<slug>[a-z0-9-]+)/$', 'list_detail.object_detail', post_list, 'dzenlog-post-details'),
(r'^$', 'list_detail.object_list', post_list, 'dzenlog-post-list'),
)
urlpatterns += patterns('django.contrib.syndication.views',
(r'^rss/(?P<url>.*)/$', 'feed', {'feed_dict': feeds}, 'dzenlog-feeds'),
)
|
from django.conf.urls.defaults import *
from models import GeneralPost
from feeds import latest
post_list = {
'queryset': GeneralPost.objects.all(),
}
feeds = {
'all': latest(GeneralPost, 'dzenlog-post-list'),
}
urlpatterns = patterns('django.views.generic',
(r'^(?P<slug>[a-z0-9-]+)/$', 'list_detail.object_detail', post_list, 'dzenlog-post-details'),
(r'^$', 'list_detail.object_list', post_list, 'dzenlog-post-list'),
)
urlpatterns += patterns('django.contrib.syndication.views',
(r'^rss/(?P<url>.*)/$', 'feed', {'feed_dict': feeds}, 'dzenlog-feeds'),
)
|
Use 'latest' to generate feed for GeneralPost.
|
Use 'latest' to generate feed for GeneralPost.
|
Python
|
bsd-3-clause
|
svetlyak40wt/django-dzenlog
|
python
|
## Code Before:
from django.conf.urls.defaults import *
from models import GeneralPost
from feeds import LatestPosts
post_list = {
'queryset': GeneralPost.objects.all(),
}
feeds = {
'all': LatestPosts,
}
urlpatterns = patterns('django.views.generic',
(r'^(?P<slug>[a-z0-9-]+)/$', 'list_detail.object_detail', post_list, 'dzenlog-post-details'),
(r'^$', 'list_detail.object_list', post_list, 'dzenlog-post-list'),
)
urlpatterns += patterns('django.contrib.syndication.views',
(r'^rss/(?P<url>.*)/$', 'feed', {'feed_dict': feeds}, 'dzenlog-feeds'),
)
## Instruction:
Use 'latest' to generate feed for GeneralPost.
## Code After:
from django.conf.urls.defaults import *
from models import GeneralPost
from feeds import latest
post_list = {
'queryset': GeneralPost.objects.all(),
}
feeds = {
'all': latest(GeneralPost, 'dzenlog-post-list'),
}
urlpatterns = patterns('django.views.generic',
(r'^(?P<slug>[a-z0-9-]+)/$', 'list_detail.object_detail', post_list, 'dzenlog-post-details'),
(r'^$', 'list_detail.object_list', post_list, 'dzenlog-post-list'),
)
urlpatterns += patterns('django.contrib.syndication.views',
(r'^rss/(?P<url>.*)/$', 'feed', {'feed_dict': feeds}, 'dzenlog-feeds'),
)
|
...
from django.conf.urls.defaults import *
from models import GeneralPost
from feeds import latest
post_list = {
'queryset': GeneralPost.objects.all(),
...
}
feeds = {
'all': latest(GeneralPost, 'dzenlog-post-list'),
}
urlpatterns = patterns('django.views.generic',
...
|
741545dcf58fdfaf882d797d3ce4f7607ca0dad4
|
kobo/client/commands/cmd_resubmit_tasks.py
|
kobo/client/commands/cmd_resubmit_tasks.py
|
from __future__ import print_function
import sys
from kobo.client.task_watcher import TaskWatcher
from kobo.client import ClientCommand
class Resubmit_Tasks(ClientCommand):
"""resubmit failed tasks"""
enabled = True
def options(self):
self.parser.usage = "%%prog %s task_id [task_id...]" % self.normalized_name
self.parser.add_option("--force", action="store_true", help="Resubmit also tasks which are closed properly.")
def run(self, *args, **kwargs):
if len(args) == 0:
self.parser.error("At least one task id must be specified.")
username = kwargs.pop("username", None)
password = kwargs.pop("password", None)
tasks = args
self.set_hub(username, password)
resubmitted_tasks = []
failed = False
for task_id in tasks:
try:
resubmitted_id = self.hub.client.resubmit_task(task_id, kwargs.pop("force", False))
resubmitted_tasks.append(resubmitted_id)
except Exception as ex:
failed = True
print(ex)
TaskWatcher.watch_tasks(self.hub, resubmitted_tasks)
if failed:
sys.exit(1)
|
from __future__ import print_function
import sys
from kobo.client.task_watcher import TaskWatcher
from kobo.client import ClientCommand
class Resubmit_Tasks(ClientCommand):
"""resubmit failed tasks"""
enabled = True
def options(self):
self.parser.usage = "%%prog %s task_id [task_id...]" % self.normalized_name
self.parser.add_option("--force", action="store_true", help="Resubmit also tasks which are closed properly.")
self.parser.add_option("--nowait", default=False, action="store_true", help="Don't wait until tasks finish.")
def run(self, *args, **kwargs):
if len(args) == 0:
self.parser.error("At least one task id must be specified.")
username = kwargs.pop("username", None)
password = kwargs.pop("password", None)
tasks = args
self.set_hub(username, password)
resubmitted_tasks = []
failed = False
for task_id in tasks:
try:
resubmitted_id = self.hub.client.resubmit_task(task_id, kwargs.pop("force", False))
resubmitted_tasks.append(resubmitted_id)
except Exception as ex:
failed = True
print(ex)
if not kwargs.get('nowait'):
TaskWatcher.watch_tasks(self.hub, resubmitted_tasks)
if failed:
sys.exit(1)
|
Add --nowait option to resubmit-tasks cmd
|
Add --nowait option to resubmit-tasks cmd
In some use cases, waiting until the tasks finish is undesirable, so a
--nowait option should be provided.
|
Python
|
lgpl-2.1
|
release-engineering/kobo,release-engineering/kobo,release-engineering/kobo,release-engineering/kobo
|
python
|
## Code Before:
from __future__ import print_function
import sys
from kobo.client.task_watcher import TaskWatcher
from kobo.client import ClientCommand
class Resubmit_Tasks(ClientCommand):
"""resubmit failed tasks"""
enabled = True
def options(self):
self.parser.usage = "%%prog %s task_id [task_id...]" % self.normalized_name
self.parser.add_option("--force", action="store_true", help="Resubmit also tasks which are closed properly.")
def run(self, *args, **kwargs):
if len(args) == 0:
self.parser.error("At least one task id must be specified.")
username = kwargs.pop("username", None)
password = kwargs.pop("password", None)
tasks = args
self.set_hub(username, password)
resubmitted_tasks = []
failed = False
for task_id in tasks:
try:
resubmitted_id = self.hub.client.resubmit_task(task_id, kwargs.pop("force", False))
resubmitted_tasks.append(resubmitted_id)
except Exception as ex:
failed = True
print(ex)
TaskWatcher.watch_tasks(self.hub, resubmitted_tasks)
if failed:
sys.exit(1)
## Instruction:
Add --nowait option to resubmit-tasks cmd
In some use cases, waiting until the tasks finish is undesirable, so a
--nowait option should be provided.
## Code After:
from __future__ import print_function
import sys
from kobo.client.task_watcher import TaskWatcher
from kobo.client import ClientCommand
class Resubmit_Tasks(ClientCommand):
"""resubmit failed tasks"""
enabled = True
def options(self):
self.parser.usage = "%%prog %s task_id [task_id...]" % self.normalized_name
self.parser.add_option("--force", action="store_true", help="Resubmit also tasks which are closed properly.")
self.parser.add_option("--nowait", default=False, action="store_true", help="Don't wait until tasks finish.")
def run(self, *args, **kwargs):
if len(args) == 0:
self.parser.error("At least one task id must be specified.")
username = kwargs.pop("username", None)
password = kwargs.pop("password", None)
tasks = args
self.set_hub(username, password)
resubmitted_tasks = []
failed = False
for task_id in tasks:
try:
resubmitted_id = self.hub.client.resubmit_task(task_id, kwargs.pop("force", False))
resubmitted_tasks.append(resubmitted_id)
except Exception as ex:
failed = True
print(ex)
if not kwargs.get('nowait'):
TaskWatcher.watch_tasks(self.hub, resubmitted_tasks)
if failed:
sys.exit(1)
|
# ... existing code ...
def options(self):
self.parser.usage = "%%prog %s task_id [task_id...]" % self.normalized_name
self.parser.add_option("--force", action="store_true", help="Resubmit also tasks which are closed properly.")
self.parser.add_option("--nowait", default=False, action="store_true", help="Don't wait until tasks finish.")
def run(self, *args, **kwargs):
# ... modified code ...
failed = True
print(ex)
if not kwargs.get('nowait'):
TaskWatcher.watch_tasks(self.hub, resubmitted_tasks)
if failed:
sys.exit(1)
# ... rest of the code ...
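A standalone sketch of the new option handling, using only optparse from the standard library; the task ids and the watcher stub are made up here, since kobo itself is not imported:
from optparse import OptionParser
def watch_tasks_stub(task_ids):
    # stand-in for TaskWatcher.watch_tasks, just reports what would be watched
    print('watching tasks:', task_ids)
parser = OptionParser()
parser.add_option('--nowait', default=False, action='store_true',
                  help="Don't wait until tasks finish.")
for argv in ([], ['--nowait']):
    options, _ = parser.parse_args(argv)
    kwargs = vars(options)
    # mirrors the new guard in run(): only watch when --nowait was not given
    if not kwargs.get('nowait'):
        watch_tasks_stub([101, 102])
    else:
        print('--nowait given, returning without watching')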
|
389ca2213c2ba3c86c783372e3e933a12f90506e
|
ckanext/requestdata/controllers/admin.py
|
ckanext/requestdata/controllers/admin.py
|
from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(BaseController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html')
|
from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
from ckan.controllers.admin import AdminController
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(AdminController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html')
|
Extend Admin instead of Base controller
|
Extend Admin instead of Base controller
|
Python
|
agpl-3.0
|
ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata
|
python
|
## Code Before:
from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(BaseController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html')
## Instruction:
Extend Admin instead of Base controller
## Code After:
from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
from ckan.controllers.admin import AdminController
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(AdminController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html')
|
# ... existing code ...
from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
from ckan.controllers.admin import AdminController
get_action = logic.get_action
NotFound = logic.NotFound
# ... modified code ...
BaseController = base.BaseController
class AdminController(AdminController):
def email(self):
'''Email template admin tab.
# ... rest of the code ...
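The class above reuses the name AdminController for a subclass of the imported controller; a tiny self-contained sketch of that shadowing pattern, with dummy classes standing in for CKAN's real controllers:
class BaseController:
    def index(self):
        return 'built-in admin index'
class AdminController(BaseController):
    def config(self):
        return 'built-in config tab'
# Re-binding the same name extends the imported controller instead of replacing it,
# so the built-in admin tabs keep working while new ones are added.
class AdminController(AdminController):
    def email(self):
        return 'email template admin tab'
c = AdminController()
print(c.config())  # inherited from the original admin controller
print(c.email())   # added by the extension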
|
d2ac548441523e2ed4d0ac824e5972ae48be3b19
|
packages/Python/lldbsuite/test/lang/swift/closure_shortcuts/TestClosureShortcuts.py
|
packages/Python/lldbsuite/test/lang/swift/closure_shortcuts/TestClosureShortcuts.py
|
import lldbsuite.test.lldbinline as lldbinline
from lldbsuite.test.decorators import *
lldbinline.MakeInlineTest(__file__, globals(),
decorators=[swiftTest,skipUnlessDarwin])
|
import lldbsuite.test.lldbinline as lldbinline
from lldbsuite.test.decorators import *
lldbinline.MakeInlineTest(__file__, globals(),
decorators=[swiftTest])
|
Fix typo and run everywhere.
|
Fix typo and run everywhere.
|
Python
|
apache-2.0
|
apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb
|
python
|
## Code Before:
import lldbsuite.test.lldbinline as lldbinline
from lldbsuite.test.decorators import *
lldbinline.MakeInlineTest(__file__, globals(),
decorators=[swiftTest,skipUnlessDarwin])
## Instruction:
Fix typo and run everywhere.
## Code After:
import lldbsuite.test.lldbinline as lldbinline
from lldbsuite.test.decorators import *
lldbinline.MakeInlineTest(__file__, globals(),
decorators=[swiftTest])
|
...
from lldbsuite.test.decorators import *
lldbinline.MakeInlineTest(__file__, globals(),
decorators=[swiftTest])
...
|
9db544202867446850a7e72c37496a10b49364a5
|
Semaphore-OE-Batch-Tools/src/main/java/com/smartlogic/SparqlUpdateOptions.java
|
Semaphore-OE-Batch-Tools/src/main/java/com/smartlogic/SparqlUpdateOptions.java
|
package com.smartlogic;
/**
* Options to send with SPARQL update calls.
*/
public class SparqlUpdateOptions {
/**
* Set to true to accept constraint warnings and proceed with changes.
* Normally this is set to true when runCheckConstraints is set to false;
*/
public boolean acceptWarnings = false;
/**
* Set to false to not run check constraints when running SPARQL update.
*/
public boolean runCheckConstraints = true;
/**
* Set to false to not run edit rules when SPARQL Update is sent.
*/
public boolean runEditRules = true;
}
|
package com.smartlogic;
/**
* Options to send with SPARQL update calls.
*/
public class SparqlUpdateOptions {
/**
* Set to true to accept constraint warnings and proceed with changes.
* Normally this is set to true when runCheckConstraints is set to false;
*/
public boolean acceptWarnings = false;
/**
* Set to false to not run check constraints when running SPARQL update.
*/
public boolean runCheckConstraints = true;
/**
* Set to false to not run edit rules when SPARQL Update is sent.
*/
public boolean runEditRules = true;
public boolean isAcceptWarnings() {
return acceptWarnings;
}
public void setAcceptWarnings(boolean acceptWarnings) {
this.acceptWarnings = acceptWarnings;
}
public boolean isRunCheckConstraints() {
return runCheckConstraints;
}
public void setRunCheckConstraints(boolean runCheckConstraints) {
this.runCheckConstraints = runCheckConstraints;
}
public boolean isRunEditRules() {
return runEditRules;
}
public void setRunEditRules(boolean runEditRules) {
this.runEditRules = runEditRules;
}
}
|
Add getters/setters, because Java is a very verbose language and Spring is inflexible. Leaving class variables public as they can also be used by clients that don't require bean accessors, or those that are already using them.
|
Add getters/setters, because Java is a very verbose language and Spring is inflexible.
Leaving class variables public as they can also be used by clients that don't require bean accessors, or those that are already using them.
|
Java
|
apache-2.0
|
Smartlogic-Semaphore-Limited/Java-APIs
|
java
|
## Code Before:
package com.smartlogic;
/**
* Options to send with SPARQL update calls.
*/
public class SparqlUpdateOptions {
/**
* Set to true to accept constraint warnings and proceed with changes.
* Normally this is set to true when runCheckConstraints is set to false;
*/
public boolean acceptWarnings = false;
/**
* Set to false to not run check constraints when running SPARQL update.
*/
public boolean runCheckConstraints = true;
/**
* Set to false to not run edit rules when SPARQL Update is sent.
*/
public boolean runEditRules = true;
}
## Instruction:
Add getters/setters, because Java is a very verbose language and Spring is inflexible.
Leaving class variables public as they can also be used by clients that don't require bean accessors, or those that are already using them.
## Code After:
package com.smartlogic;
/**
* Options to send with SPARQL update calls.
*/
public class SparqlUpdateOptions {
/**
* Set to true to accept constraint warnings and proceed with changes.
* Normally this is set to true when runCheckConstraints is set to false;
*/
public boolean acceptWarnings = false;
/**
* Set to false to not run check constraints when running SPARQL update.
*/
public boolean runCheckConstraints = true;
/**
* Set to false to not run edit rules when SPARQL Update is sent.
*/
public boolean runEditRules = true;
public boolean isAcceptWarnings() {
return acceptWarnings;
}
public void setAcceptWarnings(boolean acceptWarnings) {
this.acceptWarnings = acceptWarnings;
}
public boolean isRunCheckConstraints() {
return runCheckConstraints;
}
public void setRunCheckConstraints(boolean runCheckConstraints) {
this.runCheckConstraints = runCheckConstraints;
}
public boolean isRunEditRules() {
return runEditRules;
}
public void setRunEditRules(boolean runEditRules) {
this.runEditRules = runEditRules;
}
}
|
# ... existing code ...
* Set to false to not run edit rules when SPARQL Update is sent.
*/
public boolean runEditRules = true;
public boolean isAcceptWarnings() {
return acceptWarnings;
}
public void setAcceptWarnings(boolean acceptWarnings) {
this.acceptWarnings = acceptWarnings;
}
public boolean isRunCheckConstraints() {
return runCheckConstraints;
}
public void setRunCheckConstraints(boolean runCheckConstraints) {
this.runCheckConstraints = runCheckConstraints;
}
public boolean isRunEditRules() {
return runEditRules;
}
public void setRunEditRules(boolean runEditRules) {
this.runEditRules = runEditRules;
}
}
# ... rest of the code ...
|
28fe6a0a1e5e5d8781854aad4f22d368d3d73b12
|
ld37/common/utils/libutils.py
|
ld37/common/utils/libutils.py
|
import math
def update_image_rect(image, rect):
image_rect = image.get_rect()
image_rect.x = rect.x
image_rect.y = rect.y
def distance_between_rects(rect1, rect2):
(r1_center_x, r1_center_y) = rect1.center
(r2_center_x, r2_center_y) = rect2.center
x_squared = (r1_center_x - r2_center_x)**2
y_squared = (r1_center_y - r2_center_y)**2
math.sqrt(x_squared + y_squared)
|
import math
def update_image_rect(image, rect):
image_rect = image.get_rect()
image_rect.x = rect.x
image_rect.y = rect.y
def distance_between_rects(rect1, rect2):
(r1_center_x, r1_center_y) = rect1.center
(r2_center_x, r2_center_y) = rect2.center
x_squared = (r2_center_x - r1_center_x)**2
y_squared = (r2_center_y - r1_center_y)**2
return math.sqrt(x_squared + y_squared)
|
Update distance formula to be more standard
|
Update distance formula to be more standard
|
Python
|
mit
|
Daihiro/ldjam37,maximx1/ldjam37
|
python
|
## Code Before:
import math
def update_image_rect(image, rect):
image_rect = image.get_rect()
image_rect.x = rect.x
image_rect.y = rect.y
def distance_between_rects(rect1, rect2):
(r1_center_x, r1_center_y) = rect1.center
(r2_center_x, r2_center_y) = rect2.center
x_squared = (r1_center_x - r2_center_x)**2
y_squared = (r1_center_y - r2_center_y)**2
math.sqrt(x_squared + y_squared)
## Instruction:
Update distance formula to be more standard
## Code After:
import math
def update_image_rect(image, rect):
image_rect = image.get_rect()
image_rect.x = rect.x
image_rect.y = rect.y
def distance_between_rects(rect1, rect2):
(r1_center_x, r1_center_y) = rect1.center
(r2_center_x, r2_center_y) = rect2.center
x_squared = (r2_center_x - r1_center_x)**2
y_squared = (r2_center_y - r1_center_y)**2
return math.sqrt(x_squared + y_squared)
|
...
def distance_between_rects(rect1, rect2):
(r1_center_x, r1_center_y) = rect1.center
(r2_center_x, r2_center_y) = rect2.center
x_squared = (r2_center_x - r1_center_x)**2
y_squared = (r2_center_y - r1_center_y)**2
return math.sqrt(x_squared + y_squared)
...
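A quick self-contained check of the corrected helper; a namedtuple stands in for pygame's Rect (only the .center attribute is used, and pygame is not imported), and a 3-4-5 triangle gives an easy expected value:
import math
from collections import namedtuple
FakeRect = namedtuple('FakeRect', 'center')
def distance_between_rects(rect1, rect2):
    (r1_center_x, r1_center_y) = rect1.center
    (r2_center_x, r2_center_y) = rect2.center
    x_squared = (r2_center_x - r1_center_x) ** 2
    y_squared = (r2_center_y - r1_center_y) ** 2
    return math.sqrt(x_squared + y_squared)  # the pre-fix version computed this but never returned it
print(distance_between_rects(FakeRect((0, 0)), FakeRect((3, 4))))  # -> 5.0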
|
7c09e2df765b466b65570116fe8d0cc5f42d30dd
|
indra/sources/phosphoELM/api.py
|
indra/sources/phosphoELM/api.py
|
import csv
ppelm_s3_key = ''
def process_from_dump(fname=None, delimiter='\t'):
if fname is None:
# ToDo Get from S3
return []
else:
with open(fname, 'r') as f:
csv_reader = csv.reader(f.readlines(), delimiter=delimiter)
ppelm_json = _get_json_from_entry_rows(csv_reader)
return ppelm_json
def _get_json_from_entry_rows(row_iter):
ppelm_json = []
columns = next(row_iter)
for entry in row_iter:
row_dict = {columns[n]: entry[n]
for n in range(len(columns))}
ppelm_json.append(row_dict)
return ppelm_json
|
import csv
from .processor import PhosphoELMPRocessor
s3_bucket = 'bigmech'
ppelm_s3_key = ''
def process_from_dump(fname=None, delimiter='\t'):
if fname is None:
# ToDo Get from S3
return []
else:
with open(fname, 'r') as f:
csv_reader = csv.reader(f.readlines(), delimiter=delimiter)
ppelm_json = _get_json_from_entry_rows(csv_reader)
return PhosphoELMPRocessor(file_dump_json=ppelm_json)
def _get_json_from_entry_rows(row_iter):
ppelm_json = []
columns = next(row_iter)
for entry in row_iter:
row_dict = {columns[n]: entry[n]
for n in range(len(columns))}
ppelm_json.append(row_dict)
return ppelm_json
|
Return processor w processed statements
|
Return processor w processed statements
|
Python
|
bsd-2-clause
|
johnbachman/belpy,bgyori/indra,bgyori/indra,sorgerlab/indra,sorgerlab/belpy,bgyori/indra,sorgerlab/indra,sorgerlab/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/indra,sorgerlab/belpy,johnbachman/indra,johnbachman/belpy
|
python
|
## Code Before:
import csv
ppelm_s3_key = ''
def process_from_dump(fname=None, delimiter='\t'):
if fname is None:
# ToDo Get from S3
return []
else:
with open(fname, 'r') as f:
csv_reader = csv.reader(f.readlines(), delimiter=delimiter)
ppelm_json = _get_json_from_entry_rows(csv_reader)
return ppelm_json
def _get_json_from_entry_rows(row_iter):
ppelm_json = []
columns = next(row_iter)
for entry in row_iter:
row_dict = {columns[n]: entry[n]
for n in range(len(columns))}
ppelm_json.append(row_dict)
return ppelm_json
## Instruction:
Return processor w processed statements
## Code After:
import csv
from .processor import PhosphoELMPRocessor
s3_bucket = 'bigmech'
ppelm_s3_key = ''
def process_from_dump(fname=None, delimiter='\t'):
if fname is None:
# ToDo Get from S3
return []
else:
with open(fname, 'r') as f:
csv_reader = csv.reader(f.readlines(), delimiter=delimiter)
ppelm_json = _get_json_from_entry_rows(csv_reader)
return PhosphoELMPRocessor(file_dump_json=ppelm_json)
def _get_json_from_entry_rows(row_iter):
ppelm_json = []
columns = next(row_iter)
for entry in row_iter:
row_dict = {columns[n]: entry[n]
for n in range(len(columns))}
ppelm_json.append(row_dict)
return ppelm_json
|
...
import csv
from .processor import PhosphoELMPRocessor
s3_bucket = 'bigmech'
ppelm_s3_key = ''
...
with open(fname, 'r') as f:
csv_reader = csv.reader(f.readlines(), delimiter=delimiter)
ppelm_json = _get_json_from_entry_rows(csv_reader)
return PhosphoELMPRocessor(file_dump_json=ppelm_json)
def _get_json_from_entry_rows(row_iter):
...
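A standalone illustration of the row-to-dict helper shown above, fed from an in-memory tab-separated dump instead of a file; the column names and values are invented for the example:
import csv
from io import StringIO
def _get_json_from_entry_rows(row_iter):
    ppelm_json = []
    columns = next(row_iter)  # the first row holds the column names
    for entry in row_iter:
        row_dict = {columns[n]: entry[n] for n in range(len(columns))}
        ppelm_json.append(row_dict)
    return ppelm_json
dump = 'acc\tsequence\tposition\nP12345\tMKT\t2\n'  # made-up phospho.ELM-style rows
reader = csv.reader(StringIO(dump).readlines(), delimiter='\t')
print(_get_json_from_entry_rows(reader))
# [{'acc': 'P12345', 'sequence': 'MKT', 'position': '2'}]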
|
7d34f5abd1926a89a42602b0548023214451bf8c
|
java/solrcontrib/umichnormalizers/src/java/edu/umich/lib/solr/analysis/LCCNNormalizerFilterFactory.java
|
java/solrcontrib/umichnormalizers/src/java/edu/umich/lib/solr/analysis/LCCNNormalizerFilterFactory.java
|
package edu.umich.lib.solr.analysis;
import java.util.Map;
import org.apache.lucene.analysis.util.TokenFilterFactory;
import org.apache.lucene.analysis.TokenStream;
public class LCCNNormalizerFilterFactory extends TokenFilterFactory
{
protected LCCNNormalizerFilterFactory(Map<String, String> args) {
super(args);
}
public LCCNNormalizerFilter create(TokenStream input)
{
return new LCCNNormalizerFilter(input);
}
}
|
package edu.umich.lib.solr.analysis;
import java.util.Map;
import org.apache.lucene.analysis.util.TokenFilterFactory;
import org.apache.lucene.analysis.TokenStream;
public class LCCNNormalizerFilterFactory extends TokenFilterFactory
{
public LCCNNormalizerFilterFactory(Map<String, String> args) {
super(args);
}
@Override
public LCCNNormalizerFilter create(TokenStream input)
{
return new LCCNNormalizerFilter(input);
}
}
|
Fix constructor in LCCNNormalizerFilterFactory too
|
Fix constructor in LCCNNormalizerFilterFactory too
|
Java
|
bsd-3-clause
|
billdueber/lib.umich.edu-solr-stuff,billdueber/solr-libstdnum-normalize
|
java
|
## Code Before:
package edu.umich.lib.solr.analysis;
import java.util.Map;
import org.apache.lucene.analysis.util.TokenFilterFactory;
import org.apache.lucene.analysis.TokenStream;
public class LCCNNormalizerFilterFactory extends TokenFilterFactory
{
protected LCCNNormalizerFilterFactory(Map<String, String> args) {
super(args);
}
public LCCNNormalizerFilter create(TokenStream input)
{
return new LCCNNormalizerFilter(input);
}
}
## Instruction:
Fix constructor in LCCNNormalizerFilterFactory too
## Code After:
package edu.umich.lib.solr.analysis;
import java.util.Map;
import org.apache.lucene.analysis.util.TokenFilterFactory;
import org.apache.lucene.analysis.TokenStream;
public class LCCNNormalizerFilterFactory extends TokenFilterFactory
{
public LCCNNormalizerFilterFactory(Map<String, String> args) {
super(args);
}
@Override
public LCCNNormalizerFilter create(TokenStream input)
{
return new LCCNNormalizerFilter(input);
}
}
|
// ... existing code ...
public class LCCNNormalizerFilterFactory extends TokenFilterFactory
{
public LCCNNormalizerFilterFactory(Map<String, String> args) {
super(args);
}
@Override
public LCCNNormalizerFilter create(TokenStream input)
{
return new LCCNNormalizerFilter(input);
// ... rest of the code ...
|
4bb6e49b61ee27bc8c9fde2c62823cbd4fc3249b
|
src/main/java/org/scribe/extractors/JsonTokenExtractor.java
|
src/main/java/org/scribe/extractors/JsonTokenExtractor.java
|
package org.scribe.extractors;
import java.util.regex.*;
import org.scribe.exceptions.*;
import org.scribe.model.*;
import org.scribe.utils.*;
public class JsonTokenExtractor implements AccessTokenExtractor
{
private Pattern accessTokenPattern = Pattern.compile("\"access_token\":\\s*\"(\\S*?)\"");
public Token extract(String response)
{
Preconditions.checkEmptyString(response, "Cannot extract a token from a null or empty String");
Matcher matcher = accessTokenPattern.matcher(response);
if(matcher.find())
{
return new Token(matcher.group(1), "", response);
}
else
{
throw new OAuthException("Cannot extract an acces token. Response was: " + response);
}
}
}
|
package org.scribe.extractors;
import java.util.regex.*;
import org.scribe.exceptions.*;
import org.scribe.model.*;
import org.scribe.utils.*;
public class JsonTokenExtractor implements AccessTokenExtractor
{
private Pattern accessTokenPattern = Pattern.compile("\"access_token\":\\s*\"(\\S*?)\"");
@Override
public Token extract(String response)
{
Preconditions.checkEmptyString(response, "Cannot extract a token from a null or empty String");
Matcher matcher = accessTokenPattern.matcher(response);
if(matcher.find())
{
return new Token(matcher.group(1), "", response);
}
else
{
throw new OAuthException("Cannot extract an acces token. Response was: " + response);
}
}
}
|
Revert "Removed the @Override annotation as it resulted in a compilation error."
|
Revert "Removed the @Override annotation as it resulted in a compilation error."
This reverts commit 4aa62564872d9c7a1c494eba507fbbc6f99469d1.
|
Java
|
mit
|
chooco13/scribejava,apakulov/scribe-java,adamnfish/scribe-java,allenj/scribe-java,matsumo/scribe-java,alexsmirnov/scribe-java,ludovicc/scribe-java,rpuch/scribe-java,adamnfish/scribe-java,Vreasy/scribe-java,dsyer/scribe-java,mcxiaoke/oauth-simple,vincentjames501/scribe-java,Dictanova/scribe-java,fengshao0907/scribe-java,visun/scribeoauth,ardock/subscribe,vincentjames501/scribe-java,cdryden/scribe-java,luiz158/scribe-java,cndoublehero/scribe-java,luiz158/scribe-java,sethhitch/scribe-java,jplock/scribe-java,visun/scribeoauth,madhur/scribe-java,Kuohong/scribe-java,codepath/scribe-java,brettwooldridge/scribe-java,xtang/scribe-java,robertoestivill/scribe-java,Dannemannet/scribe-java,Dictanova/scribe-java,ruixie/scribe-java,krishna-satyamurthy-hp-com/scribe-java,robertoestivill/scribe-java,Vreasy/scribe-java,joelittlejohn/scribe-java,lethalbrains/scribe-java,songfj/scribe-java,bistri/scribe-java,gsun3000/scribe-java,leleuj/scribe-java,philihp/scribe-java,daclouds/scribe-java,fluxtream/scribe-java,skela/scribe-java,nekosune/scribe-java,XeroAPI/scribe-java,developez/scribe-java,krishna-satyamurthy-hp-com/scribe-java,KodeSeeker/GmailRcptScanv1,adamnengland/scribe-java,Kailashrb/scribe-java,mcxiaoke/oauth-simple,InQBarna/scribe-java,daclouds/scribe-java,allenj/scribe-java,causecode/scribe-java,Huddle/scribe-java,qchamp/scribe-java,gsun3000/scribe-java,sethhitch/scribe-java,fernandezpablo85/scribe-java,philihp/scribe-java,ruixie/scribe-java,apakulov/scribe-java,qchamp/scribe-java,scribejava/scribejava,dsyer/scribe-java,timvoet/scribe-java,ardock/scribe-java,bipanpreet/java,skela/scribe-java,xtang/scribe-java,ardock/scribe-java,ZCube/scribe-java,cruiser/scribe-java,Huddle/scribe-java,cdryden/scribe-java,fluxtream/scribe-java,arriolac/scribe-java,zsigmond-czine-everit/scribe-java,xclydes/scribe-java,adamnengland/scribe-java,InQBarna/scribe-java,ZCube/scribe-java,KodeSeeker/GmailRcptScanv1,coryshrmn/scribe-java,leleuj/scribe-java,cruiser/scribe-java,markus1978/scribe-java,cng1985/scribejava,madhur/scribe-java,codepath/scribe-java,joelittlejohn/scribe-java,timvoet/scribe-java,lethalbrains/scribe-java,developez/scribe-java,matsumo/scribe-java,zsigmond-czine-everit/scribe-java,fengshao0907/scribe-java,pratikpparikh/scribe-java,Kailashrb/scribe-java,songfj/scribe-java,causecode/scribe-java,Dannemannet/scribe-java,chaitrarao04/scribe-java,nekosune/scribe-java,xclydes/scribe-java,matthias-m/scribe-java,pratikpparikh/scribe-java,jplock/scribe-java,Kuohong/scribe-java,brettwooldridge/scribe-java,rpuch/scribe-java,mlaccetti/scribe-java,mebigfatguy/scribe-java,coryshrmn/scribe-java,cndoublehero/scribe-java,matthias-m/scribe-java,ludovicc/scribe-java,mebigfatguy/scribe-java,markus1978/scribe-java,arriolac/scribe-java,chaitrarao04/scribe-java,alexsmirnov/scribe-java
|
java
|
## Code Before:
package org.scribe.extractors;
import java.util.regex.*;
import org.scribe.exceptions.*;
import org.scribe.model.*;
import org.scribe.utils.*;
public class JsonTokenExtractor implements AccessTokenExtractor
{
private Pattern accessTokenPattern = Pattern.compile("\"access_token\":\\s*\"(\\S*?)\"");
public Token extract(String response)
{
Preconditions.checkEmptyString(response, "Cannot extract a token from a null or empty String");
Matcher matcher = accessTokenPattern.matcher(response);
if(matcher.find())
{
return new Token(matcher.group(1), "", response);
}
else
{
throw new OAuthException("Cannot extract an acces token. Response was: " + response);
}
}
}
## Instruction:
Revert "Removed the @Override annotation as it resulted in a compilation error."
This reverts commit 4aa62564872d9c7a1c494eba507fbbc6f99469d1.
## Code After:
package org.scribe.extractors;
import java.util.regex.*;
import org.scribe.exceptions.*;
import org.scribe.model.*;
import org.scribe.utils.*;
public class JsonTokenExtractor implements AccessTokenExtractor
{
private Pattern accessTokenPattern = Pattern.compile("\"access_token\":\\s*\"(\\S*?)\"");
@Override
public Token extract(String response)
{
Preconditions.checkEmptyString(response, "Cannot extract a token from a null or empty String");
Matcher matcher = accessTokenPattern.matcher(response);
if(matcher.find())
{
return new Token(matcher.group(1), "", response);
}
else
{
throw new OAuthException("Cannot extract an acces token. Response was: " + response);
}
}
}
|
# ... existing code ...
{
private Pattern accessTokenPattern = Pattern.compile("\"access_token\":\\s*\"(\\S*?)\"");
@Override
public Token extract(String response)
{
Preconditions.checkEmptyString(response, "Cannot extract a token from a null or empty String");
# ... rest of the code ...
|
4927a1c29d258b1ab7c70ffecff6904b808480eb
|
bokeh/validation/warnings.py
|
bokeh/validation/warnings.py
|
''' Define standard warning codes and messages for Bokeh validation checks.
1000 : *MISSING_RENDERERS*
A |Plot| object has no renderers configured (will result in a blank plot).
1001 : *NO_GLYPH_RENDERERS*
A |Plot| object has no glyph renderers (will result in an empty plot frame).
1002 : *EMPTY_LAYOUT*
A layout model has no children (will result in a blank layout).
9999 : *EXT*
Indicates that a custom warning check has failed.
'''
codes = {
1000: ("MISSING_RENDERERS", "Plot has no renderers"),
1001: ("NO_GLYPH_RENDERERS", "Plot has no glyph renderers"),
1002: ("EMPTY_LAYOUT", "Layout has no children"),
1003: ("COLON_IN_CATEGORY_LABEL", "Category label contains colons"),
9999: ("EXT", "Custom extension reports warning"),
}
for code in codes:
exec("%s = %d" % (codes[code][0], code))
|
''' Define standard warning codes and messages for Bokeh validation checks.
1000 : *MISSING_RENDERERS*
A |Plot| object has no renderers configured (will result in a blank plot).
1001 : *NO_GLYPH_RENDERERS*
A |Plot| object has no glyph renderers (will result in an empty plot frame).
1002 : *EMPTY_LAYOUT*
A layout model has no children (will result in a blank layout).
1003 : *COLON_IN_CATEGORY_LABEL*
Category label contains colons (will result in a blank layout).
9999 : *EXT*
Indicates that a custom warning check has failed.
'''
codes = {
1000: ("MISSING_RENDERERS", "Plot has no renderers"),
1001: ("NO_GLYPH_RENDERERS", "Plot has no glyph renderers"),
1002: ("EMPTY_LAYOUT", "Layout has no children"),
1003: ("COLON_IN_CATEGORY_LABEL", "Category label contains colons"),
9999: ("EXT", "Custom extension reports warning"),
}
for code in codes:
exec("%s = %d" % (codes[code][0], code))
|
Add module level documentation for colon warning
|
Add module level documentation for colon warning
|
Python
|
bsd-3-clause
|
ericmjl/bokeh,timsnyder/bokeh,draperjames/bokeh,philippjfr/bokeh,mindriot101/bokeh,aavanian/bokeh,aavanian/bokeh,phobson/bokeh,daodaoliang/bokeh,KasperPRasmussen/bokeh,rothnic/bokeh,dennisobrien/bokeh,phobson/bokeh,DuCorey/bokeh,timsnyder/bokeh,jplourenco/bokeh,ericdill/bokeh,srinathv/bokeh,bokeh/bokeh,srinathv/bokeh,justacec/bokeh,clairetang6/bokeh,DuCorey/bokeh,ChinaQuants/bokeh,paultcochrane/bokeh,deeplook/bokeh,mindriot101/bokeh,ericdill/bokeh,philippjfr/bokeh,dennisobrien/bokeh,KasperPRasmussen/bokeh,ericmjl/bokeh,htygithub/bokeh,khkaminska/bokeh,ptitjano/bokeh,saifrahmed/bokeh,ericmjl/bokeh,matbra/bokeh,schoolie/bokeh,justacec/bokeh,jakirkham/bokeh,saifrahmed/bokeh,rothnic/bokeh,aavanian/bokeh,aiguofer/bokeh,draperjames/bokeh,rs2/bokeh,clairetang6/bokeh,Karel-van-de-Plassche/bokeh,percyfal/bokeh,schoolie/bokeh,rothnic/bokeh,ericdill/bokeh,jakirkham/bokeh,azjps/bokeh,gpfreitas/bokeh,khkaminska/bokeh,srinathv/bokeh,evidation-health/bokeh,aavanian/bokeh,bokeh/bokeh,jakirkham/bokeh,jplourenco/bokeh,daodaoliang/bokeh,stonebig/bokeh,mindriot101/bokeh,deeplook/bokeh,tacaswell/bokeh,Karel-van-de-Plassche/bokeh,schoolie/bokeh,aiguofer/bokeh,timsnyder/bokeh,ChinaQuants/bokeh,ptitjano/bokeh,Karel-van-de-Plassche/bokeh,schoolie/bokeh,evidation-health/bokeh,percyfal/bokeh,khkaminska/bokeh,stonebig/bokeh,matbra/bokeh,quasiben/bokeh,muku42/bokeh,xguse/bokeh,percyfal/bokeh,phobson/bokeh,muku42/bokeh,mindriot101/bokeh,timsnyder/bokeh,quasiben/bokeh,ptitjano/bokeh,philippjfr/bokeh,DuCorey/bokeh,rs2/bokeh,ptitjano/bokeh,paultcochrane/bokeh,philippjfr/bokeh,schoolie/bokeh,azjps/bokeh,KasperPRasmussen/bokeh,ChinaQuants/bokeh,maxalbert/bokeh,xguse/bokeh,Karel-van-de-Plassche/bokeh,muku42/bokeh,dennisobrien/bokeh,muku42/bokeh,tacaswell/bokeh,matbra/bokeh,gpfreitas/bokeh,paultcochrane/bokeh,Karel-van-de-Plassche/bokeh,bokeh/bokeh,ericmjl/bokeh,jakirkham/bokeh,rs2/bokeh,timsnyder/bokeh,msarahan/bokeh,azjps/bokeh,evidation-health/bokeh,htygithub/bokeh,ChinaQuants/bokeh,htygithub/bokeh,ericdill/bokeh,draperjames/bokeh,maxalbert/bokeh,percyfal/bokeh,rs2/bokeh,phobson/bokeh,bokeh/bokeh,phobson/bokeh,xguse/bokeh,gpfreitas/bokeh,maxalbert/bokeh,daodaoliang/bokeh,quasiben/bokeh,evidation-health/bokeh,deeplook/bokeh,khkaminska/bokeh,jplourenco/bokeh,percyfal/bokeh,msarahan/bokeh,stonebig/bokeh,paultcochrane/bokeh,htygithub/bokeh,maxalbert/bokeh,matbra/bokeh,msarahan/bokeh,srinathv/bokeh,DuCorey/bokeh,saifrahmed/bokeh,ptitjano/bokeh,dennisobrien/bokeh,jplourenco/bokeh,daodaoliang/bokeh,DuCorey/bokeh,dennisobrien/bokeh,gpfreitas/bokeh,stonebig/bokeh,philippjfr/bokeh,tacaswell/bokeh,justacec/bokeh,jakirkham/bokeh,draperjames/bokeh,KasperPRasmussen/bokeh,msarahan/bokeh,justacec/bokeh,clairetang6/bokeh,tacaswell/bokeh,xguse/bokeh,azjps/bokeh,draperjames/bokeh,rs2/bokeh,KasperPRasmussen/bokeh,bokeh/bokeh,clairetang6/bokeh,aiguofer/bokeh,aiguofer/bokeh,azjps/bokeh,ericmjl/bokeh,deeplook/bokeh,aiguofer/bokeh,rothnic/bokeh,aavanian/bokeh,saifrahmed/bokeh
|
python
|
## Code Before:
''' Define standard warning codes and messages for Bokeh validation checks.
1000 : *MISSING_RENDERERS*
A |Plot| object has no renderers configured (will result in a blank plot).
1001 : *NO_GLYPH_RENDERERS*
A |Plot| object has no glyph renderers (will result in an empty plot frame).
1002 : *EMPTY_LAYOUT*
A layout model has no children (will result in a blank layout).
9999 : *EXT*
Indicates that a custom warning check has failed.
'''
codes = {
1000: ("MISSING_RENDERERS", "Plot has no renderers"),
1001: ("NO_GLYPH_RENDERERS", "Plot has no glyph renderers"),
1002: ("EMPTY_LAYOUT", "Layout has no children"),
1003: ("COLON_IN_CATEGORY_LABEL", "Category label contains colons"),
9999: ("EXT", "Custom extension reports warning"),
}
for code in codes:
exec("%s = %d" % (codes[code][0], code))
## Instruction:
Add module level documentation for colon warning
## Code After:
''' Define standard warning codes and messages for Bokeh validation checks.
1000 : *MISSING_RENDERERS*
A |Plot| object has no renderers configured (will result in a blank plot).
1001 : *NO_GLYPH_RENDERERS*
A |Plot| object has no glyph renderers (will result in an empty plot frame).
1002 : *EMPTY_LAYOUT*
A layout model has no children (will result in a blank layout).
1003 : *COLON_IN_CATEGORY_LABEL*
Category label contains colons (will result in a blank layout).
9999 : *EXT*
Indicates that a custom warning check has failed.
'''
codes = {
1000: ("MISSING_RENDERERS", "Plot has no renderers"),
1001: ("NO_GLYPH_RENDERERS", "Plot has no glyph renderers"),
1002: ("EMPTY_LAYOUT", "Layout has no children"),
1003: ("COLON_IN_CATEGORY_LABEL", "Category label contains colons"),
9999: ("EXT", "Custom extension reports warning"),
}
for code in codes:
exec("%s = %d" % (codes[code][0], code))
|
# ... existing code ...
1002 : *EMPTY_LAYOUT*
A layout model has no children (will result in a blank layout).
1003 : *COLON_IN_CATEGORY_LABEL*
Category label contains colons (will result in a blank layout).
9999 : *EXT*
Indicates that a custom warning check has failed.
# ... modified code ...
'''
codes = {
1000: ("MISSING_RENDERERS", "Plot has no renderers"),
1001: ("NO_GLYPH_RENDERERS", "Plot has no glyph renderers"),
1002: ("EMPTY_LAYOUT", "Layout has no children"),
1003: ("COLON_IN_CATEGORY_LABEL", "Category label contains colons"),
9999: ("EXT", "Custom extension reports warning"),
}
for code in codes:
# ... rest of the code ...
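The exec loop at the bottom of that module turns each numeric code into a module-level constant; a minimal sketch of the same pattern in isolation, with two codes copied from the snippet and nothing imported from bokeh:
codes = {
    1000: ('MISSING_RENDERERS', 'Plot has no renderers'),
    1003: ('COLON_IN_CATEGORY_LABEL', 'Category label contains colons'),
}
# each entry becomes a named constant, e.g. MISSING_RENDERERS = 1000
for code in codes:
    exec('%s = %d' % (codes[code][0], code))
print(MISSING_RENDERERS)                  # 1000
print(codes[COLON_IN_CATEGORY_LABEL][1])  # 'Category label contains colons'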
|
effb5d796e55dbf28de2ec8d6711fcf2724bc62f
|
src/core/migrations/0059_auto_20211013_1657.py
|
src/core/migrations/0059_auto_20211013_1657.py
|
from __future__ import unicode_literals
from django.db import migrations
def set_about_plugin_to_hpe(apps, schema_editor):
Plugin = apps.get_model('utils', 'Plugin')
Plugin.objects.filter(
name='About',
).update(
homepage_element=True,
)
class Migration(migrations.Migration):
dependencies = [
('core', '0058_merge_20211005_0909'),
]
operations = [
migrations.RunPython(set_about_plugin_to_hpe, reverse_code=migrations.RunPython.noop)
]
|
from __future__ import unicode_literals
from django.db import migrations
def set_about_plugin_to_hpe(apps, schema_editor):
Plugin = apps.get_model('utils', 'Plugin')
Plugin.objects.filter(
name='About',
).update(
homepage_element=True,
)
class Migration(migrations.Migration):
dependencies = [
('core', '0058_merge_20211005_0909'),
('utils', '0023_upgrade_1_4_0'),
]
operations = [
migrations.RunPython(set_about_plugin_to_hpe, reverse_code=migrations.RunPython.noop)
]
|
Fix missing dependency on core.0059 migration
|
Fix missing dependency on core.0059 migration
|
Python
|
agpl-3.0
|
BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway
|
python
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations
def set_about_plugin_to_hpe(apps, schema_editor):
Plugin = apps.get_model('utils', 'Plugin')
Plugin.objects.filter(
name='About',
).update(
homepage_element=True,
)
class Migration(migrations.Migration):
dependencies = [
('core', '0058_merge_20211005_0909'),
]
operations = [
migrations.RunPython(set_about_plugin_to_hpe, reverse_code=migrations.RunPython.noop)
]
## Instruction:
Fix missing dependency on core.0059 migration
## Code After:
from __future__ import unicode_literals
from django.db import migrations
def set_about_plugin_to_hpe(apps, schema_editor):
Plugin = apps.get_model('utils', 'Plugin')
Plugin.objects.filter(
name='About',
).update(
homepage_element=True,
)
class Migration(migrations.Migration):
dependencies = [
('core', '0058_merge_20211005_0909'),
('utils', '0023_upgrade_1_4_0'),
]
operations = [
migrations.RunPython(set_about_plugin_to_hpe, reverse_code=migrations.RunPython.noop)
]
|
// ... existing code ...
dependencies = [
('core', '0058_merge_20211005_0909'),
('utils', '0023_upgrade_1_4_0'),
]
operations = [
// ... rest of the code ...
|
fd11e57f736fff6ef23972dee642554c6e8f5495
|
urls.py
|
urls.py
|
from django.conf.urls import url
from . import views
from django.conf import settings
urlpatterns=[
url(r'^login/$', views.login, name='login'),
url(r'^logout/$', views.logout, name='logout'),
url(r'^users/$', views.users, name='users'),
url(r'^users/(?P<username>[^/]*)/$', views.user, name='user'),
]
|
from django.conf.urls import url
from . import views
from django.conf import settings
urlpatterns=[
url(r'^login/$', views.login, name='login'),
url(r'^logout/$', views.logout, name='logout'),
url(r'^users/$', views.users, name='users'),
url(r'^users/(?P<user>[^/]*)/$', views.user, name='user'),
url(r'^users/(?P<user>[^/]*)/prefs/$', views.prefs, name='prefs'),
url(r'^users/(?P<user>[^/]*)/prefs/(?P<prefId>[^/]*)/$', views.pref, name='pref'),
]
|
Add the prefs and pref url
|
Add the prefs and pref url
|
Python
|
apache-2.0
|
kensonman/webframe,kensonman/webframe,kensonman/webframe
|
python
|
## Code Before:
from django.conf.urls import url
from . import views
from django.conf import settings
urlpatterns=[
url(r'^login/$', views.login, name='login'),
url(r'^logout/$', views.logout, name='logout'),
url(r'^users/$', views.users, name='users'),
url(r'^users/(?P<username>[^/]*)/$', views.user, name='user'),
]
## Instruction:
Add the prefs and pref url
## Code After:
from django.conf.urls import url
from . import views
from django.conf import settings
urlpatterns=[
url(r'^login/$', views.login, name='login'),
url(r'^logout/$', views.logout, name='logout'),
url(r'^users/$', views.users, name='users'),
url(r'^users/(?P<user>[^/]*)/$', views.user, name='user'),
url(r'^users/(?P<user>[^/]*)/prefs/$', views.prefs, name='prefs'),
url(r'^users/(?P<user>[^/]*)/prefs/(?P<prefId>[^/]*)/$', views.pref, name='pref'),
]
|
...
url(r'^login/$', views.login, name='login'),
url(r'^logout/$', views.logout, name='logout'),
url(r'^users/$', views.users, name='users'),
url(r'^users/(?P<user>[^/]*)/$', views.user, name='user'),
url(r'^users/(?P<user>[^/]*)/prefs/$', views.prefs, name='prefs'),
url(r'^users/(?P<user>[^/]*)/prefs/(?P<prefId>[^/]*)/$', views.pref, name='pref'),
]
...
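The two new routes rely on named groups in the URL regex; a small re-only check of what they capture, with invented path segments and no Django involved:
import re
pref_pattern = re.compile(r'^users/(?P<user>[^/]*)/prefs/(?P<prefId>[^/]*)/$')
match = pref_pattern.match('users/alice/prefs/theme/')
print(match.group('user'), match.group('prefId'))  # alice theme
# an extra '/' inside the user segment breaks the match, because [^/]* stops at slashes
print(pref_pattern.match('users/alice/extra/prefs/theme/'))  # None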
|
6c9640cf0e9e8e187a61fc81f6c0eed0988601e1
|
apps/accounts/views.py
|
apps/accounts/views.py
|
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from .models import UserProfile
class UserProfileBase(object):
model = UserProfile
class UserProfileList(UserProfileBase, ListView):
pass
class UserProfileDetail(UserProfileBase, DetailView):
pass
|
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from .models import UserProfile
class UserProfileBase(object):
queryset = UserProfile.objects.all().select_related('user')
class UserProfileList(UserProfileBase, ListView):
pass
class UserProfileDetail(UserProfileBase, DetailView):
pass
|
Make sure the 'user' object is available in the UserProfile queryset in the view.
|
Make sure the 'user' object is available in the UserProfile queryset in the view.
|
Python
|
bsd-3-clause
|
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
|
python
|
## Code Before:
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from .models import UserProfile
class UserProfileBase(object):
model = UserProfile
class UserProfileList(UserProfileBase, ListView):
pass
class UserProfileDetail(UserProfileBase, DetailView):
pass
## Instruction:
Make sure the 'user' object is available in the UserProfile queryset in the view.
## Code After:
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from .models import UserProfile
class UserProfileBase(object):
queryset = UserProfile.objects.all().select_related('user')
class UserProfileList(UserProfileBase, ListView):
pass
class UserProfileDetail(UserProfileBase, DetailView):
pass
|
...
class UserProfileBase(object):
queryset = UserProfile.objects.all().select_related('user')
class UserProfileList(UserProfileBase, ListView):
...
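A short sketch of what the change buys (illustrative only; the username attribute is assumed from Django's stock auth user model):
# Before: one query for the profiles, plus one per row when .user is touched (the N+1 pattern).
for profile in UserProfile.objects.all():
    print(profile.user.username)   # extra query on every iteration
# After: the base queryset joins the user table up front, so the loop adds no queries.
for profile in UserProfile.objects.all().select_related('user'):
    print(profile.user.username)   # related user already fetched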
|
ae2abc0bda94fd1e626223d3ee25e5dc21e94c00
|
orc/orconce.c
|
orc/orconce.c
|
static pthread_mutex_t once_mutex = PTHREAD_MUTEX_INITIALIZER;
void
_orc_once_init (void)
{
}
void
orc_once_mutex_lock (void)
{
pthread_mutex_lock (&once_mutex);
}
void
orc_once_mutex_unlock (void)
{
pthread_mutex_unlock (&once_mutex);
}
#elif defined(HAVE_THREAD_WIN32)
#include <windows.h>
static CRITICAL_SECTION once_mutex;
void
_orc_once_init (void)
{
InitializeCriticalSection (&once_mutex);
}
void
orc_once_mutex_lock (void)
{
EnterCriticalSection (&once_mutex);
}
void
orc_once_mutex_unlock (void)
{
LeaveCriticalSection (&once_mutex);
}
#else
void
_orc_once_init (void)
{
}
void
orc_once_mutex_lock (void)
{
}
void
orc_once_mutex_unlock (void)
{
}
#endif
|
static pthread_mutex_t once_mutex = PTHREAD_MUTEX_INITIALIZER;
void
_orc_once_init (void)
{
}
void
orc_once_mutex_lock (void)
{
pthread_mutex_lock (&once_mutex);
}
void
orc_once_mutex_unlock (void)
{
pthread_mutex_unlock (&once_mutex);
}
#elif defined(HAVE_THREAD_WIN32)
#include <windows.h>
static CRITICAL_SECTION once_mutex;
void
_orc_once_init (void)
{
}
void
orc_once_mutex_lock (void)
{
EnterCriticalSection (&once_mutex);
}
void
orc_once_mutex_unlock (void)
{
LeaveCriticalSection (&once_mutex);
}
int
DllMain (HINSTANCE hInstance, DWORD dwReason, LPVOID lpReserved)
{
if (dwReason == DLL_PROCESS_ATTACH) {
InitializeCriticalSection (&once_mutex);
}
return 1;
}
#else
void
_orc_once_init (void)
{
}
void
orc_once_mutex_lock (void)
{
}
void
orc_once_mutex_unlock (void)
{
}
#endif
|
Fix initialization of once mutex
|
win32: Fix initialization of once mutex
|
C
|
bsd-3-clause
|
okuoku/nmosh-orc,MOXfiles/orc,okuoku/nmosh-orc,jpakkane/orc,MOXfiles/orc,mojaves/orc,Distrotech/orc,Distrotech/orc,Distrotech/orc,ahmedammar/platform_external_gst_liborc,mojaves/orc,okuoku/nmosh-orc,ahmedammar/platform_external_gst_liborc,ijsf/OpenWebRTC-orc,jpakkane/orc,jpakkane/orc,mojaves/orc,ijsf/OpenWebRTC-orc,ijsf/OpenWebRTC-orc,MOXfiles/orc
|
c
|
## Code Before:
static pthread_mutex_t once_mutex = PTHREAD_MUTEX_INITIALIZER;
void
_orc_once_init (void)
{
}
void
orc_once_mutex_lock (void)
{
pthread_mutex_lock (&once_mutex);
}
void
orc_once_mutex_unlock (void)
{
pthread_mutex_unlock (&once_mutex);
}
#elif defined(HAVE_THREAD_WIN32)
#include <windows.h>
static CRITICAL_SECTION once_mutex;
void
_orc_once_init (void)
{
InitializeCriticalSection (&once_mutex);
}
void
orc_once_mutex_lock (void)
{
EnterCriticalSection (&once_mutex);
}
void
orc_once_mutex_unlock (void)
{
LeaveCriticalSection (&once_mutex);
}
#else
void
_orc_once_init (void)
{
}
void
orc_once_mutex_lock (void)
{
}
void
orc_once_mutex_unlock (void)
{
}
#endif
## Instruction:
win32: Fix initialization of once mutex
## Code After:
static pthread_mutex_t once_mutex = PTHREAD_MUTEX_INITIALIZER;
void
_orc_once_init (void)
{
}
void
orc_once_mutex_lock (void)
{
pthread_mutex_lock (&once_mutex);
}
void
orc_once_mutex_unlock (void)
{
pthread_mutex_unlock (&once_mutex);
}
#elif defined(HAVE_THREAD_WIN32)
#include <windows.h>
static CRITICAL_SECTION once_mutex;
void
_orc_once_init (void)
{
}
void
orc_once_mutex_lock (void)
{
EnterCriticalSection (&once_mutex);
}
void
orc_once_mutex_unlock (void)
{
LeaveCriticalSection (&once_mutex);
}
int
DllMain (HINSTANCE hInstance, DWORD dwReason, LPVOID lpReserved)
{
if (dwReason == DLL_PROCESS_ATTACH) {
InitializeCriticalSection (&once_mutex);
}
return 1;
}
#else
void
_orc_once_init (void)
{
}
void
orc_once_mutex_lock (void)
{
}
void
orc_once_mutex_unlock (void)
{
}
#endif
|
# ... existing code ...
void
_orc_once_init (void)
{
}
void
# ... modified code ...
orc_once_mutex_unlock (void)
{
LeaveCriticalSection (&once_mutex);
}
int
DllMain (HINSTANCE hInstance, DWORD dwReason, LPVOID lpReserved)
{
if (dwReason == DLL_PROCESS_ATTACH) {
InitializeCriticalSection (&once_mutex);
}
return 1;
}
#else
# ... rest of the code ...
|
245c1cc1411a9c58de9ed3c1b06110e87541e420
|
src/main/kotlin/leetcode/Problem1954.kt
|
src/main/kotlin/leetcode/Problem1954.kt
|
package leetcode
/**
* https://leetcode.com/problems/minimum-garden-perimeter-to-collect-enough-apples/
*/
class Problem1954 {
fun minimumPerimeter(neededApples: Long): Long {
var answer = 0L
var lo = 1L
var hi = neededApples
while (lo <= hi) {
val mid = lo + ((hi - lo) / 2)
// When it overflows, it will produce a negative number.
val numApples = numApples(mid)
println("$lo < $mid < $hi --> $numApples")
if (numApples < 0 || neededApples <= numApples) {
answer = mid
hi = mid - 1
} else {
lo = mid + 1
}
}
return answer * 8
}
fun numApples(n: Long): Long {
return 2 * (n * (n + 1) * (2 * n + 1))
}
}
|
package leetcode
/**
* https://leetcode.com/problems/minimum-garden-perimeter-to-collect-enough-apples/
*/
class Problem1954 {
fun minimumPerimeter(neededApples: Long): Long {
var answer = 0L
var lo = 1L
var hi = neededApples
while (lo <= hi) {
val mid = lo + ((hi - lo) / 2)
// When it overflows, it will produce a negative number.
val numApples = numApples(mid)
if (numApples < 0 || neededApples <= numApples) {
answer = mid
hi = mid - 1
} else {
lo = mid + 1
}
}
return answer * 8
}
fun numApples(n: Long): Long {
return 2 * (n * (n + 1) * (2 * n + 1))
}
}
|
Remove println in problem 1954
|
Remove println in problem 1954
|
Kotlin
|
mit
|
fredyw/leetcode,fredyw/leetcode,fredyw/leetcode,fredyw/leetcode
|
kotlin
|
## Code Before:
package leetcode
/**
* https://leetcode.com/problems/minimum-garden-perimeter-to-collect-enough-apples/
*/
class Problem1954 {
fun minimumPerimeter(neededApples: Long): Long {
var answer = 0L
var lo = 1L
var hi = neededApples
while (lo <= hi) {
val mid = lo + ((hi - lo) / 2)
// When it overflows, it will produce a negative number.
val numApples = numApples(mid)
println("$lo < $mid < $hi --> $numApples")
if (numApples < 0 || neededApples <= numApples) {
answer = mid
hi = mid - 1
} else {
lo = mid + 1
}
}
return answer * 8
}
fun numApples(n: Long): Long {
return 2 * (n * (n + 1) * (2 * n + 1))
}
}
## Instruction:
Remove println in problem 1954
## Code After:
package leetcode
/**
* https://leetcode.com/problems/minimum-garden-perimeter-to-collect-enough-apples/
*/
class Problem1954 {
fun minimumPerimeter(neededApples: Long): Long {
var answer = 0L
var lo = 1L
var hi = neededApples
while (lo <= hi) {
val mid = lo + ((hi - lo) / 2)
// When it overflows, it will produce a negative number.
val numApples = numApples(mid)
if (numApples < 0 || neededApples <= numApples) {
answer = mid
hi = mid - 1
} else {
lo = mid + 1
}
}
return answer * 8
}
fun numApples(n: Long): Long {
return 2 * (n * (n + 1) * (2 * n + 1))
}
}
|
...
val mid = lo + ((hi - lo) / 2)
// When it overflows, it will produce a negative number.
val numApples = numApples(mid)
if (numApples < 0 || neededApples <= numApples) {
answer = mid
hi = mid - 1
...
|
f14107b723bcf62b327b10d8726b2bf8ef2031eb
|
tests/test_manifest_delivery_base.py
|
tests/test_manifest_delivery_base.py
|
import yaml
from app.config import QueueNames
def test_queue_names_set_in_manifest_delivery_base_correctly():
with open("manifest-delivery-base.yml", 'r') as stream:
search = ' -Q '
yml_commands = [y['command'] for y in yaml.load(stream)['applications']]
watched_queues = set()
for command in yml_commands:
start_of_queue_arg = command.find(search)
if start_of_queue_arg > 0:
start_of_queue_names = start_of_queue_arg + len(search)
queues = command[start_of_queue_names:].split(',')
watched_queues.update(queues)
# ses-callbacks isn't used in api (only used in SNS lambda)
ignored_queues = {'ses-callbacks'}
watched_queues -= ignored_queues
assert watched_queues == set(QueueNames.all_queues())
|
import yaml
from app.config import QueueNames
def test_queue_names_set_in_manifest_delivery_base_correctly():
with open("manifest-delivery-base.yml", 'r') as stream:
search = ' -Q '
yml_commands = [y['command'] for y in yaml.load(stream)['applications']]
watched_queues = set()
for command in yml_commands:
start_of_queue_arg = command.find(search)
if start_of_queue_arg > 0:
start_of_queue_names = start_of_queue_arg + len(search)
queues = command[start_of_queue_names:].split(',')
for q in queues:
if "2>" in q:
q = q.split("2>")[0].strip()
watched_queues.add(q)
# ses-callbacks isn't used in api (only used in SNS lambda)
ignored_queues = {'ses-callbacks'}
watched_queues -= ignored_queues
assert watched_queues == set(QueueNames.all_queues())
|
Fix test that checks queues
|
Fix test that checks queues
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
python
|
## Code Before:
import yaml
from app.config import QueueNames
def test_queue_names_set_in_manifest_delivery_base_correctly():
with open("manifest-delivery-base.yml", 'r') as stream:
search = ' -Q '
yml_commands = [y['command'] for y in yaml.load(stream)['applications']]
watched_queues = set()
for command in yml_commands:
start_of_queue_arg = command.find(search)
if start_of_queue_arg > 0:
start_of_queue_names = start_of_queue_arg + len(search)
queues = command[start_of_queue_names:].split(',')
watched_queues.update(queues)
# ses-callbacks isn't used in api (only used in SNS lambda)
ignored_queues = {'ses-callbacks'}
watched_queues -= ignored_queues
assert watched_queues == set(QueueNames.all_queues())
## Instruction:
Fix test that checks queues
## Code After:
import yaml
from app.config import QueueNames
def test_queue_names_set_in_manifest_delivery_base_correctly():
with open("manifest-delivery-base.yml", 'r') as stream:
search = ' -Q '
yml_commands = [y['command'] for y in yaml.load(stream)['applications']]
watched_queues = set()
for command in yml_commands:
start_of_queue_arg = command.find(search)
if start_of_queue_arg > 0:
start_of_queue_names = start_of_queue_arg + len(search)
queues = command[start_of_queue_names:].split(',')
for q in queues:
if "2>" in q:
q = q.split("2>")[0].strip()
watched_queues.add(q)
# ses-callbacks isn't used in api (only used in SNS lambda)
ignored_queues = {'ses-callbacks'}
watched_queues -= ignored_queues
assert watched_queues == set(QueueNames.all_queues())
|
# ... existing code ...
if start_of_queue_arg > 0:
start_of_queue_names = start_of_queue_arg + len(search)
queues = command[start_of_queue_names:].split(',')
for q in queues:
if "2>" in q:
q = q.split("2>")[0].strip()
watched_queues.add(q)
# ses-callbacks isn't used in api (only used in SNS lambda)
ignored_queues = {'ses-callbacks'}
# ... rest of the code ...
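A small illustration of the case the new loop handles — a worker command whose queue list is followed by a stderr redirection (the command string is hypothetical):
command = "celery -A run_celery worker -Q notify-sms,notify-email 2> /dev/null"
search = ' -Q '
queues = command[command.find(search) + len(search):].split(',')
print(queues)    # ['notify-sms', 'notify-email 2> /dev/null']
cleaned = [q.split("2>")[0].strip() if "2>" in q else q for q in queues]
print(cleaned)   # ['notify-sms', 'notify-email']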
|
4af5e3c9e48ff997084d5316f384b3c6411edaa3
|
test/dump.py
|
test/dump.py
|
import subprocess
from tempfile import NamedTemporaryFile
def dump_table(table):
def handle_record(record, fields):
def handle_field(column, field):
return '%d:%d:%d %s' % (record, column, len(field), field)
return '\n'.join(handle_field(column, field) for (column, field) in
enumerate(fields))
dump = '\n'.join(handle_record(record, fields) for (record, fields) in
enumerate(table))
return '\n'.join(filter(lambda s: s != '', dump.splitlines()))
def dump_text(text, b=False, d=None, e=None, q=None):
with NamedTemporaryFile() as outfile:
outfile.write(text)
outfile.flush()
return _dump(outfile.name, b, d, e, q).strip('\n')
def _dump(filename, b=False, d=None, e=None, q=None):
args = ['./dump']
if b:
args.append('-b')
if d:
args.append('-d%s' % d)
if e:
args.append('-e%s' % e)
if q:
args.append('-q%s' % q)
args.append(filename)
return _popen(args)
def _popen(args):
pipe = subprocess.PIPE
process = subprocess.Popen(args, stdout=pipe, stderr=pipe)
stdout, stderr = process.communicate()
return stderr if len(stderr) > 0 else stdout
|
import subprocess
import tempfile
def dump_table(table):
def handle_record(record, fields):
def handle_field(column, field):
return '%d:%d:%d %s' % (record, column, len(field), field)
return '\n'.join(handle_field(column, field) for (column, field) in
enumerate(fields))
dump = '\n'.join(handle_record(record, fields) for (record, fields) in
enumerate(table))
return '\n'.join(filter(lambda s: s != '', dump.splitlines()))
def dump_text(text, b=False, d=None, e=None, q=None):
with tempfile.NamedTemporaryFile() as outfile:
outfile.write(text)
outfile.flush()
return _dump(outfile.name, b, d, e, q).strip('\n')
def _dump(filename, b=False, d=None, e=None, q=None):
args = ['./dump']
if b:
args.append('-b')
if d:
args.append('-d%s' % d)
if e:
args.append('-e%s' % e)
if q:
args.append('-q%s' % q)
args.append(filename)
return _popen(args)
def _popen(args):
pipe = subprocess.PIPE
process = subprocess.Popen(args, stdout=pipe, stderr=pipe)
stdout, stderr = process.communicate()
return stderr if len(stderr) > 0 else stdout
|
Use 'import' for 'tempfile' module
|
Use 'import' for 'tempfile' module
|
Python
|
mit
|
jvirtanen/fields,jvirtanen/fields
|
python
|
## Code Before:
import subprocess
from tempfile import NamedTemporaryFile
def dump_table(table):
def handle_record(record, fields):
def handle_field(column, field):
return '%d:%d:%d %s' % (record, column, len(field), field)
return '\n'.join(handle_field(column, field) for (column, field) in
enumerate(fields))
dump = '\n'.join(handle_record(record, fields) for (record, fields) in
enumerate(table))
return '\n'.join(filter(lambda s: s != '', dump.splitlines()))
def dump_text(text, b=False, d=None, e=None, q=None):
with NamedTemporaryFile() as outfile:
outfile.write(text)
outfile.flush()
return _dump(outfile.name, b, d, e, q).strip('\n')
def _dump(filename, b=False, d=None, e=None, q=None):
args = ['./dump']
if b:
args.append('-b')
if d:
args.append('-d%s' % d)
if e:
args.append('-e%s' % e)
if q:
args.append('-q%s' % q)
args.append(filename)
return _popen(args)
def _popen(args):
pipe = subprocess.PIPE
process = subprocess.Popen(args, stdout=pipe, stderr=pipe)
stdout, stderr = process.communicate()
return stderr if len(stderr) > 0 else stdout
## Instruction:
Use 'import' for 'tempfile' module
## Code After:
import subprocess
import tempfile
def dump_table(table):
def handle_record(record, fields):
def handle_field(column, field):
return '%d:%d:%d %s' % (record, column, len(field), field)
return '\n'.join(handle_field(column, field) for (column, field) in
enumerate(fields))
dump = '\n'.join(handle_record(record, fields) for (record, fields) in
enumerate(table))
return '\n'.join(filter(lambda s: s != '', dump.splitlines()))
def dump_text(text, b=False, d=None, e=None, q=None):
with tempfile.NamedTemporaryFile() as outfile:
outfile.write(text)
outfile.flush()
return _dump(outfile.name, b, d, e, q).strip('\n')
def _dump(filename, b=False, d=None, e=None, q=None):
args = ['./dump']
if b:
args.append('-b')
if d:
args.append('-d%s' % d)
if e:
args.append('-e%s' % e)
if q:
args.append('-q%s' % q)
args.append(filename)
return _popen(args)
def _popen(args):
pipe = subprocess.PIPE
process = subprocess.Popen(args, stdout=pipe, stderr=pipe)
stdout, stderr = process.communicate()
return stderr if len(stderr) > 0 else stdout
|
...
import subprocess
import tempfile
def dump_table(table):
...
return '\n'.join(filter(lambda s: s != '', dump.splitlines()))
def dump_text(text, b=False, d=None, e=None, q=None):
with tempfile.NamedTemporaryFile() as outfile:
outfile.write(text)
outfile.flush()
return _dump(outfile.name, b, d, e, q).strip('\n')
...
|
28ecad18488928cbd69dbe0143bf8e447e2db2a3
|
src/jhv/src/org/helioviewer/jhv/threads/CancelTask.java
|
src/jhv/src/org/helioviewer/jhv/threads/CancelTask.java
|
package org.helioviewer.jhv.threads;
import java.util.concurrent.FutureTask;
public class CancelTask extends FutureTask<Void> {
public CancelTask(FutureTask<?> abolishTask) {
super(new Runnable() {
private FutureTask<?> abolishTask;
public Runnable init(FutureTask<?> abolishTask) {
this.abolishTask = abolishTask;
return this;
}
@Override
public void run() {
abolishTask.cancel(true);
}
}.init(abolishTask), null);
}
}
|
package org.helioviewer.jhv.threads;
import java.util.concurrent.Callable;
import java.util.concurrent.FutureTask;
public class CancelTask extends FutureTask<Boolean> {
public CancelTask(FutureTask<?> cancelTask) {
super(new Callable<Boolean>() {
private FutureTask<?> _cancelTask;
public Callable init(FutureTask<?> _cancelTask) {
this._cancelTask = _cancelTask;
return this;
}
@Override
public Boolean call() {
return _cancelTask.cancel(true);
}
}.init(cancelTask));
}
}
|
Use Callable to be able to retrieve the result of cancel
|
Use Callable to be able to retrieve the result of cancel
git-svn-id: 4e353c0944fe8da334633afc35765ef362dec675@5095 b4e469a2-07ce-4b26-9273-4d7d95a670c7
|
Java
|
mpl-2.0
|
Helioviewer-Project/JHelioviewer-SWHV,Helioviewer-Project/JHelioviewer-SWHV,Helioviewer-Project/JHelioviewer-SWHV,Helioviewer-Project/JHelioviewer-SWHV,Helioviewer-Project/JHelioviewer-SWHV
|
java
|
## Code Before:
package org.helioviewer.jhv.threads;
import java.util.concurrent.FutureTask;
public class CancelTask extends FutureTask<Void> {
public CancelTask(FutureTask<?> abolishTask) {
super(new Runnable() {
private FutureTask<?> abolishTask;
public Runnable init(FutureTask<?> abolishTask) {
this.abolishTask = abolishTask;
return this;
}
@Override
public void run() {
abolishTask.cancel(true);
}
}.init(abolishTask), null);
}
}
## Instruction:
Use Callable to be able to retrieve the result of cancel
git-svn-id: 4e353c0944fe8da334633afc35765ef362dec675@5095 b4e469a2-07ce-4b26-9273-4d7d95a670c7
## Code After:
package org.helioviewer.jhv.threads;
import java.util.concurrent.Callable;
import java.util.concurrent.FutureTask;
public class CancelTask extends FutureTask<Boolean> {
public CancelTask(FutureTask<?> cancelTask) {
super(new Callable<Boolean>() {
private FutureTask<?> _cancelTask;
public Callable init(FutureTask<?> _cancelTask) {
this._cancelTask = _cancelTask;
return this;
}
@Override
public Boolean call() {
return _cancelTask.cancel(true);
}
}.init(cancelTask));
}
}
|
# ... existing code ...
package org.helioviewer.jhv.threads;
import java.util.concurrent.Callable;
import java.util.concurrent.FutureTask;
public class CancelTask extends FutureTask<Boolean> {
public CancelTask(FutureTask<?> cancelTask) {
super(new Callable<Boolean>() {
private FutureTask<?> _cancelTask;
public Callable init(FutureTask<?> _cancelTask) {
this._cancelTask = _cancelTask;
return this;
}
@Override
public Boolean call() {
return _cancelTask.cancel(true);
}
}.init(cancelTask));
}
}
# ... rest of the code ...
|
c27a1fc4c0251b896667e21a0a88fb44a403242f
|
cistern/migrations.py
|
cistern/migrations.py
|
import os
from playhouse.migrate import *
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=None)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
|
import datetime
import os
from playhouse.migrate import *
def update():
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=datetime.datetime.now)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
|
Move migration to a function
|
Move migration to a function
|
Python
|
mit
|
archangelic/cistern
|
python
|
## Code Before:
import os
from playhouse.migrate import *
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=None)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
## Instruction:
Move migration to a function
## Code After:
import datetime
import os
from playhouse.migrate import *
def update():
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=datetime.datetime.now)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
|
# ... existing code ...
import datetime
import os
from playhouse.migrate import *
def update():
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=datetime.datetime.now)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
# ... rest of the code ...
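A brief usage sketch (the call site is an assumption; only update() itself comes from the record). Wrapping the statements in a function defers the schema change from import time to an explicit call:
from cistern import migrations   # assumes cistern is importable as a package
migrations.update()   # apply the date_added column migration once, e.g. from an upgrade step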
|
6a8068942d985f0c125749d5f58ad7cb9cd189be
|
scanpointgenerator/linegenerator_step.py
|
scanpointgenerator/linegenerator_step.py
|
from linegenerator import LineGenerator
import math as m
class StepLineGenerator(LineGenerator):
def __init__(self, name, units, start, end, step):
num = int(m.floor((end - start)/step))
super(StepLineGenerator, self).__init__(name, units, start, step, num)
|
from linegenerator import LineGenerator
class StepLineGenerator(LineGenerator):
def __init__(self, name, units, start, end, step):
num = int((end - start)/step) + 1
super(StepLineGenerator, self).__init__(name, units, start, step, num)
|
Add extra point to include start
|
Add extra point to include start
|
Python
|
apache-2.0
|
dls-controls/scanpointgenerator
|
python
|
## Code Before:
from linegenerator import LineGenerator
import math as m
class StepLineGenerator(LineGenerator):
def __init__(self, name, units, start, end, step):
num = int(m.floor((end - start)/step))
super(StepLineGenerator, self).__init__(name, units, start, step, num)
## Instruction:
Add extra point to include start
## Code After:
from linegenerator import LineGenerator
class StepLineGenerator(LineGenerator):
def __init__(self, name, units, start, end, step):
num = int((end - start)/step) + 1
super(StepLineGenerator, self).__init__(name, units, start, step, num)
|
...
from linegenerator import LineGenerator
class StepLineGenerator(LineGenerator):
...
def __init__(self, name, units, start, end, step):
num = int((end - start)/step) + 1
super(StepLineGenerator, self).__init__(name, units, start, step, num)
...
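A quick numeric check of the count change (values are illustrative; even spacing of start + i*step is an assumption about LineGenerator, which is not shown in the record):
start, end, step = 0.0, 10.0, 2.0
old_num = int((end - start) / step)        # 5 points
new_num = int((end - start) / step) + 1    # 6 points
print([start + i * step for i in range(new_num)])   # [0.0, 2.0, 4.0, 6.0, 8.0, 10.0]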
|
5c3007bfdcdaed03167a2b7997df6a98da397aaf
|
core/src/main/kotlin/Formats/YamlOutlineService.kt
|
core/src/main/kotlin/Formats/YamlOutlineService.kt
|
package org.jetbrains.dokka
import com.google.inject.Inject
import java.io.File
class YamlOutlineService @Inject constructor(
val generator: NodeLocationAwareGenerator,
val languageService: LanguageService
) : OutlineFormatService {
override fun getOutlineFileName(location: Location): File = File("${location.path}.yml")
var outlineLevel = 0
override fun appendOutlineHeader(location: Location, node: DocumentationNode, to: StringBuilder) {
val indent = " ".repeat(outlineLevel)
to.appendln("$indent- title: ${languageService.renderName(node)}")
to.appendln("$indent url: ${generator.location(node).path}")
}
override fun appendOutlineLevel(to: StringBuilder, body: () -> Unit) {
val indent = " ".repeat(outlineLevel)
to.appendln("$indent content:")
outlineLevel++
body()
outlineLevel--
}
}
|
package org.jetbrains.dokka
import com.google.inject.Inject
import java.io.File
class YamlOutlineService @Inject constructor(
val generator: NodeLocationAwareGenerator,
val languageService: LanguageService
) : OutlineFormatService {
override fun getOutlineFileName(location: Location): File = File("${location.path}.yml")
var outlineLevel = 0
override fun appendOutlineHeader(location: Location, node: DocumentationNode, to: StringBuilder) {
val indent = " ".repeat(outlineLevel)
to.appendln("$indent- title: ${languageService.renderName(node)}")
to.appendln("$indent url: ${generator.relativePathToLocation(node.path.first(), node)}")
}
override fun appendOutlineLevel(to: StringBuilder, body: () -> Unit) {
val indent = " ".repeat(outlineLevel)
to.appendln("$indent content:")
outlineLevel++
body()
outlineLevel--
}
}
|
Fix yaml outline generation to use relative paths
|
Fix yaml outline generation to use relative paths
|
Kotlin
|
apache-2.0
|
Kotlin/dokka,Kotlin/dokka,Kotlin/dokka,Kotlin/dokka,Kotlin/dokka,Kotlin/dokka
|
kotlin
|
## Code Before:
package org.jetbrains.dokka
import com.google.inject.Inject
import java.io.File
class YamlOutlineService @Inject constructor(
val generator: NodeLocationAwareGenerator,
val languageService: LanguageService
) : OutlineFormatService {
override fun getOutlineFileName(location: Location): File = File("${location.path}.yml")
var outlineLevel = 0
override fun appendOutlineHeader(location: Location, node: DocumentationNode, to: StringBuilder) {
val indent = " ".repeat(outlineLevel)
to.appendln("$indent- title: ${languageService.renderName(node)}")
to.appendln("$indent url: ${generator.location(node).path}")
}
override fun appendOutlineLevel(to: StringBuilder, body: () -> Unit) {
val indent = " ".repeat(outlineLevel)
to.appendln("$indent content:")
outlineLevel++
body()
outlineLevel--
}
}
## Instruction:
Fix yaml outline generation to use relative paths
## Code After:
package org.jetbrains.dokka
import com.google.inject.Inject
import java.io.File
class YamlOutlineService @Inject constructor(
val generator: NodeLocationAwareGenerator,
val languageService: LanguageService
) : OutlineFormatService {
override fun getOutlineFileName(location: Location): File = File("${location.path}.yml")
var outlineLevel = 0
override fun appendOutlineHeader(location: Location, node: DocumentationNode, to: StringBuilder) {
val indent = " ".repeat(outlineLevel)
to.appendln("$indent- title: ${languageService.renderName(node)}")
to.appendln("$indent url: ${generator.relativePathToLocation(node.path.first(), node)}")
}
override fun appendOutlineLevel(to: StringBuilder, body: () -> Unit) {
val indent = " ".repeat(outlineLevel)
to.appendln("$indent content:")
outlineLevel++
body()
outlineLevel--
}
}
|
// ... existing code ...
override fun appendOutlineHeader(location: Location, node: DocumentationNode, to: StringBuilder) {
val indent = " ".repeat(outlineLevel)
to.appendln("$indent- title: ${languageService.renderName(node)}")
to.appendln("$indent url: ${generator.relativePathToLocation(node.path.first(), node)}")
}
override fun appendOutlineLevel(to: StringBuilder, body: () -> Unit) {
// ... rest of the code ...
|
b05d98bdc8e85bdff75234f37e86ba45329c31a2
|
src/main/java/com/github/jmchilton/blend4j/galaxy/beans/User.java
|
src/main/java/com/github/jmchilton/blend4j/galaxy/beans/User.java
|
package com.github.jmchilton.blend4j.galaxy.beans;
import org.codehaus.jackson.annotate.JsonProperty;
public class User extends GalaxyObject {
private String email;
public String getEmail() {
return email;
}
public void setEmail(final String email) {
this.email = email;
}
@JsonProperty("nice_total_disk_usage")
public void setNiceTotalDiskUsage(final String niceTotalDiskUsage) {
}
@JsonProperty("total_disk_usage")
public void setTotalDiskUsage(final String totalDiskUsage) {
}
public void setUsername(final String username) {
}
}
|
package com.github.jmchilton.blend4j.galaxy.beans;
import org.codehaus.jackson.annotate.JsonProperty;
public class User extends GalaxyObject {
private String email;
private String username;
public String getEmail() {
return email;
}
public void setEmail(final String email) {
this.email = email;
}
@JsonProperty("nice_total_disk_usage")
public void setNiceTotalDiskUsage(final String niceTotalDiskUsage) {
}
@JsonProperty("total_disk_usage")
public void setTotalDiskUsage(final String totalDiskUsage) {
}
public void setUsername(final String username) {
this.username = username;
}
public String getUsername() {
return username;
}
}
|
Include retrieval of username information
|
Include retrieval of username information
|
Java
|
epl-1.0
|
apetkau/blend4j,greenwoodma/blend4j,jmchilton/blend4j,greenwoodma/blend4j,apetkau/blend4j,biologghe/blend4j,jmchilton/blend4j
|
java
|
## Code Before:
package com.github.jmchilton.blend4j.galaxy.beans;
import org.codehaus.jackson.annotate.JsonProperty;
public class User extends GalaxyObject {
private String email;
public String getEmail() {
return email;
}
public void setEmail(final String email) {
this.email = email;
}
@JsonProperty("nice_total_disk_usage")
public void setNiceTotalDiskUsage(final String niceTotalDiskUsage) {
}
@JsonProperty("total_disk_usage")
public void setTotalDiskUsage(final String totalDiskUsage) {
}
public void setUsername(final String username) {
}
}
## Instruction:
Include retrieval of username information
## Code After:
package com.github.jmchilton.blend4j.galaxy.beans;
import org.codehaus.jackson.annotate.JsonProperty;
public class User extends GalaxyObject {
private String email;
private String username;
public String getEmail() {
return email;
}
public void setEmail(final String email) {
this.email = email;
}
@JsonProperty("nice_total_disk_usage")
public void setNiceTotalDiskUsage(final String niceTotalDiskUsage) {
}
@JsonProperty("total_disk_usage")
public void setTotalDiskUsage(final String totalDiskUsage) {
}
public void setUsername(final String username) {
this.username = username;
}
public String getUsername() {
return username;
}
}
|
# ... existing code ...
public class User extends GalaxyObject {
private String email;
private String username;
public String getEmail() {
return email;
# ... modified code ...
}
public void setUsername(final String username) {
this.username = username;
}
public String getUsername() {
return username;
}
}
# ... rest of the code ...
|
d15bc4a6a100181e39f5bc1514fa73cb270a6c95
|
opendaylight/adsal/protocol_plugins/stub/src/main/java/org/opendaylight/controller/protocol_plugins/stub/internal/StubNodeFactory.java
|
opendaylight/adsal/protocol_plugins/stub/src/main/java/org/opendaylight/controller/protocol_plugins/stub/internal/StubNodeFactory.java
|
/*
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.protocol_plugins.stub.internal;
import org.opendaylight.controller.sal.core.ConstructionException;
import org.opendaylight.controller.sal.utils.INodeFactory;
import org.opendaylight.controller.sal.core.Node;
public class StubNodeFactory implements INodeFactory
{
void init() {
}
/**
* Function called by the dependency manager when at least one dependency
* become unsatisfied or when the component is shutting down because for
* example bundle is being stopped.
*
*/
void destroy() {
}
/**
* Function called by dependency manager after "init ()" is called and after
* the services provided by the class are registered in the service registry
*
*/
void start() {
}
/**
* Function called by the dependency manager before the services exported by
* the component are unregistered, this will be followed by a "destroy ()"
* calls
*
*/
void stop() {
}
public Node fromString(String nodeType, String nodeId){
if(nodeType.equals("STUB"))
try{
return new Node("STUB", Integer.parseInt(nodeId));
} catch(ConstructionException e)
{
return null;
}
return null;
}
}
|
/*
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.protocol_plugins.stub.internal;
import org.opendaylight.controller.sal.core.ConstructionException;
import org.opendaylight.controller.sal.utils.INodeFactory;
import org.opendaylight.controller.sal.core.Node;
public class StubNodeFactory implements INodeFactory {
void init() {
}
/**
* Function called by the dependency manager when at least one dependency
* become unsatisfied or when the component is shutting down because for
* example bundle is being stopped.
*
*/
void destroy() {
}
/**
* Function called by dependency manager after "init ()" is called and after
* the services provided by the class are registered in the service registry
*
*/
void start() {
}
/**
* Function called by the dependency manager before the services exported by
* the component are unregistered, this will be followed by a "destroy ()"
* calls
*
*/
void stop() {
}
public Node fromString(String nodeType, String nodeId){
if(nodeType.equals("STUB")) {
try {
return new Node("STUB", Integer.parseInt(nodeId));
} catch(ConstructionException e) {
return null;
}
}
return null;
}
}
|
Fix checkstyle if-statements must use braces adsal/protocol_plugins
|
Fix checkstyle if-statements must use braces adsal/protocol_plugins
Change-Id: I2110c4623f141e738b368d0bce0695ebc98809ff
Signed-off-by: Thanh Ha <[email protected]>
|
Java
|
epl-1.0
|
Johnson-Chou/test,my76128/controller,aryantaheri/monitoring-controller,mandeepdhami/controller,mandeepdhami/controller,Johnson-Chou/test,522986491/controller,aryantaheri/monitoring-controller,tx1103mark/controller,my76128/controller,mandeepdhami/controller,aryantaheri/monitoring-controller,opendaylight/controller,tx1103mark/controller,Sushma7785/OpenDayLight-Load-Balancer,my76128/controller,aryantaheri/monitoring-controller,Sushma7785/OpenDayLight-Load-Balancer,522986491/controller,mandeepdhami/controller,tx1103mark/controller,tx1103mark/controller,inocybe/odl-controller,my76128/controller,inocybe/odl-controller
|
java
|
## Code Before:
/*
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.protocol_plugins.stub.internal;
import org.opendaylight.controller.sal.core.ConstructionException;
import org.opendaylight.controller.sal.utils.INodeFactory;
import org.opendaylight.controller.sal.core.Node;
public class StubNodeFactory implements INodeFactory
{
void init() {
}
/**
* Function called by the dependency manager when at least one dependency
* become unsatisfied or when the component is shutting down because for
* example bundle is being stopped.
*
*/
void destroy() {
}
/**
* Function called by dependency manager after "init ()" is called and after
* the services provided by the class are registered in the service registry
*
*/
void start() {
}
/**
* Function called by the dependency manager before the services exported by
* the component are unregistered, this will be followed by a "destroy ()"
* calls
*
*/
void stop() {
}
public Node fromString(String nodeType, String nodeId){
if(nodeType.equals("STUB"))
try{
return new Node("STUB", Integer.parseInt(nodeId));
} catch(ConstructionException e)
{
return null;
}
return null;
}
}
## Instruction:
Fix checkstyle if-statements must use braces adsal/protocol_plugins
Change-Id: I2110c4623f141e738b368d0bce0695ebc98809ff
Signed-off-by: Thanh Ha <[email protected]>
## Code After:
/*
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.protocol_plugins.stub.internal;
import org.opendaylight.controller.sal.core.ConstructionException;
import org.opendaylight.controller.sal.utils.INodeFactory;
import org.opendaylight.controller.sal.core.Node;
public class StubNodeFactory implements INodeFactory {
void init() {
}
/**
* Function called by the dependency manager when at least one dependency
* become unsatisfied or when the component is shutting down because for
* example bundle is being stopped.
*
*/
void destroy() {
}
/**
* Function called by dependency manager after "init ()" is called and after
* the services provided by the class are registered in the service registry
*
*/
void start() {
}
/**
* Function called by the dependency manager before the services exported by
* the component are unregistered, this will be followed by a "destroy ()"
* calls
*
*/
void stop() {
}
public Node fromString(String nodeType, String nodeId){
if(nodeType.equals("STUB")) {
try {
return new Node("STUB", Integer.parseInt(nodeId));
} catch(ConstructionException e) {
return null;
}
}
return null;
}
}
|
// ... existing code ...
import org.opendaylight.controller.sal.utils.INodeFactory;
import org.opendaylight.controller.sal.core.Node;
public class StubNodeFactory implements INodeFactory {
void init() {
}
/**
* Function called by the dependency manager when at least one dependency
* become unsatisfied or when the component is shutting down because for
* example bundle is being stopped.
*
*/
void destroy() {
}
/**
* Function called by dependency manager after "init ()" is called and after
* the services provided by the class are registered in the service registry
*
*/
void start() {
}
/**
* Function called by the dependency manager before the services exported by
* the component are unregistered, this will be followed by a "destroy ()"
* calls
*
*/
void stop() {
}
public Node fromString(String nodeType, String nodeId){
if(nodeType.equals("STUB")) {
try {
return new Node("STUB", Integer.parseInt(nodeId));
} catch(ConstructionException e) {
return null;
}
}
return null;
}
}
// ... rest of the code ...
|
a7dce25964cd740b0d0db86b255ede60c913e73d
|
chdb.py
|
chdb.py
|
import sqlite3
DB_FILENAME = 'citationhunt.sqlite3'
def init_db():
return sqlite3.connect(DB_FILENAME)
def reset_db():
db = init_db()
with db:
db.execute('''
DROP TABLE IF EXISTS categories
''')
db.execute('''
DROP TABLE IF EXISTS articles
''')
db.execute('''
DROP TABLE IF EXISTS snippets
''')
db.execute('''
DROP TABLE IF EXISTS articles_categories
''')
db.execute('''
CREATE TABLE categories (id TEXT PRIMARY KEY, title TEXT)
''')
db.execute('''
INSERT INTO categories VALUES ("unassigned", "unassigned")
''')
db.execute('''
CREATE TABLE articles_categories (article_id TEXT, category_id TEXT,
FOREIGN KEY(article_id) REFERENCES articles(page_id)
ON DELETE CASCADE,
FOREIGN KEY(category_id) REFERENCES categories(id)
ON DELETE CASCADE)
''')
db.execute('''
CREATE TABLE articles (page_id TEXT PRIMARY KEY, url TEXT,
title TEXT)
''')
db.execute('''
CREATE TABLE snippets (id TEXT PRIMARY KEY, snippet TEXT,
section TEXT, article_id TEXT, FOREIGN KEY(article_id)
REFERENCES articles(page_id) ON DELETE CASCADE)
''')
return db
def create_indices():
db = init_db()
db.execute('''CREATE INDEX IF NOT EXISTS snippets_articles
ON snippets(article_id);''')
|
import sqlite3
DB_FILENAME = 'citationhunt.sqlite3'
def init_db():
return sqlite3.connect(DB_FILENAME)
def reset_db():
db = init_db()
with db:
db.execute('''
DROP TABLE categories
''')
db.execute('''
DROP TABLE articles
''')
db.execute('''
DROP TABLE snippets
''')
db.execute('''
DROP TABLE articles_categories
''')
db.execute('''
CREATE TABLE categories (id TEXT PRIMARY KEY, title TEXT)
''')
db.execute('''
INSERT INTO categories VALUES ("unassigned", "unassigned")
''')
db.execute('''
CREATE TABLE articles_categories (article_id TEXT, category_id TEXT,
FOREIGN KEY(article_id) REFERENCES articles(page_id)
ON DELETE CASCADE,
FOREIGN KEY(category_id) REFERENCES categories(id)
ON DELETE CASCADE)
''')
db.execute('''
CREATE TABLE articles (page_id TEXT PRIMARY KEY, url TEXT,
title TEXT)
''')
db.execute('''
CREATE TABLE snippets (id TEXT PRIMARY KEY, snippet TEXT,
section TEXT, article_id TEXT, FOREIGN KEY(article_id)
REFERENCES articles(page_id) ON DELETE CASCADE)
''')
return db
def create_indices():
db = init_db()
db.execute('''CREATE INDEX IF NOT EXISTS snippets_articles
ON snippets(article_id);''')
|
Remove IF EXISTS from DROP TABLE when resetting the db.
|
Remove IF EXISTS from DROP TABLE when resetting the db.
|
Python
|
mit
|
Stryn/citationhunt,Stryn/citationhunt,jhsoby/citationhunt,Stryn/citationhunt,Stryn/citationhunt,jhsoby/citationhunt,jhsoby/citationhunt,jhsoby/citationhunt
|
python
|
## Code Before:
import sqlite3
DB_FILENAME = 'citationhunt.sqlite3'
def init_db():
return sqlite3.connect(DB_FILENAME)
def reset_db():
db = init_db()
with db:
db.execute('''
DROP TABLE IF EXISTS categories
''')
db.execute('''
DROP TABLE IF EXISTS articles
''')
db.execute('''
DROP TABLE IF EXISTS snippets
''')
db.execute('''
DROP TABLE IF EXISTS articles_categories
''')
db.execute('''
CREATE TABLE categories (id TEXT PRIMARY KEY, title TEXT)
''')
db.execute('''
INSERT INTO categories VALUES ("unassigned", "unassigned")
''')
db.execute('''
CREATE TABLE articles_categories (article_id TEXT, category_id TEXT,
FOREIGN KEY(article_id) REFERENCES articles(page_id)
ON DELETE CASCADE,
FOREIGN KEY(category_id) REFERENCES categories(id)
ON DELETE CASCADE)
''')
db.execute('''
CREATE TABLE articles (page_id TEXT PRIMARY KEY, url TEXT,
title TEXT)
''')
db.execute('''
CREATE TABLE snippets (id TEXT PRIMARY KEY, snippet TEXT,
section TEXT, article_id TEXT, FOREIGN KEY(article_id)
REFERENCES articles(page_id) ON DELETE CASCADE)
''')
return db
def create_indices():
db = init_db()
db.execute('''CREATE INDEX IF NOT EXISTS snippets_articles
ON snippets(article_id);''')
## Instruction:
Remove IF EXISTS from DROP TABLE when resetting the db.
## Code After:
import sqlite3
DB_FILENAME = 'citationhunt.sqlite3'
def init_db():
return sqlite3.connect(DB_FILENAME)
def reset_db():
db = init_db()
with db:
db.execute('''
DROP TABLE categories
''')
db.execute('''
DROP TABLE articles
''')
db.execute('''
DROP TABLE snippets
''')
db.execute('''
DROP TABLE articles_categories
''')
db.execute('''
CREATE TABLE categories (id TEXT PRIMARY KEY, title TEXT)
''')
db.execute('''
INSERT INTO categories VALUES ("unassigned", "unassigned")
''')
db.execute('''
CREATE TABLE articles_categories (article_id TEXT, category_id TEXT,
FOREIGN KEY(article_id) REFERENCES articles(page_id)
ON DELETE CASCADE,
FOREIGN KEY(category_id) REFERENCES categories(id)
ON DELETE CASCADE)
''')
db.execute('''
CREATE TABLE articles (page_id TEXT PRIMARY KEY, url TEXT,
title TEXT)
''')
db.execute('''
CREATE TABLE snippets (id TEXT PRIMARY KEY, snippet TEXT,
section TEXT, article_id TEXT, FOREIGN KEY(article_id)
REFERENCES articles(page_id) ON DELETE CASCADE)
''')
return db
def create_indices():
db = init_db()
db.execute('''CREATE INDEX IF NOT EXISTS snippets_articles
ON snippets(article_id);''')
|
# ... existing code ...
with db:
db.execute('''
DROP TABLE categories
''')
db.execute('''
DROP TABLE articles
''')
db.execute('''
DROP TABLE snippets
''')
db.execute('''
DROP TABLE articles_categories
''')
db.execute('''
CREATE TABLE categories (id TEXT PRIMARY KEY, title TEXT)
# ... rest of the code ...
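A small standalone sketch of the behavioural difference (in-memory database, not the citationhunt file): without IF EXISTS the statement raises on a missing table, so reset_db() now assumes the tables were created previously.
import sqlite3
db = sqlite3.connect(':memory:')
db.execute('DROP TABLE IF EXISTS categories')    # old form: silently a no-op on a fresh db
try:
    db.execute('DROP TABLE categories')          # new form
except sqlite3.OperationalError as e:
    print(e)                                     # no such table: categories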
|
84865dd187f15e5fb291a969d245deb21542bd65
|
openex-api/src/main/java/io/openex/config/AppConfig.java
|
openex-api/src/main/java/io/openex/config/AppConfig.java
|
package io.openex.config;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.StdDateFormat;
import com.fasterxml.jackson.datatype.hibernate5.Hibernate5Module;
import io.openex.database.model.User;
import org.springframework.context.annotation.Bean;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;
@Component
@EnableAsync
public class AppConfig {
private final static String ANONYMOUS_USER = "anonymousUser";
// Validations
public final static String MANDATORY_MESSAGE = "This value should not be blank.";
public static User currentUser() {
Object principal = SecurityContextHolder.getContext().getAuthentication().getPrincipal();
if (ANONYMOUS_USER.equals(principal)) {
User anonymousUser = new User();
anonymousUser.setId("anonymous");
anonymousUser.setEmail("[email protected]");
return anonymousUser;
}
assert principal instanceof User;
return (User) principal;
}
@Bean
ObjectMapper openexJsonMapper() {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.setDateFormat(new StdDateFormat().withColonInTimeZone(true));
mapper.registerModule(new Hibernate5Module());
return mapper;
}
}
|
package io.openex.config;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.StdDateFormat;
import com.fasterxml.jackson.datatype.hibernate5.Hibernate5Module;
import io.openex.database.model.User;
import org.springframework.context.annotation.Bean;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;
@Component
@EnableAsync
public class AppConfig {
private final static String ANONYMOUS_USER = "anonymousUser";
// Validations
public final static String MANDATORY_MESSAGE = "This value should not be blank.";
public static User currentUser() {
Object principal = SecurityContextHolder.getContext().getAuthentication().getPrincipal();
if (ANONYMOUS_USER.equals(principal)) {
User anonymousUser = new User();
anonymousUser.setId("anonymous");
anonymousUser.setEmail("[email protected]");
return anonymousUser;
}
assert principal instanceof User;
return (User) principal;
}
public static void updateSessionUser(User user) {
Authentication authentication = new UsernamePasswordAuthenticationToken(user, user.getPassword(), user.getAuthorities());
SecurityContextHolder.getContext().setAuthentication(authentication);
}
@Bean
ObjectMapper openexJsonMapper() {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.setDateFormat(new StdDateFormat().withColonInTimeZone(true));
mapper.registerModule(new Hibernate5Module());
return mapper;
}
}
|
Update user session profile when self updating
|
[api] Update user session profile when self updating
|
Java
|
mit
|
Luatix/OpenEx,Luatix/OpenEx
|
java
|
## Code Before:
package io.openex.config;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.StdDateFormat;
import com.fasterxml.jackson.datatype.hibernate5.Hibernate5Module;
import io.openex.database.model.User;
import org.springframework.context.annotation.Bean;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;
@Component
@EnableAsync
public class AppConfig {
private final static String ANONYMOUS_USER = "anonymousUser";
// Validations
public final static String MANDATORY_MESSAGE = "This value should not be blank.";
public static User currentUser() {
Object principal = SecurityContextHolder.getContext().getAuthentication().getPrincipal();
if (ANONYMOUS_USER.equals(principal)) {
User anonymousUser = new User();
anonymousUser.setId("anonymous");
anonymousUser.setEmail("[email protected]");
return anonymousUser;
}
assert principal instanceof User;
return (User) principal;
}
@Bean
ObjectMapper openexJsonMapper() {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.setDateFormat(new StdDateFormat().withColonInTimeZone(true));
mapper.registerModule(new Hibernate5Module());
return mapper;
}
}
## Instruction:
[api] Update user session profile when self updating
## Code After:
package io.openex.config;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.StdDateFormat;
import com.fasterxml.jackson.datatype.hibernate5.Hibernate5Module;
import io.openex.database.model.User;
import org.springframework.context.annotation.Bean;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;
@Component
@EnableAsync
public class AppConfig {
private final static String ANONYMOUS_USER = "anonymousUser";
// Validations
public final static String MANDATORY_MESSAGE = "This value should not be blank.";
public static User currentUser() {
Object principal = SecurityContextHolder.getContext().getAuthentication().getPrincipal();
if (ANONYMOUS_USER.equals(principal)) {
User anonymousUser = new User();
anonymousUser.setId("anonymous");
anonymousUser.setEmail("[email protected]");
return anonymousUser;
}
assert principal instanceof User;
return (User) principal;
}
public static void updateSessionUser(User user) {
Authentication authentication = new UsernamePasswordAuthenticationToken(user, user.getPassword(), user.getAuthorities());
SecurityContextHolder.getContext().setAuthentication(authentication);
}
@Bean
ObjectMapper openexJsonMapper() {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.setDateFormat(new StdDateFormat().withColonInTimeZone(true));
mapper.registerModule(new Hibernate5Module());
return mapper;
}
}
|
...
import io.openex.database.model.User;
import org.springframework.context.annotation.Bean;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;
...
return (User) principal;
}
public static void updateSessionUser(User user) {
Authentication authentication = new UsernamePasswordAuthenticationToken(user, user.getPassword(), user.getAuthorities());
SecurityContextHolder.getContext().setAuthentication(authentication);
}
@Bean
ObjectMapper openexJsonMapper() {
ObjectMapper mapper = new ObjectMapper();
...
|
9a84ffde3909c74a47049c65e3b2bb5038a2cfaa
|
sillymap/burrows_wheeler.py
|
sillymap/burrows_wheeler.py
|
def burrows_wheeler(text):
"""Returns the burrows wheeler transform of <text>.
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append(text[i:] + text[:i])
all_permutations.sort()
return "".join([w[-1] for w in all_permutations])
|
def burrows_wheeler(text):
"""Calculates the burrows wheeler transform of <text>.
returns the burrows wheeler string and the suffix array indices
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append((text[i:] + text[:i],i))
all_permutations.sort()
bw_l = [] # burrows wheeler as list
sa_i = [] # suffix array indices
for w,j in all_permutations:
bw_l.append(w[-1])
sa_i.append(j)
return "".join(bw_l), sa_i
|
Return suffix array indices from burrows wheeler
|
Return suffix array indices from burrows wheeler
|
Python
|
mit
|
alneberg/sillymap
|
python
|
## Code Before:
def burrows_wheeler(text):
"""Returns the burrows wheeler transform of <text>.
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append(text[i:] + text[:i])
all_permutations.sort()
return "".join([w[-1] for w in all_permutations])
## Instruction:
Return suffix array indices from burrows wheeler
## Code After:
def burrows_wheeler(text):
"""Calculates the burrows wheeler transform of <text>.
returns the burrows wheeler string and the suffix array indices
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append((text[i:] + text[:i],i))
all_permutations.sort()
bw_l = [] # burrows wheeler as list
sa_i = [] # suffix array indices
for w,j in all_permutations:
bw_l.append(w[-1])
sa_i.append(j)
return "".join(bw_l), sa_i
|
// ... existing code ...
def burrows_wheeler(text):
"""Calculates the burrows wheeler transform of <text>.
returns the burrows wheeler string and the suffix array indices
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append((text[i:] + text[:i],i))
all_permutations.sort()
bw_l = [] # burrows wheeler as list
sa_i = [] # suffix array indices
for w,j in all_permutations:
bw_l.append(w[-1])
sa_i.append(j)
return "".join(bw_l), sa_i
// ... rest of the code ...
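A usage sketch of the new return value (the import path is inferred from the file name; the input string is arbitrary): the function now hands back the transform together with the suffix-array indices of the sorted rotations.
from sillymap.burrows_wheeler import burrows_wheeler
bw, sa = burrows_wheeler("banana")
print(bw)   # annb$aa
print(sa)   # [6, 5, 3, 1, 0, 4, 2]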
|
d59cd4e35b8c75fc053d565698518fa7fdffa3ae
|
src/test/java/org/yaml/snakeyaml/YamlTest.java
|
src/test/java/org/yaml/snakeyaml/YamlTest.java
|
/**
* Copyright (c) 2008-2010, http://www.snakeyaml.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yaml.snakeyaml;
import junit.framework.TestCase;
public class YamlTest extends TestCase {
public void testSetNoName() {
Yaml yaml = new Yaml();
assertTrue(yaml.toString().matches("Yaml:\\d+"));
}
public void testSetName() {
Yaml yaml = new Yaml();
yaml.setName("REST");
assertEquals("REST", yaml.getName());
assertEquals("REST", yaml.toString());
}
}
|
/**
* Copyright (c) 2008-2010, http://www.snakeyaml.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yaml.snakeyaml;
import java.util.Iterator;
import junit.framework.TestCase;
public class YamlTest extends TestCase {
public void testSetNoName() {
Yaml yaml = new Yaml();
assertTrue(yaml.toString().matches("Yaml:\\d+"));
}
public void testSetName() {
Yaml yaml = new Yaml();
yaml.setName("REST");
assertEquals("REST", yaml.getName());
assertEquals("REST", yaml.toString());
}
/**
* Check that documents are parsed only when they are asked to be loaded.
*/
public void testOneDocument() {
Yaml yaml = new Yaml();
String doc = "--- a\n--- [:]";
Iterator<Object> loaded = yaml.loadAll(doc).iterator();
assertTrue(loaded.hasNext());
Object obj1 = loaded.next();
assertEquals("a", obj1);
assertTrue(loaded.hasNext());
try {
loaded.next();
fail("Second document is invalid");
} catch (Exception e) {
assertEquals("while parsing a flow node; expected the node content, but found Value", e
.getMessage());
}
}
}
|
Add test: check that documents are parsed only when they are asked to be loaded
|
Add test: check that documents are parsed only when they are asked to be loaded
|
Java
|
apache-2.0
|
yskumar007/snakeyaml,umeding/snakeyaml,Mause/snakeyaml,nxbdi/snakeyaml,thinkofdeath/snakeyaml,AssafMashiah/assafmashiah-yaml,youxinren/snakeyaml,votinhthieugia/snakeyaml,yeison/snakeyaml,Fighter777/snakeyaml,UcasRichard/snakeyaml,yishenggudou/snakeyaml,virustotalop/snakeyaml,mapiman/snakeyaml,RussellSpitzer/snakeyaml,SorinS/snakeyaml,ilantoren/snakeyaml,cuizhennan/snakeyaml,marcuskara/snakeyaml,ldh0826/snakeyaml,jeremypepper/snakeyaml,mc-nekoneko/snakeyaml,Akkrite/snakeyaml,joy32812/snakeyaml
|
java
|
## Code Before:
/**
* Copyright (c) 2008-2010, http://www.snakeyaml.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yaml.snakeyaml;
import junit.framework.TestCase;
public class YamlTest extends TestCase {
public void testSetNoName() {
Yaml yaml = new Yaml();
assertTrue(yaml.toString().matches("Yaml:\\d+"));
}
public void testSetName() {
Yaml yaml = new Yaml();
yaml.setName("REST");
assertEquals("REST", yaml.getName());
assertEquals("REST", yaml.toString());
}
}
## Instruction:
Add test: check that documents are parsed only when they are asked to be loaded
## Code After:
/**
* Copyright (c) 2008-2010, http://www.snakeyaml.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yaml.snakeyaml;
import java.util.Iterator;
import junit.framework.TestCase;
public class YamlTest extends TestCase {
public void testSetNoName() {
Yaml yaml = new Yaml();
assertTrue(yaml.toString().matches("Yaml:\\d+"));
}
public void testSetName() {
Yaml yaml = new Yaml();
yaml.setName("REST");
assertEquals("REST", yaml.getName());
assertEquals("REST", yaml.toString());
}
/**
* Check that documents are parsed only when they are asked to be loaded.
*/
public void testOneDocument() {
Yaml yaml = new Yaml();
String doc = "--- a\n--- [:]";
Iterator<Object> loaded = yaml.loadAll(doc).iterator();
assertTrue(loaded.hasNext());
Object obj1 = loaded.next();
assertEquals("a", obj1);
assertTrue(loaded.hasNext());
try {
loaded.next();
fail("Second document is invalid");
} catch (Exception e) {
assertEquals("while parsing a flow node; expected the node content, but found Value", e
.getMessage());
}
}
}
|
# ... existing code ...
package org.yaml.snakeyaml;
import java.util.Iterator;
import junit.framework.TestCase;
public class YamlTest extends TestCase {
# ... modified code ...
assertEquals("REST", yaml.getName());
assertEquals("REST", yaml.toString());
}
/**
* Check that documents are parsed only when they are asked to be loaded.
*/
public void testOneDocument() {
Yaml yaml = new Yaml();
String doc = "--- a\n--- [:]";
Iterator<Object> loaded = yaml.loadAll(doc).iterator();
assertTrue(loaded.hasNext());
Object obj1 = loaded.next();
assertEquals("a", obj1);
assertTrue(loaded.hasNext());
try {
loaded.next();
fail("Second document is invalid");
} catch (Exception e) {
assertEquals("while parsing a flow node; expected the node content, but found Value", e
.getMessage());
}
}
}
# ... rest of the code ...
|
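A side note on the behaviour the new test pins down: lazy, per-document parsing is also how the Python YAML ecosystem behaves. As a rough analogue (PyYAML, not SnakeYAML; assumed to be installed), safe_load_all returns a generator, so an invalid later document only raises once the iterator actually reaches it:
import yaml  # PyYAML, assumed available
stream = "--- a\n--- ["            # second document is deliberately malformed
docs = yaml.safe_load_all(stream)  # generator; nothing is parsed yet
print(next(docs))                  # 'a' -- first document parsed on demand
try:
    next(docs)                     # parsing the broken second document fails here
except yaml.YAMLError as exc:
    print("second document rejected:", exc)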
25e2c37bb9dc17f0c10ae744b1554b94c4e5a7ff
|
doj/monkey/__init__.py
|
doj/monkey/__init__.py
|
import doj.monkey.django_utils_functional_lazy
import doj.monkey.django_http_response_streaminghttpresponse
import doj.monkey.inspect_getcallargs
def install_monkey_patches():
doj.monkey.django_utils_functional_lazy.install()
doj.monkey.django_http_response_streaminghttpresponse.install()
doj.monkey.inspect_getcallargs.install()
|
import doj.monkey.django_utils_functional_lazy
import doj.monkey.django_http_response_streaminghttpresponse
import doj.monkey.inspect_getcallargs
def install_monkey_patches():
# Make sure we install monkey patches only once
if not getattr(install_monkey_patches, 'installed', False):
setattr(install_monkey_patches, 'installed', True)
doj.monkey.django_utils_functional_lazy.install()
doj.monkey.django_http_response_streaminghttpresponse.install()
doj.monkey.inspect_getcallargs.install()
|
Make sure we install monkey patches only once
|
Make sure we install monkey patches only once
|
Python
|
bsd-3-clause
|
beachmachine/django-jython
|
python
|
## Code Before:
import doj.monkey.django_utils_functional_lazy
import doj.monkey.django_http_response_streaminghttpresponse
import doj.monkey.inspect_getcallargs
def install_monkey_patches():
doj.monkey.django_utils_functional_lazy.install()
doj.monkey.django_http_response_streaminghttpresponse.install()
doj.monkey.inspect_getcallargs.install()
## Instruction:
Make sure we install monkey patches only once
## Code After:
import doj.monkey.django_utils_functional_lazy
import doj.monkey.django_http_response_streaminghttpresponse
import doj.monkey.inspect_getcallargs
def install_monkey_patches():
# Make sure we install monkey patches only once
if not getattr(install_monkey_patches, 'installed', False):
setattr(install_monkey_patches, 'installed', True)
doj.monkey.django_utils_functional_lazy.install()
doj.monkey.django_http_response_streaminghttpresponse.install()
doj.monkey.inspect_getcallargs.install()
|
...
def install_monkey_patches():
# Make sure we install monkey patches only once
if not getattr(install_monkey_patches, 'installed', False):
setattr(install_monkey_patches, 'installed', True)
doj.monkey.django_utils_functional_lazy.install()
doj.monkey.django_http_response_streaminghttpresponse.install()
doj.monkey.inspect_getcallargs.install()
...
|
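The guard added above is a generic run-once idiom: the flag lives on the function object itself, so repeated calls become no-ops without any module-level global. A minimal self-contained sketch (the appended string stands in for the real doj.monkey install() calls):
applied = []
def install_monkey_patches():
    # Only the first invocation runs the body; later calls return immediately.
    if getattr(install_monkey_patches, 'installed', False):
        return
    install_monkey_patches.installed = True
    applied.append('patched')  # stand-in for the real install() calls
install_monkey_patches()
install_monkey_patches()
print(applied)  # ['patched'] -- the patches were applied exactly once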
e00a1a4ce49ad353e25b1ff9eeee2dafd854ddfc
|
src/python/helpers/python_wrap_const_shared_ptr.h
|
src/python/helpers/python_wrap_const_shared_ptr.h
|
/*ckwg +5
* Copyright 2011 by Kitware, Inc. All Rights Reserved. Please refer to
* KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
* Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
*/
#ifndef VISTK_PYTHON_HELPERS_PYTHON_WRAP_CONST_SHARED_PTR_H
#define VISTK_PYTHON_HELPERS_PYTHON_WRAP_CONST_SHARED_PTR_H
#include <boost/python/pointee.hpp>
#include <boost/shared_ptr.hpp>
// Retrieved from http://mail.python.org/pipermail/cplusplus-sig/2006-November/011329.html
namespace boost
{
namespace python
{
template <typename T>
inline
T*
get_pointer(boost::shared_ptr<T const> const& p)
{
return const_cast<T*>(p.get());
}
template <typename T>
struct pointee<boost::shared_ptr<T const> >
{
typedef T type;
};
}
}
#endif // VISTK_PYTHON_HELPERS_PYTHON_WRAP_CONST_SHARED_PTR_H
|
/*ckwg +5
* Copyright 2011 by Kitware, Inc. All Rights Reserved. Please refer to
* KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
* Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
*/
#ifndef VISTK_PYTHON_HELPERS_PYTHON_WRAP_CONST_SHARED_PTR_H
#define VISTK_PYTHON_HELPERS_PYTHON_WRAP_CONST_SHARED_PTR_H
#include <boost/python/pointee.hpp>
#include <boost/get_pointer.hpp>
#include <boost/shared_ptr.hpp>
// Retrieved from http://mail.python.org/pipermail/cplusplus-sig/2006-November/011329.html
namespace boost
{
namespace python
{
template <typename T>
inline
T*
get_pointer(boost::shared_ptr<T const> const& p)
{
return const_cast<T*>(p.get());
}
template <typename T>
struct pointee<boost::shared_ptr<T const> >
{
typedef T type;
};
// Don't hide other get_pointer instances.
using boost::python::get_pointer;
using boost::get_pointer;
}
}
#endif // VISTK_PYTHON_HELPERS_PYTHON_WRAP_CONST_SHARED_PTR_H
|
Bring other get_pointer instances into the scope
|
Bring other get_pointer instances into the scope
|
C
|
bsd-3-clause
|
Kitware/sprokit,linus-sherrill/sprokit,mathstuf/sprokit,linus-sherrill/sprokit,Kitware/sprokit,linus-sherrill/sprokit,Kitware/sprokit,linus-sherrill/sprokit,Kitware/sprokit,mathstuf/sprokit,mathstuf/sprokit,mathstuf/sprokit
|
c
|
## Code Before:
/*ckwg +5
* Copyright 2011 by Kitware, Inc. All Rights Reserved. Please refer to
* KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
* Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
*/
#ifndef VISTK_PYTHON_HELPERS_PYTHON_WRAP_CONST_SHARED_PTR_H
#define VISTK_PYTHON_HELPERS_PYTHON_WRAP_CONST_SHARED_PTR_H
#include <boost/python/pointee.hpp>
#include <boost/shared_ptr.hpp>
// Retrieved from http://mail.python.org/pipermail/cplusplus-sig/2006-November/011329.html
namespace boost
{
namespace python
{
template <typename T>
inline
T*
get_pointer(boost::shared_ptr<T const> const& p)
{
return const_cast<T*>(p.get());
}
template <typename T>
struct pointee<boost::shared_ptr<T const> >
{
typedef T type;
};
}
}
#endif // VISTK_PYTHON_HELPERS_PYTHON_WRAP_CONST_SHARED_PTR_H
## Instruction:
Bring other get_pointer instances into the scope
## Code After:
/*ckwg +5
* Copyright 2011 by Kitware, Inc. All Rights Reserved. Please refer to
* KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
* Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
*/
#ifndef VISTK_PYTHON_HELPERS_PYTHON_WRAP_CONST_SHARED_PTR_H
#define VISTK_PYTHON_HELPERS_PYTHON_WRAP_CONST_SHARED_PTR_H
#include <boost/python/pointee.hpp>
#include <boost/get_pointer.hpp>
#include <boost/shared_ptr.hpp>
// Retrieved from http://mail.python.org/pipermail/cplusplus-sig/2006-November/011329.html
namespace boost
{
namespace python
{
template <typename T>
inline
T*
get_pointer(boost::shared_ptr<T const> const& p)
{
return const_cast<T*>(p.get());
}
template <typename T>
struct pointee<boost::shared_ptr<T const> >
{
typedef T type;
};
// Don't hide other get_pointer instances.
using boost::python::get_pointer;
using boost::get_pointer;
}
}
#endif // VISTK_PYTHON_HELPERS_PYTHON_WRAP_CONST_SHARED_PTR_H
|
# ... existing code ...
#define VISTK_PYTHON_HELPERS_PYTHON_WRAP_CONST_SHARED_PTR_H
#include <boost/python/pointee.hpp>
#include <boost/get_pointer.hpp>
#include <boost/shared_ptr.hpp>
// Retrieved from http://mail.python.org/pipermail/cplusplus-sig/2006-November/011329.html
# ... modified code ...
typedef T type;
};
// Don't hide other get_pointer instances.
using boost::python::get_pointer;
using boost::get_pointer;
}
}
# ... rest of the code ...
|
d952bf54fe5be958b735a3285a3f153b362aace1
|
EVGEN/EVGENLinkDef.h
|
EVGEN/EVGENLinkDef.h
|
/* Copyright(c) 1998-1999, ALICE Experiment at CERN, All rights reserved. *
* See cxx source for full Copyright notice */
/* $Id$ */
#pragma link off all globals;
#pragma link off all classes;
#pragma link off all functions;
#pragma link C++ enum Process_t;
#pragma link C++ enum Decay_t;
#pragma link C++ enum StrucFunc_t;
#pragma link C++ enum Param_t;
#pragma link C++ class AliGenHIJINGpara;
#pragma link C++ class AliGenFixed;
#pragma link C++ class AliGenBox;
#pragma link C++ class AliGenParam;
#pragma link C++ class AliGenPythia;
#pragma link C++ class AliGenCocktail-;
#pragma link C++ class AliGenCocktailEntry;
#pragma link C++ class AliGenExtFile;
#pragma link C++ class AliGenScan;
#pragma link C++ class AliPythia;
#pragma link C++ class AliGenMUONlib;
#pragma link C++ class AliGenFLUKAsource;
#pragma link C++ class AliGenHalo;
#pragma link C++ class AliDimuCombinator;
#endif
|
/* Copyright(c) 1998-1999, ALICE Experiment at CERN, All rights reserved. *
* See cxx source for full Copyright notice */
/* $Id$ */
#pragma link off all globals;
#pragma link off all classes;
#pragma link off all functions;
#pragma link C++ enum Process_t;
#pragma link C++ enum Decay_t;
#pragma link C++ enum StrucFunc_t;
#pragma link C++ enum Param_t;
#pragma link C++ enum Weighting_t;
#pragma link C++ class AliGenHIJINGpara;
#pragma link C++ class AliGenFixed;
#pragma link C++ class AliGenBox;
#pragma link C++ class AliGenParam;
#pragma link C++ class AliGenPythia;
#pragma link C++ class AliGenCocktail-;
#pragma link C++ class AliGenCocktailEntry;
#pragma link C++ class AliGenExtFile;
#pragma link C++ class AliGenScan;
#pragma link C++ class AliPythia;
#pragma link C++ class AliGenMUONlib;
#pragma link C++ class AliGenFLUKAsource;
#pragma link C++ class AliGenHalo;
#pragma link C++ class AliDimuCombinator;
#endif
|
Add Weighting_t in the LinkDef
|
Add Weighting_t in the LinkDef
|
C
|
bsd-3-clause
|
shahor02/AliRoot,mkrzewic/AliRoot,mkrzewic/AliRoot,mkrzewic/AliRoot,sebaleh/AliRoot,miranov25/AliRoot,ALICEHLT/AliRoot,shahor02/AliRoot,ecalvovi/AliRoot,ecalvovi/AliRoot,alisw/AliRoot,mkrzewic/AliRoot,sebaleh/AliRoot,alisw/AliRoot,miranov25/AliRoot,sebaleh/AliRoot,coppedis/AliRoot,miranov25/AliRoot,coppedis/AliRoot,jgrosseo/AliRoot,sebaleh/AliRoot,miranov25/AliRoot,ALICEHLT/AliRoot,miranov25/AliRoot,ecalvovi/AliRoot,ALICEHLT/AliRoot,shahor02/AliRoot,shahor02/AliRoot,ALICEHLT/AliRoot,miranov25/AliRoot,alisw/AliRoot,sebaleh/AliRoot,coppedis/AliRoot,jgrosseo/AliRoot,shahor02/AliRoot,jgrosseo/AliRoot,jgrosseo/AliRoot,alisw/AliRoot,mkrzewic/AliRoot,ecalvovi/AliRoot,miranov25/AliRoot,alisw/AliRoot,ecalvovi/AliRoot,coppedis/AliRoot,sebaleh/AliRoot,ALICEHLT/AliRoot,alisw/AliRoot,coppedis/AliRoot,alisw/AliRoot,alisw/AliRoot,coppedis/AliRoot,coppedis/AliRoot,ecalvovi/AliRoot,mkrzewic/AliRoot,ALICEHLT/AliRoot,mkrzewic/AliRoot,ecalvovi/AliRoot,jgrosseo/AliRoot,jgrosseo/AliRoot,coppedis/AliRoot,miranov25/AliRoot,ALICEHLT/AliRoot,shahor02/AliRoot,jgrosseo/AliRoot,shahor02/AliRoot,sebaleh/AliRoot
|
c
|
## Code Before:
/* Copyright(c) 1998-1999, ALICE Experiment at CERN, All rights reserved. *
* See cxx source for full Copyright notice */
/* $Id$ */
#pragma link off all globals;
#pragma link off all classes;
#pragma link off all functions;
#pragma link C++ enum Process_t;
#pragma link C++ enum Decay_t;
#pragma link C++ enum StrucFunc_t;
#pragma link C++ enum Param_t;
#pragma link C++ class AliGenHIJINGpara;
#pragma link C++ class AliGenFixed;
#pragma link C++ class AliGenBox;
#pragma link C++ class AliGenParam;
#pragma link C++ class AliGenPythia;
#pragma link C++ class AliGenCocktail-;
#pragma link C++ class AliGenCocktailEntry;
#pragma link C++ class AliGenExtFile;
#pragma link C++ class AliGenScan;
#pragma link C++ class AliPythia;
#pragma link C++ class AliGenMUONlib;
#pragma link C++ class AliGenFLUKAsource;
#pragma link C++ class AliGenHalo;
#pragma link C++ class AliDimuCombinator;
#endif
## Instruction:
Add Weighting_t in the LinkDef
## Code After:
/* Copyright(c) 1998-1999, ALICE Experiment at CERN, All rights reserved. *
* See cxx source for full Copyright notice */
/* $Id$ */
#pragma link off all globals;
#pragma link off all classes;
#pragma link off all functions;
#pragma link C++ enum Process_t;
#pragma link C++ enum Decay_t;
#pragma link C++ enum StrucFunc_t;
#pragma link C++ enum Param_t;
#pragma link C++ enum Weighting_t;
#pragma link C++ class AliGenHIJINGpara;
#pragma link C++ class AliGenFixed;
#pragma link C++ class AliGenBox;
#pragma link C++ class AliGenParam;
#pragma link C++ class AliGenPythia;
#pragma link C++ class AliGenCocktail-;
#pragma link C++ class AliGenCocktailEntry;
#pragma link C++ class AliGenExtFile;
#pragma link C++ class AliGenScan;
#pragma link C++ class AliPythia;
#pragma link C++ class AliGenMUONlib;
#pragma link C++ class AliGenFLUKAsource;
#pragma link C++ class AliGenHalo;
#pragma link C++ class AliDimuCombinator;
#endif
|
# ... existing code ...
#pragma link C++ enum Decay_t;
#pragma link C++ enum StrucFunc_t;
#pragma link C++ enum Param_t;
#pragma link C++ enum Weighting_t;
#pragma link C++ class AliGenHIJINGpara;
#pragma link C++ class AliGenFixed;
# ... rest of the code ...
|
a78445cfada5cc1f77a7887dc5241071bef69989
|
compass/tests/test_models.py
|
compass/tests/test_models.py
|
from django.test import TestCase
from compass.models import (Category,
Book)
class CategoryTestCase(TestCase):
def test_can_add_category(self,):
Category.create(title="Mock Category")
self.assertEqual(Category.find("Mock Category").count(), 1)
class BookTestCase(TestCase):
def test_can_add_book(self):
category = Category.create(title="Mock Category")
Book.create(title="Mock Book", category=category)
self.assertEqual(Book.find("Mock Book").count(), 1)
|
from django.test import TestCase
from compass.models import (Category,
Book, Compass)
class CategoryTestCase(TestCase):
def test_can_add_category(self,):
Category.create(title="Mock Category")
self.assertEqual(Category.find("Mock Category").count(), 1)
class BookTestCase(TestCase):
def test_can_add_book(self):
category = Category.create(title="Mock Category")
Book.create(title="Mock Book", category=category)
self.assertEqual(Book.find("Mock Book").count(), 1)
class CompassTestCase(TestCase):
def test_correct_title_if_not_title_and_category(self,):
heading = Compass.heading(title="", category="")
self.assertEqual(heading, "All books")
def test_correct_title_if_not_category(self,):
heading = Compass.heading(title="Title 1", category="")
self.assertEqual(heading, "All book titles like Title 1")
def test_correct_title_if_not_title(self,):
heading = Compass.heading(title="", category="Category 1")
self.assertEqual(heading, "All book titles under Category 1")
|
Test correct heading returned in search results
|
Test correct heading returned in search results
|
Python
|
mit
|
andela-osule/bookworm,andela-osule/bookworm
|
python
|
## Code Before:
from django.test import TestCase
from compass.models import (Category,
Book)
class CategoryTestCase(TestCase):
def test_can_add_category(self,):
Category.create(title="Mock Category")
self.assertEqual(Category.find("Mock Category").count(), 1)
class BookTestCase(TestCase):
def test_can_add_book(self):
category = Category.create(title="Mock Category")
Book.create(title="Mock Book", category=category)
self.assertEqual(Book.find("Mock Book").count(), 1)
## Instruction:
Test correct heading returned in search results
## Code After:
from django.test import TestCase
from compass.models import (Category,
Book, Compass)
class CategoryTestCase(TestCase):
def test_can_add_category(self,):
Category.create(title="Mock Category")
self.assertEqual(Category.find("Mock Category").count(), 1)
class BookTestCase(TestCase):
def test_can_add_book(self):
category = Category.create(title="Mock Category")
Book.create(title="Mock Book", category=category)
self.assertEqual(Book.find("Mock Book").count(), 1)
class CompassTestCase(TestCase):
def test_correct_title_if_not_title_and_category(self,):
heading = Compass.heading(title="", category="")
self.assertEqual(heading, "All books")
def test_correct_title_if_not_category(self,):
heading = Compass.heading(title="Title 1", category="")
self.assertEqual(heading, "All book titles like Title 1")
def test_correct_title_if_not_title(self,):
heading = Compass.heading(title="", category="Category 1")
self.assertEqual(heading, "All book titles under Category 1")
|
...
from django.test import TestCase
from compass.models import (Category,
Book, Compass)
class CategoryTestCase(TestCase):
...
category = Category.create(title="Mock Category")
Book.create(title="Mock Book", category=category)
self.assertEqual(Book.find("Mock Book").count(), 1)
class CompassTestCase(TestCase):
def test_correct_title_if_not_title_and_category(self,):
heading = Compass.heading(title="", category="")
self.assertEqual(heading, "All books")
def test_correct_title_if_not_category(self,):
heading = Compass.heading(title="Title 1", category="")
self.assertEqual(heading, "All book titles like Title 1")
def test_correct_title_if_not_title(self,):
heading = Compass.heading(title="", category="Category 1")
self.assertEqual(heading, "All book titles under Category 1")
...
|
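The new CompassTestCase fixes the expected headings for three title/category combinations, but the Compass model itself is not part of this record. A hypothetical implementation consistent with those assertions could look roughly like this (the both-present branch is not covered by the tests, so its wording is a guess):
class Compass(object):
    @staticmethod
    def heading(title="", category=""):
        # Mirrors the behaviour asserted in CompassTestCase above.
        if not title and not category:
            return "All books"
        if title and not category:
            return "All book titles like {0}".format(title)
        if category and not title:
            return "All book titles under {0}".format(category)
        return "All book titles like {0} under {1}".format(title, category)
print(Compass.heading())                        # All books
print(Compass.heading(title="Title 1"))         # All book titles like Title 1
print(Compass.heading(category="Category 1"))   # All book titles under Category 1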
a7ed3ae2aedcb74a351b850dfa035adda08ea05b
|
__init__.py
|
__init__.py
|
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
return { "name": "CuraEngine Backend", "type": "Backend" }
def register(app):
Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
return CuraEngineBackend.CuraEngineBackend()
|
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
return {
'type': 'backend',
'plugin': {
'name': "CuraEngine Backend"
}
}
def register(app):
Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
return CuraEngineBackend.CuraEngineBackend()
|
Update plugin metadata to the new format
|
Update plugin metadata to the new format
|
Python
|
agpl-3.0
|
ad1217/Cura,derekhe/Cura,hmflash/Cura,fieldOfView/Cura,totalretribution/Cura,senttech/Cura,bq/Ultimaker-Cura,DeskboxBrazil/Cura,totalretribution/Cura,quillford/Cura,ynotstartups/Wanhao,lo0ol/Ultimaker-Cura,Curahelper/Cura,hmflash/Cura,derekhe/Cura,quillford/Cura,lo0ol/Ultimaker-Cura,fxtentacle/Cura,ad1217/Cura,DeskboxBrazil/Cura,fieldOfView/Cura,ynotstartups/Wanhao,fxtentacle/Cura,senttech/Cura,markwal/Cura,markwal/Cura,bq/Ultimaker-Cura,Curahelper/Cura
|
python
|
## Code Before:
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
return { "name": "CuraEngine Backend", "type": "Backend" }
def register(app):
Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
return CuraEngineBackend.CuraEngineBackend()
## Instruction:
Update plugin metadata to the new format
## Code After:
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
return {
'type': 'backend',
'plugin': {
'name': "CuraEngine Backend"
}
}
def register(app):
Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
return CuraEngineBackend.CuraEngineBackend()
|
...
from UM.Preferences import Preferences
def getMetaData():
return {
'type': 'backend',
'plugin': {
'name': "CuraEngine Backend"
}
}
def register(app):
Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
...
|
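The new metadata layout moves the human-readable name under a nested 'plugin' key and lowercases the type. A consumer reading that structure would do something like the following (describe_plugin is a hypothetical helper, not part of the Cura/Uranium API):
def describe_plugin(metadata):
    # Hypothetical reader of the nested metadata format introduced above.
    plugin_type = metadata.get('type', 'unknown')
    name = metadata.get('plugin', {}).get('name', 'unnamed')
    return '%s (%s)' % (name, plugin_type)
metadata = {
    'type': 'backend',
    'plugin': {'name': "CuraEngine Backend"},
}
print(describe_plugin(metadata))  # CuraEngine Backend (backend)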
29409a5abac1ea19afc9b0cdda3061216047ffeb
|
d02/d02s05/d02s05e02-real-database/src/main/java/net/safedata/springboot/training/d02/s05/config/DataSourceConfig.java
|
d02/d02s05/d02s05e02-real-database/src/main/java/net/safedata/springboot/training/d02/s05/config/DataSourceConfig.java
|
package net.safedata.springboot.training.d02.s05.config;
import net.safedata.springboot.training.d02.s05.model.Product;
import net.safedata.springboot.training.d02.s05.service.ProductService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.annotation.PostConstruct;
/**
* A simple {@link javax.sql.DataSource} configuration, which:
* <ul>
* <li>configures the JPA repositories, using the {@link EnableJpaRepositories} annotation</li>
* <li>inserts a simple {@link Product} in the database, using the auto-configured {@link javax.sql.DataSource}</li>
* </ul>
*/
@Configuration
@EnableJpaRepositories(basePackages = "net.safedata.springboot.training.d02.s05.repository")
@EnableTransactionManagement
public class DataSourceConfig {
private final ProductService productService;
@Autowired
public DataSourceConfig(final ProductService productService) {
this.productService = productService;
}
@PostConstruct
public void init() {
final Product product = new Product();
product.setName("A default product");
productService.create(product);
}
}
|
package net.safedata.springboot.training.d02.s05.config;
import net.safedata.springboot.training.d02.s05.model.Product;
import net.safedata.springboot.training.d02.s05.service.ProductService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.annotation.PostConstruct;
import java.util.Random;
import java.util.stream.IntStream;
/**
* A simple {@link javax.sql.DataSource} configuration, which:
* <ul>
* <li>configures the JPA repositories, using the {@link EnableJpaRepositories} annotation</li>
* <li>inserts a simple {@link Product} in the database, using the auto-configured {@link javax.sql.DataSource}</li>
* </ul>
*/
@Configuration
@EnableJpaRepositories(basePackages = "net.safedata.springboot.training.d02.s05.repository")
@EnableTransactionManagement
public class DataSourceConfig {
private static final Random RANDOM = new Random(200);
private final ProductService productService;
@Autowired
public DataSourceConfig(final ProductService productService) {
this.productService = productService;
}
@PostConstruct
public void init() {
IntStream.range(0, 20)
.forEach(value -> {
final Product product = new Product();
product.setName("A default product with the ID " + RANDOM.nextInt());
productService.create(product);
});
}
}
|
Insert several products in the database
|
[improve] Insert several products in the database
|
Java
|
apache-2.0
|
bogdansolga/spring-boot-training,bogdansolga/spring-boot-training
|
java
|
## Code Before:
package net.safedata.springboot.training.d02.s05.config;
import net.safedata.springboot.training.d02.s05.model.Product;
import net.safedata.springboot.training.d02.s05.service.ProductService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.annotation.PostConstruct;
/**
* A simple {@link javax.sql.DataSource} configuration, which:
* <ul>
* <li>configures the JPA repositories, using the {@link EnableJpaRepositories} annotation</li>
* <li>inserts a simple {@link Product} in the database, using the auto-configured {@link javax.sql.DataSource}</li>
* </ul>
*/
@Configuration
@EnableJpaRepositories(basePackages = "net.safedata.springboot.training.d02.s05.repository")
@EnableTransactionManagement
public class DataSourceConfig {
private final ProductService productService;
@Autowired
public DataSourceConfig(final ProductService productService) {
this.productService = productService;
}
@PostConstruct
public void init() {
final Product product = new Product();
product.setName("A default product");
productService.create(product);
}
}
## Instruction:
[improve] Insert several products in the database
## Code After:
package net.safedata.springboot.training.d02.s05.config;
import net.safedata.springboot.training.d02.s05.model.Product;
import net.safedata.springboot.training.d02.s05.service.ProductService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.annotation.PostConstruct;
import java.util.Random;
import java.util.stream.IntStream;
/**
* A simple {@link javax.sql.DataSource} configuration, which:
* <ul>
* <li>configures the JPA repositories, using the {@link EnableJpaRepositories} annotation</li>
* <li>inserts a simple {@link Product} in the database, using the auto-configured {@link javax.sql.DataSource}</li>
* </ul>
*/
@Configuration
@EnableJpaRepositories(basePackages = "net.safedata.springboot.training.d02.s05.repository")
@EnableTransactionManagement
public class DataSourceConfig {
private static final Random RANDOM = new Random(200);
private final ProductService productService;
@Autowired
public DataSourceConfig(final ProductService productService) {
this.productService = productService;
}
@PostConstruct
public void init() {
IntStream.range(0, 20)
.forEach(value -> {
final Product product = new Product();
product.setName("A default product with the ID " + RANDOM.nextInt());
productService.create(product);
});
}
}
|
# ... existing code ...
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.annotation.PostConstruct;
import java.util.Random;
import java.util.stream.IntStream;
/**
* A simple {@link javax.sql.DataSource} configuration, which:
# ... modified code ...
@EnableTransactionManagement
public class DataSourceConfig {
private static final Random RANDOM = new Random(200);
private final ProductService productService;
@Autowired
...
@PostConstruct
public void init() {
IntStream.range(0, 20)
.forEach(value -> {
final Product product = new Product();
product.setName("A default product with the ID " + RANDOM.nextInt());
productService.create(product);
});
}
}
# ... rest of the code ...
|
1585507cfb362f92a5dd711a91dc69b6b0314e18
|
include/lldb/Host/Config.h
|
include/lldb/Host/Config.h
|
//===-- Config.h ------------------------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#ifndef LLDB_HOST_CONFIG_H
#define LLDB_HOST_CONFIG_H
#if defined(__APPLE__)
// This block of code only exists to keep the Xcode project working in the
// absence of a configuration step.
#define LLDB_CONFIG_TERMIOS_SUPPORTED 1
#define HAVE_SYS_EVENT_H 1
#define HAVE_PPOLL 0
#else
#error This file is only used by the Xcode build.
#endif
#endif // #ifndef LLDB_HOST_CONFIG_H
|
//===-- Config.h ------------------------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#ifndef LLDB_HOST_CONFIG_H
#define LLDB_HOST_CONFIG_H
#if defined(__APPLE__)
// This block of code only exists to keep the Xcode project working in the
// absence of a configuration step.
#define LLDB_CONFIG_TERMIOS_SUPPORTED 1
#define HAVE_SYS_EVENT_H 1
#define HAVE_PPOLL 0
#define HAVE_SIGACTION 1
#else
#error This file is only used by the Xcode build.
#endif
#endif // #ifndef LLDB_HOST_CONFIG_H
|
Define HAVE_SIGACTION to 1 in Xcode build
|
Define HAVE_SIGACTION to 1 in Xcode build
This is needed to make the Xcode project build since it doesn't have auto-generated Config header.
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@300618 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb
|
c
|
## Code Before:
//===-- Config.h ------------------------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#ifndef LLDB_HOST_CONFIG_H
#define LLDB_HOST_CONFIG_H
#if defined(__APPLE__)
// This block of code only exists to keep the Xcode project working in the
// absence of a configuration step.
#define LLDB_CONFIG_TERMIOS_SUPPORTED 1
#define HAVE_SYS_EVENT_H 1
#define HAVE_PPOLL 0
#else
#error This file is only used by the Xcode build.
#endif
#endif // #ifndef LLDB_HOST_CONFIG_H
## Instruction:
Define HAVE_SIGACTION to 1 in Xcode build
This is needed to make the Xcode project build since it doesn't have auto-generated Config header.
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@300618 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
//===-- Config.h ------------------------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#ifndef LLDB_HOST_CONFIG_H
#define LLDB_HOST_CONFIG_H
#if defined(__APPLE__)
// This block of code only exists to keep the Xcode project working in the
// absence of a configuration step.
#define LLDB_CONFIG_TERMIOS_SUPPORTED 1
#define HAVE_SYS_EVENT_H 1
#define HAVE_PPOLL 0
#define HAVE_SIGACTION 1
#else
#error This file is only used by the Xcode build.
#endif
#endif // #ifndef LLDB_HOST_CONFIG_H
|
# ... existing code ...
#define HAVE_PPOLL 0
#define HAVE_SIGACTION 1
#else
#error This file is only used by the Xcode build.
# ... rest of the code ...
|
fdba583b95aca8d03525e993d6fe338f59bc601b
|
wysi/core/src/com/seven/game/game_objects/interactions/IHide.java
|
wysi/core/src/com/seven/game/game_objects/interactions/IHide.java
|
package game_objects.interactions;
/**
* Created by mtvarkovsky on 12/10/16.
*/
public interface IHide {
}
|
package com.seven.game.game_objects.interactions;
public interface IHide {
Boolean possibleToHide();
}
|
Add interface for GameObjects to be able to hide
|
Add interface for GameObjects to be able to hide
|
Java
|
mit
|
mtvarkovsky/old_48
|
java
|
## Code Before:
package game_objects.interactions;
/**
* Created by mtvarkovsky on 12/10/16.
*/
public interface IHide {
}
## Instruction:
Add interface for GameObjects to be able to hide
## Code After:
package com.seven.game.game_objects.interactions;
public interface IHide {
Boolean possibleToHide();
}
|
...
package com.seven.game.game_objects.interactions;
public interface IHide {
Boolean possibleToHide();
}
...
|
a4dca78e05e29151fb8d20573f4387d750c10bc3
|
Paystack/PublicHeaders/PSTCKTransactionParams.h
|
Paystack/PublicHeaders/PSTCKTransactionParams.h
|
//
// PSTCKTransactionParams.h
// Paystack
//
#import <Foundation/Foundation.h>
#import "PSTCKFormEncodable.h"
/**
* Representation of a user's credit card details. You can assemble these with information that your user enters and
* then create Paystack tokens with them using an PSTCKAPIClient. @see https://paystack.com/docs/api#cards
*/
@interface PSTCKTransactionParams : NSObject<PSTCKFormEncodable>
@property (nonatomic, copy, nonnull) NSString *email;
@property (nonatomic, nonnull) NSUInteger *amount;
@property (nonatomic, copy, nullable) NSString *reference;
@property (nonatomic, copy, nullable) NSString *subaccount;
@property (nonatomic, nullable) NSUInteger *transactionCharge;
@property (nonatomic, copy, nullable) NSString *bearer;
@property (nonatomic, copy, nullable) NSString *metadata;
@end
|
//
// PSTCKTransactionParams.h
// Paystack
//
#import <Foundation/Foundation.h>
#import "PSTCKFormEncodable.h"
/**
* Representation of a user's credit card details. You can assemble these with information that your user enters and
* then create Paystack tokens with them using an PSTCKAPIClient. @see https://paystack.com/docs/api#cards
*/
@interface PSTCKTransactionParams : NSObject<PSTCKFormEncodable>
@property (nonatomic, copy, nonnull) NSString *email;
@property (nonatomic) NSUInteger amount;
@property (nonatomic, copy, nullable) NSString *reference;
@property (nonatomic, copy, nullable) NSString *subaccount;
@property (nonatomic) NSUInteger transactionCharge;
@property (nonatomic, copy, nullable) NSString *bearer;
@property (nonatomic, copy, nullable) NSString *metadata;
@end
|
Make UInt safe on Transaction Params by not using pointer
|
Make UInt safe on Transaction Params by not using pointer
|
C
|
mit
|
PaystackHQ/paystack-ios,PaystackHQ/paystack-ios,PaystackHQ/paystack-ios,PaystackHQ/paystack-ios
|
c
|
## Code Before:
//
// PSTCKTransactionParams.h
// Paystack
//
#import <Foundation/Foundation.h>
#import "PSTCKFormEncodable.h"
/**
* Representation of a user's credit card details. You can assemble these with information that your user enters and
* then create Paystack tokens with them using an PSTCKAPIClient. @see https://paystack.com/docs/api#cards
*/
@interface PSTCKTransactionParams : NSObject<PSTCKFormEncodable>
@property (nonatomic, copy, nonnull) NSString *email;
@property (nonatomic, nonnull) NSUInteger *amount;
@property (nonatomic, copy, nullable) NSString *reference;
@property (nonatomic, copy, nullable) NSString *subaccount;
@property (nonatomic, nullable) NSUInteger *transactionCharge;
@property (nonatomic, copy, nullable) NSString *bearer;
@property (nonatomic, copy, nullable) NSString *metadata;
@end
## Instruction:
Make UInt safe on Transaction Params by not using pointer
## Code After:
//
// PSTCKTransactionParams.h
// Paystack
//
#import <Foundation/Foundation.h>
#import "PSTCKFormEncodable.h"
/**
* Representation of a user's credit card details. You can assemble these with information that your user enters and
* then create Paystack tokens with them using an PSTCKAPIClient. @see https://paystack.com/docs/api#cards
*/
@interface PSTCKTransactionParams : NSObject<PSTCKFormEncodable>
@property (nonatomic, copy, nonnull) NSString *email;
@property (nonatomic) NSUInteger amount;
@property (nonatomic, copy, nullable) NSString *reference;
@property (nonatomic, copy, nullable) NSString *subaccount;
@property (nonatomic) NSUInteger transactionCharge;
@property (nonatomic, copy, nullable) NSString *bearer;
@property (nonatomic, copy, nullable) NSString *metadata;
@end
|
// ... existing code ...
@interface PSTCKTransactionParams : NSObject<PSTCKFormEncodable>
@property (nonatomic, copy, nonnull) NSString *email;
@property (nonatomic) NSUInteger amount;
@property (nonatomic, copy, nullable) NSString *reference;
@property (nonatomic, copy, nullable) NSString *subaccount;
@property (nonatomic) NSUInteger transactionCharge;
@property (nonatomic, copy, nullable) NSString *bearer;
@property (nonatomic, copy, nullable) NSString *metadata;
// ... rest of the code ...
|
c55243d591793a9213d27126a3c240bb47c5f82b
|
cartoframes/core/cartodataframe.py
|
cartoframes/core/cartodataframe.py
|
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
super(CartoDataFrame, self).__init__(*args, **kwargs)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
gdf = GeoDataFrame.from_file(filename, **kwargs)
return cls(gdf)
@classmethod
def from_features(cls, features, **kwargs):
gdf = GeoDataFrame.from_features(features, **kwargs)
return cls(gdf)
def to_carto(self, *args, **kwargs):
from ..io.carto import to_carto
return to_carto(self, *args, **kwargs)
def convert(self, index_column=None, geom_column=None, lnglat_columns=None,
drop_index=True, drop_geom=True, drop_lnglat=True):
# Magic function
generate_index(self, index_column, drop_index)
generate_geometry(self, geom_column, lnglat_columns, drop_geom, drop_lnglat)
return self
def visualize(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
viz = visualize
|
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
super(CartoDataFrame, self).__init__(*args, **kwargs)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
gdf = GeoDataFrame.from_file(filename, **kwargs)
return cls(gdf)
@classmethod
def from_features(cls, features, **kwargs):
gdf = GeoDataFrame.from_features(features, **kwargs)
return cls(gdf)
def to_carto(self, *args, **kwargs):
from ..io.carto import to_carto
return to_carto(self, *args, **kwargs)
def convert(self, index_column=None, geom_column=None, lnglat_columns=None,
drop_index=True, drop_geom=True, drop_lnglat=True):
# Magic function
generate_index(self, index_column, drop_index)
generate_geometry(self, geom_column, lnglat_columns, drop_geom, drop_lnglat)
return self
def viz(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
|
Rename visualize to viz in CDF
|
Rename visualize to viz in CDF
|
Python
|
bsd-3-clause
|
CartoDB/cartoframes,CartoDB/cartoframes
|
python
|
## Code Before:
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
super(CartoDataFrame, self).__init__(*args, **kwargs)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
gdf = GeoDataFrame.from_file(filename, **kwargs)
return cls(gdf)
@classmethod
def from_features(cls, features, **kwargs):
gdf = GeoDataFrame.from_features(features, **kwargs)
return cls(gdf)
def to_carto(self, *args, **kwargs):
from ..io.carto import to_carto
return to_carto(self, *args, **kwargs)
def convert(self, index_column=None, geom_column=None, lnglat_columns=None,
drop_index=True, drop_geom=True, drop_lnglat=True):
# Magic function
generate_index(self, index_column, drop_index)
generate_geometry(self, geom_column, lnglat_columns, drop_geom, drop_lnglat)
return self
def visualize(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
viz = visualize
## Instruction:
Rename visualize to viz in CDF
## Code After:
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
super(CartoDataFrame, self).__init__(*args, **kwargs)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
gdf = GeoDataFrame.from_file(filename, **kwargs)
return cls(gdf)
@classmethod
def from_features(cls, features, **kwargs):
gdf = GeoDataFrame.from_features(features, **kwargs)
return cls(gdf)
def to_carto(self, *args, **kwargs):
from ..io.carto import to_carto
return to_carto(self, *args, **kwargs)
def convert(self, index_column=None, geom_column=None, lnglat_columns=None,
drop_index=True, drop_geom=True, drop_lnglat=True):
# Magic function
generate_index(self, index_column, drop_index)
generate_geometry(self, geom_column, lnglat_columns, drop_geom, drop_lnglat)
return self
def viz(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
|
// ... existing code ...
generate_geometry(self, geom_column, lnglat_columns, drop_geom, drop_lnglat)
return self
def viz(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
// ... rest of the code ...
|
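After this rename only viz() exists; the visualize spelling and its alias are gone, so downstream code has to call the short name. A hedged usage sketch (requires cartoframes; the file name is made up, and the import path simply mirrors this record's module):
from cartoframes.core.cartodataframe import CartoDataFrame  # path as in this record
cdf = CartoDataFrame.from_file('stores.geojson')  # any file GeoDataFrame.from_file can read
# cdf.visualize(...) is no longer defined after this change; the method is viz():
cdf.viz()  # builds and returns Map(Layer(cdf))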
3bef86bd3637642587ed15680249c278504fc4fb
|
pontoon/administration/management/commands/update_projects.py
|
pontoon/administration/management/commands/update_projects.py
|
import os
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
self.stdout.write('Successfully updated project "%s"' % project)
except Exception as e:
raise CommandError('UpdateProjectsFromRepositoryError: %s' % unicode(e))
|
import os
import datetime
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
now = datetime.datetime.now()
self.stdout.write('[%s]: Successfully updated project "%s"\n' % (now, project))
except Exception as e:
now = datetime.datetime.now()
raise CommandError('[%s]: UpdateProjectsFromRepositoryError: %s\n' % (now, unicode(e)))
|
Add timestamp and newline to log messages
|
Add timestamp and newline to log messages
|
Python
|
bsd-3-clause
|
mathjazz/pontoon,yfdyh000/pontoon,jotes/pontoon,mozilla/pontoon,sudheesh001/pontoon,mathjazz/pontoon,jotes/pontoon,vivekanand1101/pontoon,mastizada/pontoon,jotes/pontoon,m8ttyB/pontoon,Jobava/mirror-pontoon,Jobava/mirror-pontoon,participedia/pontoon,jotes/pontoon,yfdyh000/pontoon,m8ttyB/pontoon,vivekanand1101/pontoon,vivekanand1101/pontoon,sudheesh001/pontoon,sudheesh001/pontoon,Osmose/pontoon,m8ttyB/pontoon,Osmose/pontoon,Osmose/pontoon,participedia/pontoon,sudheesh001/pontoon,Jobava/mirror-pontoon,mastizada/pontoon,Osmose/pontoon,Jobava/mirror-pontoon,mozilla/pontoon,mozilla/pontoon,m8ttyB/pontoon,yfdyh000/pontoon,mathjazz/pontoon,mathjazz/pontoon,mathjazz/pontoon,vivekanand1101/pontoon,mastizada/pontoon,mozilla/pontoon,participedia/pontoon,participedia/pontoon,mozilla/pontoon,mastizada/pontoon,yfdyh000/pontoon
|
python
|
## Code Before:
import os
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
self.stdout.write('Successfully updated project "%s"' % project)
except Exception as e:
raise CommandError('UpdateProjectsFromRepositoryError: %s' % unicode(e))
## Instruction:
Add timestamp and newline to log messages
## Code After:
import os
import datetime
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
now = datetime.datetime.now()
self.stdout.write('[%s]: Successfully updated project "%s"\n' % (now, project))
except Exception as e:
now = datetime.datetime.now()
raise CommandError('[%s]: UpdateProjectsFromRepositoryError: %s\n' % (now, unicode(e)))
|
// ... existing code ...
import os
import datetime
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
// ... modified code ...
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
now = datetime.datetime.now()
self.stdout.write('[%s]: Successfully updated project "%s"\n' % (now, project))
except Exception as e:
now = datetime.datetime.now()
raise CommandError('[%s]: UpdateProjectsFromRepositoryError: %s\n' % (now, unicode(e)))
// ... rest of the code ...
|
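The only change here is prefixing each status line with a timestamp and ending it with an explicit newline; interpolating a datetime object through %s uses its default str() form. A tiny illustration of the resulting format (the project name is made up):
import datetime
now = datetime.datetime.now()
# str(now) looks like: 2014-05-07 14:03:21.123456
print('[%s]: Successfully updated project "%s"\n' % (now, 'firefox-aurora'))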
8e75605e0511b85dfd500b644613739f29705da6
|
cfnf.py
|
cfnf.py
|
import sublime, sublime_plugin
import time
class cfnewfile(sublime_plugin.TextCommand):
def run(self, edit):
localtime = time.asctime( time.localtime(time.time()) )
self.view.insert(edit,0,"<!---\r\n Name:\r\n Description:\r\n Written By:\r\n Date Created: "+localtime+"\r\n History:\r\n--->\r\n")
|
import sublime, sublime_plugin
import time
class cfnfCommand(sublime_plugin.WindowCommand):
def run(self):
a = self.window.new_file()
a.run_command("addheader")
class addheaderCommand(sublime_plugin.TextCommand):
def run(self, edit):
localtime = time.asctime( time.localtime(time.time()) )
self.view.insert(edit,0,"<!---\n Name:\n Description:\n Written By:\n Date Created: "+localtime+"\n History:\n--->\n")
|
Send text to new file
|
Send text to new file
|
Python
|
bsd-2-clause
|
dwkd/SublimeCFNewFile
|
python
|
## Code Before:
import sublime, sublime_plugin
import time
class cfnewfile(sublime_plugin.TextCommand):
def run(self, edit):
localtime = time.asctime( time.localtime(time.time()) )
self.view.insert(edit,0,"<!---\r\n Name:\r\n Description:\r\n Written By:\r\n Date Created: "+localtime+"\r\n History:\r\n--->\r\n")
## Instruction:
Send text to new file
## Code After:
import sublime, sublime_plugin
import time
class cfnfCommand(sublime_plugin.WindowCommand):
def run(self):
a = self.window.new_file()
a.run_command("addheader")
class addheaderCommand(sublime_plugin.TextCommand):
def run(self, edit):
localtime = time.asctime( time.localtime(time.time()) )
self.view.insert(edit,0,"<!---\n Name:\n Description:\n Written By:\n Date Created: "+localtime+"\n History:\n--->\n")
|
...
import sublime, sublime_plugin
import time
class cfnfCommand(sublime_plugin.WindowCommand):
def run(self):
a = self.window.new_file()
a.run_command("addheader")
class addheaderCommand(sublime_plugin.TextCommand):
def run(self, edit):
localtime = time.asctime( time.localtime(time.time()) )
self.view.insert(edit,0,"<!---\n Name:\n Description:\n Written By:\n Date Created: "+localtime+"\n History:\n--->\n")
...
|
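The split into a WindowCommand plus a TextCommand follows the Sublime Text plugin API: only a TextCommand receives the edit object needed to modify a buffer, so the window-level command opens the new file and delegates to "addheader". Command names are derived from the class names (cfnfCommand becomes "cfnf"), so the plugin can be triggered like this from Sublime's console or another plugin (only runnable inside Sublime Text):
import sublime
# Opens a new buffer and runs the "addheader" TextCommand on it, which
# inserts the ColdFusion comment header at position 0.
sublime.active_window().run_command("cfnf")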
dc140f6c7bc6fe03ec60a5b1029d7bc7463d2a0e
|
pydarkstar/scrubbing/scrubber.py
|
pydarkstar/scrubbing/scrubber.py
|
from ..darkobject import DarkObject
from bs4 import BeautifulSoup
import logging
import time
from urllib.request import urlopen
class Scrubber(DarkObject):
def __init__(self):
super(Scrubber, self).__init__()
def scrub(self):
"""
Get item metadata.
"""
return {}
# noinspection PyBroadException
@staticmethod
def soup(url):
"""
Open URL and create tag soup.
:param url: website string
:type url: str
"""
handle = ''
max_tries = 10
for i in range(max_tries):
# noinspection PyPep8
try:
handle = urlopen(url)
handle = handle.read()
break
except:
logging.exception('urlopen failed (attempt %d)', i + 1)
if i == max_tries - 1:
logging.error('the maximum urlopen attempts have been reached')
raise
time.sleep(1)
s = BeautifulSoup(handle, features='html5lib')
return s
if __name__ == '__main__':
pass
|
from ..darkobject import DarkObject
from bs4 import BeautifulSoup
import requests
import logging
import time
class Scrubber(DarkObject):
def __init__(self):
super(Scrubber, self).__init__()
def scrub(self):
"""
Get item metadata.
"""
return {}
# noinspection PyBroadException
@staticmethod
def soup(url, absolute: bool = False, **kwargs):
"""
Open URL and create tag soup.
:param url: website string
:type url: str
:param absolute: perform double get request to find absolute url
:type absolute: bool
"""
handle = ''
max_tries = 10
for i in range(max_tries):
# noinspection PyPep8
try:
if absolute:
url = requests.get(url).url
handle = requests.get(url, params=kwargs).text
break
except Exception:
logging.exception('urlopen failed (attempt %d)', i + 1)
if i == max_tries - 1:
logging.error('the maximum urlopen attempts have been reached')
raise
time.sleep(1)
s = BeautifulSoup(handle, features='html5lib')
return s
if __name__ == '__main__':
pass
|
Add support for absolute URL in request
|
Add support for absolute URL in request
|
Python
|
mit
|
AdamGagorik/pydarkstar
|
python
|
## Code Before:
from ..darkobject import DarkObject
from bs4 import BeautifulSoup
import logging
import time
from urllib.request import urlopen
class Scrubber(DarkObject):
def __init__(self):
super(Scrubber, self).__init__()
def scrub(self):
"""
Get item metadata.
"""
return {}
# noinspection PyBroadException
@staticmethod
def soup(url):
"""
Open URL and create tag soup.
:param url: website string
:type url: str
"""
handle = ''
max_tries = 10
for i in range(max_tries):
# noinspection PyPep8
try:
handle = urlopen(url)
handle = handle.read()
break
except:
logging.exception('urlopen failed (attempt %d)', i + 1)
if i == max_tries - 1:
logging.error('the maximum urlopen attempts have been reached')
raise
time.sleep(1)
s = BeautifulSoup(handle, features='html5lib')
return s
if __name__ == '__main__':
pass
## Instruction:
Add support for absolute URL in request
## Code After:
from ..darkobject import DarkObject
from bs4 import BeautifulSoup
import requests
import logging
import time
class Scrubber(DarkObject):
def __init__(self):
super(Scrubber, self).__init__()
def scrub(self):
"""
Get item metadata.
"""
return {}
# noinspection PyBroadException
@staticmethod
def soup(url, absolute: bool = False, **kwargs):
"""
Open URL and create tag soup.
:param url: website string
:type url: str
:param absolute: perform double get request to find absolute url
:type absolute: bool
"""
handle = ''
max_tries = 10
for i in range(max_tries):
# noinspection PyPep8
try:
if absolute:
url = requests.get(url).url
handle = requests.get(url, params=kwargs).text
break
except Exception:
logging.exception('urlopen failed (attempt %d)', i + 1)
if i == max_tries - 1:
logging.error('the maximum urlopen attempts have been reached')
raise
time.sleep(1)
s = BeautifulSoup(handle, features='html5lib')
return s
if __name__ == '__main__':
pass
|
...
from ..darkobject import DarkObject
from bs4 import BeautifulSoup
import requests
import logging
import time
class Scrubber(DarkObject):
...
# noinspection PyBroadException
@staticmethod
def soup(url, absolute: bool = False, **kwargs):
"""
Open URL and create tag soup.
:param url: website string
:type url: str
:param absolute: perform double get request to find absolute url
:type absolute: bool
"""
handle = ''
max_tries = 10
...
for i in range(max_tries):
# noinspection PyPep8
try:
if absolute:
url = requests.get(url).url
handle = requests.get(url, params=kwargs).text
break
except Exception:
logging.exception('urlopen failed (attempt %d)', i + 1)
if i == max_tries - 1:
logging.error('the maximum urlopen attempts have been reached')
...
|
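With the move to requests, soup() gains two behaviours: extra keyword arguments are forwarded as query-string parameters, and absolute=True performs a first GET purely to resolve redirects before fetching the final URL. A hedged usage sketch against the class in this record (the URL and the page parameter are placeholders):
from pydarkstar.scrubbing.scrubber import Scrubber  # module path as in this record
# Resolve redirects to the canonical page first, then fetch it with ?page=2;
# the response text is parsed into tag soup with html5lib.
soup = Scrubber.soup('http://www.example.com/item/4096', absolute=True, page=2)
print(soup.title)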
584110ed44810e40f029e985612acdb421d504b3
|
LYPopView/Classes/PopView.h
|
LYPopView/Classes/PopView.h
|
//
// PopView.h
// LYPOPVIEW
//
// CREATED BY LUO YU ON 19/12/2016.
// COPYRIGHT © 2016 LUO YU. ALL RIGHTS RESERVED.
//
#ifndef PopView_h
#define PopView_h
#import <LYPopView/LYPopView.h>
#import <LYPopView/LYPopMessage.h>
#import <LYPopView/LYPopTable.h>
#import <LYPopView/LYPopActionView.h>
#endif /* PopView_h */
|
//
// PopView.h
// LYPOPVIEW
//
// CREATED BY LUO YU ON 19/12/2016.
// COPYRIGHT © 2016 LUO YU. ALL RIGHTS RESERVED.
//
#ifndef PopView_h
#define PopView_h
#import <LYPopView/LYPopView.h>
#import <LYPopView/LYPopMessage.h>
#import <LYPopView/LYPopTable.h>
#import <LYPopView/LYPopDate.h>
#import <LYPopView/LYPopActionView.h>
#endif /* PopView_h */
|
Add : date picker pop view importer
|
Add : date picker pop view importer
|
C
|
mit
|
blodely/LYPopView,blodely/LYPopView
|
c
|
## Code Before:
//
// PopView.h
// LYPOPVIEW
//
// CREATED BY LUO YU ON 19/12/2016.
// COPYRIGHT © 2016 LUO YU. ALL RIGHTS RESERVED.
//
#ifndef PopView_h
#define PopView_h
#import <LYPopView/LYPopView.h>
#import <LYPopView/LYPopMessage.h>
#import <LYPopView/LYPopTable.h>
#import <LYPopView/LYPopActionView.h>
#endif /* PopView_h */
## Instruction:
Add : date picker pop view importer
## Code After:
//
// PopView.h
// LYPOPVIEW
//
// CREATED BY LUO YU ON 19/12/2016.
// COPYRIGHT © 2016 LUO YU. ALL RIGHTS RESERVED.
//
#ifndef PopView_h
#define PopView_h
#import <LYPopView/LYPopView.h>
#import <LYPopView/LYPopMessage.h>
#import <LYPopView/LYPopTable.h>
#import <LYPopView/LYPopDate.h>
#import <LYPopView/LYPopActionView.h>
#endif /* PopView_h */
|
...
#import <LYPopView/LYPopView.h>
#import <LYPopView/LYPopMessage.h>
#import <LYPopView/LYPopTable.h>
#import <LYPopView/LYPopDate.h>
#import <LYPopView/LYPopActionView.h>
#endif /* PopView_h */
...
|
fdcc7f0e45fe5f0d284f47941238a92cbe9a1b36
|
attest/tests/__init__.py
|
attest/tests/__init__.py
|
from attest import Tests
suite = lambda mod: 'attest.tests.' + mod + '.suite'
all = Tests([suite('asserts'),
suite('collections'),
suite('classy'),
suite('reporters'),
suite('eval'),
])
|
from attest import Tests
all = Tests('.'.join((__name__, mod, 'suite'))
for mod in ('asserts',
'collections',
'classy',
'reporters',
'eval'))
|
Simplify our own test suite
|
Simplify our own test suite
|
Python
|
bsd-2-clause
|
dag/attest
|
python
|
## Code Before:
from attest import Tests
suite = lambda mod: 'attest.tests.' + mod + '.suite'
all = Tests([suite('asserts'),
suite('collections'),
suite('classy'),
suite('reporters'),
suite('eval'),
])
## Instruction:
Simplify our own test suite
## Code After:
from attest import Tests
all = Tests('.'.join((__name__, mod, 'suite'))
for mod in ('asserts',
'collections',
'classy',
'reporters',
'eval'))
|
// ... existing code ...
from attest import Tests
all = Tests('.'.join((__name__, mod, 'suite'))
for mod in ('asserts',
'collections',
'classy',
'reporters',
'eval'))
// ... rest of the code ...
|
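The rewrite drops the helper lambda and the explicit list in favour of a generator expression built from __name__; both spell out the same dotted 'attest.tests.<module>.suite' names. A quick standalone check of that equivalence:
package_name = 'attest.tests'  # the value __name__ has inside attest/tests/__init__.py
modules = ('asserts', 'collections', 'classy', 'reporters', 'eval')
old_style = ['attest.tests.' + mod + '.suite' for mod in modules]
new_style = ['.'.join((package_name, mod, 'suite')) for mod in modules]
assert old_style == new_style  # e.g. both contain 'attest.tests.asserts.suite'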
97eabe6697e58f3b4dd8cced9a2c3bf05f3444c2
|
accounting/apps/books/context_processors.py
|
accounting/apps/books/context_processors.py
|
from .utils import organization_manager
from .models import Organization
def organizations(request):
"""
Add some generally useful metadata to the template context
"""
# selected organization
orga = organization_manager.get_selected_organization(request)
# all user authorized organizations
if not request.user or not request.user.is_authenticated():
user_organizations = None
else:
user_organizations = request.user.organizations.all()
return {
'user_organizations': user_organizations,
'selected_organization': orga,
}
|
from django.db.models import Q
from .utils import organization_manager
from .models import Organization
def organizations(request):
"""
Add some generally useful metadata to the template context
"""
# selected organization
orga = organization_manager.get_selected_organization(request)
# all user authorized organizations
if not request.user or not request.user.is_authenticated():
user_organizations = None
else:
user = request.user
user_organizations = (Organization.objects
.filter(Q(members=user) | Q(owner=user)))
return {
'user_organizations': user_organizations,
'selected_organization': orga,
}
|
Use owner or member filter for the dropdown
|
Use owner or member filter for the dropdown
|
Python
|
mit
|
kenjhim/django-accounting,dulaccc/django-accounting,dulaccc/django-accounting,dulaccc/django-accounting,kenjhim/django-accounting,kenjhim/django-accounting,dulaccc/django-accounting,kenjhim/django-accounting
|
python
|
## Code Before:
from .utils import organization_manager
from .models import Organization
def organizations(request):
"""
Add some generally useful metadata to the template context
"""
# selected organization
orga = organization_manager.get_selected_organization(request)
# all user authorized organizations
if not request.user or not request.user.is_authenticated():
user_organizations = None
else:
user_organizations = request.user.organizations.all()
return {
'user_organizations': user_organizations,
'selected_organization': orga,
}
## Instruction:
Use owner or member filter for the dropdown
## Code After:
from django.db.models import Q
from .utils import organization_manager
from .models import Organization
def organizations(request):
"""
Add some generally useful metadata to the template context
"""
# selected organization
orga = organization_manager.get_selected_organization(request)
# all user authorized organizations
if not request.user or not request.user.is_authenticated():
user_organizations = None
else:
user = request.user
user_organizations = (Organization.objects
.filter(Q(members=user) | Q(owner=user)))
return {
'user_organizations': user_organizations,
'selected_organization': orga,
}
|
# ... existing code ...
from django.db.models import Q
from .utils import organization_manager
from .models import Organization
# ... modified code ...
if not request.user or not request.user.is_authenticated():
user_organizations = None
else:
user = request.user
user_organizations = (Organization.objects
.filter(Q(members=user) | Q(owner=user)))
return {
'user_organizations': user_organizations,
# ... rest of the code ...
|
1d84a3b58aa752834aed31123dd16e3bfa723609
|
tests/storage_adapter_tests/test_storage_adapter.py
|
tests/storage_adapter_tests/test_storage_adapter.py
|
from unittest import TestCase
from chatterbot.storage import StorageAdapter
class StorageAdapterTestCase(TestCase):
"""
This test case is for the StorageAdapter base class.
Although this class is not intended for direct use,
this test case ensures that exceptions requiring
basic functionality are triggered when needed.
"""
def setUp(self):
super(StorageAdapterTestCase, self).setUp()
self.adapter = StorageAdapter()
def test_count(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.count()
def test_find(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.find('')
def test_filter(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.filter()
def test_remove(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.remove('')
def test_create(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.create()
def test_update(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.update('')
def test_get_random(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_random()
def test_get_response_statements(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_response_statements()
def test_drop(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.drop()
|
from unittest import TestCase
from chatterbot.storage import StorageAdapter
class StorageAdapterTestCase(TestCase):
"""
This test case is for the StorageAdapter base class.
Although this class is not intended for direct use,
this test case ensures that exceptions requiring
basic functionality are triggered when needed.
"""
def setUp(self):
super(StorageAdapterTestCase, self).setUp()
self.adapter = StorageAdapter()
def test_count(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.count()
def test_filter(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.filter()
def test_remove(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.remove('')
def test_create(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.create()
def test_update(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.update('')
def test_get_random(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_random()
def test_drop(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.drop()
|
Remove tests for storage adapter methods being removed.
|
Remove tests for storage adapter methods being removed.
|
Python
|
bsd-3-clause
|
vkosuri/ChatterBot,gunthercox/ChatterBot
|
python
|
## Code Before:
from unittest import TestCase
from chatterbot.storage import StorageAdapter
class StorageAdapterTestCase(TestCase):
"""
This test case is for the StorageAdapter base class.
Although this class is not intended for direct use,
this test case ensures that exceptions requiring
basic functionality are triggered when needed.
"""
def setUp(self):
super(StorageAdapterTestCase, self).setUp()
self.adapter = StorageAdapter()
def test_count(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.count()
def test_find(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.find('')
def test_filter(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.filter()
def test_remove(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.remove('')
def test_create(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.create()
def test_update(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.update('')
def test_get_random(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_random()
def test_get_response_statements(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_response_statements()
def test_drop(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.drop()
## Instruction:
Remove tests for storage adapter methods being removed.
## Code After:
from unittest import TestCase
from chatterbot.storage import StorageAdapter
class StorageAdapterTestCase(TestCase):
"""
This test case is for the StorageAdapter base class.
Although this class is not intended for direct use,
this test case ensures that exceptions requiring
basic functionality are triggered when needed.
"""
def setUp(self):
super(StorageAdapterTestCase, self).setUp()
self.adapter = StorageAdapter()
def test_count(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.count()
def test_filter(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.filter()
def test_remove(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.remove('')
def test_create(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.create()
def test_update(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.update('')
def test_get_random(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_random()
def test_drop(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.drop()
|
# ... existing code ...
def test_count(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.count()
def test_filter(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
# ... modified code ...
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_random()
def test_drop(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.drop()
# ... rest of the code ...
|
5ac661e80c92e90ae0ed8ba658570db8f76e8490
|
modules/acct_rtcp_hep/_acct_rtcp_hep_config.h
|
modules/acct_rtcp_hep/_acct_rtcp_hep_config.h
|
static struct hep_ctx ctx = {
.initfails = 0,
.hints = {{ 0 }},
.capt_host = "10.0.0.1",
.capt_port = "9060",
.capt_id = 101,
.hep_version = 3,
.usessl = 0,
.pl_compress = 0,
.sendPacketsCount = 0
};
|
static struct hep_ctx ctx = {
.initfails = 0,
.hints = {{ 0 }},
.capt_host = "10.0.0.1",
.capt_port = "9060",
.hints = {{ .ai_socktype = SOCK_DGRAM }},
.capt_id = 101,
.hep_version = 3,
.usessl = 0,
.pl_compress = 0,
.sendPacketsCount = 0
};
|
Add .hints initializer for clarity.
|
Add .hints initializer for clarity.
|
C
|
bsd-2-clause
|
dsanders11/rtpproxy,sippy/rtpproxy,sippy/rtpproxy,dsanders11/rtpproxy,dsanders11/rtpproxy,sippy/rtpproxy
|
c
|
## Code Before:
static struct hep_ctx ctx = {
.initfails = 0,
.hints = {{ 0 }},
.capt_host = "10.0.0.1",
.capt_port = "9060",
.capt_id = 101,
.hep_version = 3,
.usessl = 0,
.pl_compress = 0,
.sendPacketsCount = 0
};
## Instruction:
Add .hints initializer for clarity.
## Code After:
static struct hep_ctx ctx = {
.initfails = 0,
.hints = {{ 0 }},
.capt_host = "10.0.0.1",
.capt_port = "9060",
.hints = {{ .ai_socktype = SOCK_DGRAM }},
.capt_id = 101,
.hep_version = 3,
.usessl = 0,
.pl_compress = 0,
.sendPacketsCount = 0
};
|
# ... existing code ...
.hints = {{ 0 }},
.capt_host = "10.0.0.1",
.capt_port = "9060",
.hints = {{ .ai_socktype = SOCK_DGRAM }},
.capt_id = 101,
.hep_version = 3,
.usessl = 0,
# ... rest of the code ...
|
3e23ab61c6fe1bfd219f94bae2857985ef50da69
|
Pod/Classes/CMHLoginViewController.h
|
Pod/Classes/CMHLoginViewController.h
|
@class CMHLoginViewController;
@protocol CMHLoginViewControllerDelegate <NSObject>
@optional
- (void)loginViewControllerCancelled:(CMHLoginViewController *_Nonnull)viewController;
- (void)loginViewController:(CMHLoginViewController *_Nonnull)viewController didLogin:(BOOL)success error:(NSError *_Nullable)error;
@end
@interface CMHLoginViewController : ORKTaskViewController
- (_Nonnull instancetype)initWithTitle:(NSString *_Nullable)title text:(NSString *_Nullable)text delegate:(id<CMHLoginViewControllerDelegate>)delegate;
@property (weak, nonatomic, nullable) id<CMHLoginViewControllerDelegate> loginDelegate;
- (instancetype)initWithTask:(nullable id<ORKTask>)task taskRunUUID:(nullable NSUUID *)taskRunUUID NS_UNAVAILABLE;
- (instancetype)initWithCoder:(NSCoder *)aDecoder NS_UNAVAILABLE;
- (instancetype)initWithNibName:(nullable NSString *)nibNameOrNil bundle:(nullable NSBundle *)nibBundleOrNil NS_UNAVAILABLE;
- (instancetype)initWithTask:(nullable id<ORKTask>)task restorationData:(nullable NSData *)data delegate:(nullable id<ORKTaskViewControllerDelegate>)delegate NS_UNAVAILABLE;
@end
|
@class CMHLoginViewController;
@protocol CMHLoginViewControllerDelegate <NSObject>
@optional
- (void)loginViewControllerCancelled:(CMHLoginViewController *_Nonnull)viewController;
- (void)loginViewController:(CMHLoginViewController *_Nonnull)viewController didLogin:(BOOL)success error:(NSError *_Nullable)error;
@end
@interface CMHLoginViewController : ORKTaskViewController
- (_Nonnull instancetype)initWithTitle:(NSString *_Nullable)title text:(NSString *_Nullable)text delegate:(_Nullable id<CMHLoginViewControllerDelegate>)delegate;
@property (weak, nonatomic, nullable) id<CMHLoginViewControllerDelegate> loginDelegate;
- (_Null_unspecified instancetype)initWithTask:(nullable id<ORKTask>)task taskRunUUID:(nullable NSUUID *)taskRunUUID NS_UNAVAILABLE;
- (_Null_unspecified instancetype)initWithCoder:(NSCoder *_Null_unspecified)aDecoder NS_UNAVAILABLE;
- (_Null_unspecified instancetype)initWithNibName:(nullable NSString *)nibNameOrNil bundle:(nullable NSBundle *)nibBundleOrNil NS_UNAVAILABLE;
- (_Null_unspecified instancetype)initWithTask:(nullable id<ORKTask>)task restorationData:(nullable NSData *)data delegate:(nullable id<ORKTaskViewControllerDelegate>)delegate NS_UNAVAILABLE;
@end
|
Add nullability annotations to the custom login view controller's public interface
|
Add nullability annotations to the custom login view controller's public interface
|
C
|
mit
|
cloudmine/CMHealthSDK-iOS,cloudmine/CMHealthSDK-iOS,cloudmine/CMHealthSDK
|
c
|
## Code Before:
@class CMHLoginViewController;
@protocol CMHLoginViewControllerDelegate <NSObject>
@optional
- (void)loginViewControllerCancelled:(CMHLoginViewController *_Nonnull)viewController;
- (void)loginViewController:(CMHLoginViewController *_Nonnull)viewController didLogin:(BOOL)success error:(NSError *_Nullable)error;
@end
@interface CMHLoginViewController : ORKTaskViewController
- (_Nonnull instancetype)initWithTitle:(NSString *_Nullable)title text:(NSString *_Nullable)text delegate:(id<CMHLoginViewControllerDelegate>)delegate;
@property (weak, nonatomic, nullable) id<CMHLoginViewControllerDelegate> loginDelegate;
- (instancetype)initWithTask:(nullable id<ORKTask>)task taskRunUUID:(nullable NSUUID *)taskRunUUID NS_UNAVAILABLE;
- (instancetype)initWithCoder:(NSCoder *)aDecoder NS_UNAVAILABLE;
- (instancetype)initWithNibName:(nullable NSString *)nibNameOrNil bundle:(nullable NSBundle *)nibBundleOrNil NS_UNAVAILABLE;
- (instancetype)initWithTask:(nullable id<ORKTask>)task restorationData:(nullable NSData *)data delegate:(nullable id<ORKTaskViewControllerDelegate>)delegate NS_UNAVAILABLE;
@end
## Instruction:
Add nullability annotations to the custom login view controller's public interface
## Code After:
@class CMHLoginViewController;
@protocol CMHLoginViewControllerDelegate <NSObject>
@optional
- (void)loginViewControllerCancelled:(CMHLoginViewController *_Nonnull)viewController;
- (void)loginViewController:(CMHLoginViewController *_Nonnull)viewController didLogin:(BOOL)success error:(NSError *_Nullable)error;
@end
@interface CMHLoginViewController : ORKTaskViewController
- (_Nonnull instancetype)initWithTitle:(NSString *_Nullable)title text:(NSString *_Nullable)text delegate:(_Nullable id<CMHLoginViewControllerDelegate>)delegate;
@property (weak, nonatomic, nullable) id<CMHLoginViewControllerDelegate> loginDelegate;
- (_Null_unspecified instancetype)initWithTask:(nullable id<ORKTask>)task taskRunUUID:(nullable NSUUID *)taskRunUUID NS_UNAVAILABLE;
- (_Null_unspecified instancetype)initWithCoder:(NSCoder *_Null_unspecified)aDecoder NS_UNAVAILABLE;
- (_Null_unspecified instancetype)initWithNibName:(nullable NSString *)nibNameOrNil bundle:(nullable NSBundle *)nibBundleOrNil NS_UNAVAILABLE;
- (_Null_unspecified instancetype)initWithTask:(nullable id<ORKTask>)task restorationData:(nullable NSData *)data delegate:(nullable id<ORKTaskViewControllerDelegate>)delegate NS_UNAVAILABLE;
@end
|
// ... existing code ...
@interface CMHLoginViewController : ORKTaskViewController
- (_Nonnull instancetype)initWithTitle:(NSString *_Nullable)title text:(NSString *_Nullable)text delegate:(_Nullable id<CMHLoginViewControllerDelegate>)delegate;
@property (weak, nonatomic, nullable) id<CMHLoginViewControllerDelegate> loginDelegate;
- (_Null_unspecified instancetype)initWithTask:(nullable id<ORKTask>)task taskRunUUID:(nullable NSUUID *)taskRunUUID NS_UNAVAILABLE;
- (_Null_unspecified instancetype)initWithCoder:(NSCoder *_Null_unspecified)aDecoder NS_UNAVAILABLE;
- (_Null_unspecified instancetype)initWithNibName:(nullable NSString *)nibNameOrNil bundle:(nullable NSBundle *)nibBundleOrNil NS_UNAVAILABLE;
- (_Null_unspecified instancetype)initWithTask:(nullable id<ORKTask>)task restorationData:(nullable NSData *)data delegate:(nullable id<ORKTaskViewControllerDelegate>)delegate NS_UNAVAILABLE;
@end
// ... rest of the code ...
|
2959fa0a9f69cbfb7611bbc12488089921d26ab8
|
IPython/frontend/html/notebook/__init__.py
|
IPython/frontend/html/notebook/__init__.py
|
"""The IPython HTML Notebook"""
# check for tornado 2.1.0
msg = "The IPython Notebook requires tornado >= 2.1.0"
try:
import tornado
except ImportError:
raise ImportError(msg)
else:
if tornado.version_info < (2,1,0):
raise ImportError(msg+", but you have %s"%tornado.version)
del msg
|
"""The IPython HTML Notebook"""
# check for tornado 2.1.0
msg = "The IPython Notebook requires tornado >= 2.1.0"
try:
import tornado
except ImportError:
raise ImportError(msg)
try:
version_info = tornado.version_info
except AttributeError:
raise ImportError(msg + ", but you have < 1.1.0")
if version_info < (2,1,0):
raise ImportError(msg + ", but you have %s" % tornado.version)
del msg
|
Fix for tornado check for tornado < 1.1.0
|
Fix for tornado check for tornado < 1.1.0
Tornado < 1.1.0 does not have the ``version_info`` variable to check.
Debian squeeze has tornado 1.0.1.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
python
|
## Code Before:
"""The IPython HTML Notebook"""
# check for tornado 2.1.0
msg = "The IPython Notebook requires tornado >= 2.1.0"
try:
import tornado
except ImportError:
raise ImportError(msg)
else:
if tornado.version_info < (2,1,0):
raise ImportError(msg+", but you have %s"%tornado.version)
del msg
## Instruction:
Fix for tornado check for tornado < 1.1.0
Tornado < 1.1.0 does not have the ``version_info`` variable to check.
Debian squeeze has tornado 1.0.1.
## Code After:
"""The IPython HTML Notebook"""
# check for tornado 2.1.0
msg = "The IPython Notebook requires tornado >= 2.1.0"
try:
import tornado
except ImportError:
raise ImportError(msg)
try:
version_info = tornado.version_info
except AttributeError:
raise ImportError(msg + ", but you have < 1.1.0")
if version_info < (2,1,0):
raise ImportError(msg + ", but you have %s" % tornado.version)
del msg
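An illustrative aside, not part of the commit: the AttributeError branch can be exercised against a stand-in object that, like tornado 1.0.1 on Debian squeeze, defines version but not version_info (the class name below is hypothetical).
class _FakeOldTornado(object):
    version = "1.0.1"  # pre-1.1.0 releases have no version_info attribute
old_tornado = _FakeOldTornado()
try:
    old_tornado.version_info
except AttributeError:
    print("tornado %s predates version_info, so the guard reports < 1.1.0"
          % old_tornado.version)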
|
...
import tornado
except ImportError:
raise ImportError(msg)
try:
version_info = tornado.version_info
except AttributeError:
raise ImportError(msg + ", but you have < 1.1.0")
if version_info < (2,1,0):
raise ImportError(msg + ", but you have %s" % tornado.version)
del msg
...
|
cd6752a2866631eeea0dcbcf37f24d825f5e4a50
|
vpc/vpc_content/search_indexes.py
|
vpc/vpc_content/search_indexes.py
|
import datetime
from haystack.indexes import SearchIndex, CharField, DateTimeField
from haystack import site
from models import Author, Material
class AuthorIndex(SearchIndex):
# the used template contains fullname and author bio
# Zniper thinks this line below also is OK:
# text = CharField(document=True, model_attr='text')
fullname = CharField(model_attr='fullname')
text = CharField(document=True, use_template=True)
def index_queryset(self):
"""Used when entire index for model is updated"""
return Author.objects.all()
class MaterialIndex(SearchIndex):
# "text" combines normal body, title, description and keywords
text = CharField(document=True, use_template=True)
material_id = CharField(model_attr='material_id')
title = CharField(model_attr='title')
description = CharField(model_attr='description')
modified = DateTimeField(model_attr='modified')
material_type = DateTimeField(model_attr='modified')
def index_queryset(self):
"""When entired index for model is updated"""
return Material.objects.all()
site.register(Author, AuthorIndex)
site.register(Material, MaterialIndex)
|
import datetime
from haystack.indexes import SearchIndex, RealTimeSearchIndex
from haystack.indexes import CharField, DateTimeField
from haystack import site
from models import Author, Material
class AuthorIndex(RealTimeSearchIndex):
# the used template contains fullname and author bio
# Zniper thinks this line below also is OK:
# text = CharField(document=True, model_attr='text')
fullname = CharField(model_attr='fullname')
text = CharField(document=True, use_template=True)
def index_queryset(self):
"""Used when entire index for model is updated"""
return Author.objects.all()
class MaterialIndex(RealTimeSearchIndex):
# "text" combines normal body, title, description and keywords
text = CharField(document=True, use_template=True)
material_id = CharField(model_attr='material_id')
title = CharField(model_attr='title')
description = CharField(model_attr='description')
modified = DateTimeField(model_attr='modified')
material_type = DateTimeField(model_attr='modified')
def index_queryset(self):
"""When entired index for model is updated"""
return Material.objects.all()
site.register(Author, AuthorIndex)
site.register(Material, MaterialIndex)
|
Make indexing on real time
|
Make indexing on real time
|
Python
|
agpl-3.0
|
voer-platform/vp.repo,voer-platform/vp.repo,voer-platform/vp.repo,voer-platform/vp.repo
|
python
|
## Code Before:
import datetime
from haystack.indexes import SearchIndex, CharField, DateTimeField
from haystack import site
from models import Author, Material
class AuthorIndex(SearchIndex):
# the used template contains fullname and author bio
# Zniper thinks this line below also is OK:
# text = CharField(document=True, model_attr='text')
fullname = CharField(model_attr='fullname')
text = CharField(document=True, use_template=True)
def index_queryset(self):
"""Used when entire index for model is updated"""
return Author.objects.all()
class MaterialIndex(SearchIndex):
# "text" combines normal body, title, description and keywords
text = CharField(document=True, use_template=True)
material_id = CharField(model_attr='material_id')
title = CharField(model_attr='title')
description = CharField(model_attr='description')
modified = DateTimeField(model_attr='modified')
material_type = DateTimeField(model_attr='modified')
def index_queryset(self):
"""When entired index for model is updated"""
return Material.objects.all()
site.register(Author, AuthorIndex)
site.register(Material, MaterialIndex)
## Instruction:
Make indexing on real time
## Code After:
import datetime
from haystack.indexes import SearchIndex, RealTimeSearchIndex
from haystack.indexes import CharField, DateTimeField
from haystack import site
from models import Author, Material
class AuthorIndex(RealTimeSearchIndex):
# the used template contains fullname and author bio
# Zniper thinks this line below also is OK:
# text = CharField(document=True, model_attr='text')
fullname = CharField(model_attr='fullname')
text = CharField(document=True, use_template=True)
def index_queryset(self):
"""Used when entire index for model is updated"""
return Author.objects.all()
class MaterialIndex(RealTimeSearchIndex):
# "text" combines normal body, title, description and keywords
text = CharField(document=True, use_template=True)
material_id = CharField(model_attr='material_id')
title = CharField(model_attr='title')
description = CharField(model_attr='description')
modified = DateTimeField(model_attr='modified')
material_type = DateTimeField(model_attr='modified')
def index_queryset(self):
"""When entired index for model is updated"""
return Material.objects.all()
site.register(Author, AuthorIndex)
site.register(Material, MaterialIndex)
|
...
import datetime
from haystack.indexes import SearchIndex, RealTimeSearchIndex
from haystack.indexes import CharField, DateTimeField
from haystack import site
from models import Author, Material
class AuthorIndex(RealTimeSearchIndex):
# the used template contains fullname and author bio
# Zniper thinks this line below also is OK:
# text = CharField(document=True, model_attr='text')
...
return Author.objects.all()
class MaterialIndex(RealTimeSearchIndex):
# "text" combines normal body, title, description and keywords
text = CharField(document=True, use_template=True)
material_id = CharField(model_attr='material_id')
...
|
69a47852dced2f53cc0a29361c9b6b6229ef0b9e
|
devhub-server/web/src/main/java/nl/tudelft/ewi/dea/jaxrs/html/LoginPage.java
|
devhub-server/web/src/main/java/nl/tudelft/ewi/dea/jaxrs/html/LoginPage.java
|
package nl.tudelft.ewi.dea.jaxrs.html;
import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import nl.tudelft.ewi.dea.BuildInfo;
import nl.tudelft.ewi.dea.jaxrs.html.utils.Renderer;
import com.google.inject.persist.Transactional;
import com.google.inject.servlet.RequestScoped;
@RequestScoped
@Path("login")
@Produces(MediaType.TEXT_HTML)
public class LoginPage {
private final Renderer renderer;
private final BuildInfo buildInfo;
@Inject
public LoginPage(Renderer renderer, BuildInfo buildInfo) {
this.renderer = renderer;
this.buildInfo = buildInfo;
}
@GET
@Transactional
public String servePage() {
return renderer
.setValue("buildInfo", buildInfo)
.render("login.tpl");
}
}
|
package nl.tudelft.ewi.dea.jaxrs.html;
import java.net.URI;
import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import nl.tudelft.ewi.dea.BuildInfo;
import nl.tudelft.ewi.dea.jaxrs.html.utils.Renderer;
import nl.tudelft.ewi.dea.security.SecurityProvider;
import com.google.inject.persist.Transactional;
import com.google.inject.servlet.RequestScoped;
@RequestScoped
@Path("login")
@Produces(MediaType.TEXT_HTML)
public class LoginPage {
private final Renderer renderer;
private final BuildInfo buildInfo;
private final SecurityProvider securityProvider;
@Inject
public LoginPage(SecurityProvider securityProvider,
Renderer renderer, BuildInfo buildInfo) {
this.securityProvider = securityProvider;
this.renderer = renderer;
this.buildInfo = buildInfo;
}
@GET
@Transactional
public Response servePage() {
if (securityProvider.getSubject().isAuthenticated()) {
return Response.seeOther(URI.create("/dashboard")).build();
}
return Response.ok(renderer
.setValue("buildInfo", buildInfo)
.render("login.tpl")).build();
}
}
|
Fix issue-45: When already logged in, the login page should redirect to the dashboard.
|
Fix issue-45: When already logged in, the login page should redirect to the dashboard.
|
Java
|
apache-2.0
|
devhub-tud/devhub-prototype,devhub-tud/devhub-prototype,devhub-tud/devhub-prototype
|
java
|
## Code Before:
package nl.tudelft.ewi.dea.jaxrs.html;
import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import nl.tudelft.ewi.dea.BuildInfo;
import nl.tudelft.ewi.dea.jaxrs.html.utils.Renderer;
import com.google.inject.persist.Transactional;
import com.google.inject.servlet.RequestScoped;
@RequestScoped
@Path("login")
@Produces(MediaType.TEXT_HTML)
public class LoginPage {
private final Renderer renderer;
private final BuildInfo buildInfo;
@Inject
public LoginPage(Renderer renderer, BuildInfo buildInfo) {
this.renderer = renderer;
this.buildInfo = buildInfo;
}
@GET
@Transactional
public String servePage() {
return renderer
.setValue("buildInfo", buildInfo)
.render("login.tpl");
}
}
## Instruction:
Fix issue-45: When already logged in, the login page should redirect to the dashboard.
## Code After:
package nl.tudelft.ewi.dea.jaxrs.html;
import java.net.URI;
import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import nl.tudelft.ewi.dea.BuildInfo;
import nl.tudelft.ewi.dea.jaxrs.html.utils.Renderer;
import nl.tudelft.ewi.dea.security.SecurityProvider;
import com.google.inject.persist.Transactional;
import com.google.inject.servlet.RequestScoped;
@RequestScoped
@Path("login")
@Produces(MediaType.TEXT_HTML)
public class LoginPage {
private final Renderer renderer;
private final BuildInfo buildInfo;
private final SecurityProvider securityProvider;
@Inject
public LoginPage(SecurityProvider securityProvider,
Renderer renderer, BuildInfo buildInfo) {
this.securityProvider = securityProvider;
this.renderer = renderer;
this.buildInfo = buildInfo;
}
@GET
@Transactional
public Response servePage() {
if (securityProvider.getSubject().isAuthenticated()) {
return Response.seeOther(URI.create("/dashboard")).build();
}
return Response.ok(renderer
.setValue("buildInfo", buildInfo)
.render("login.tpl")).build();
}
}
|
// ... existing code ...
package nl.tudelft.ewi.dea.jaxrs.html;
import java.net.URI;
import javax.inject.Inject;
import javax.ws.rs.GET;
// ... modified code ...
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import nl.tudelft.ewi.dea.BuildInfo;
import nl.tudelft.ewi.dea.jaxrs.html.utils.Renderer;
import nl.tudelft.ewi.dea.security.SecurityProvider;
import com.google.inject.persist.Transactional;
import com.google.inject.servlet.RequestScoped;
...
private final Renderer renderer;
private final BuildInfo buildInfo;
private final SecurityProvider securityProvider;
@Inject
public LoginPage(SecurityProvider securityProvider,
Renderer renderer, BuildInfo buildInfo) {
this.securityProvider = securityProvider;
this.renderer = renderer;
this.buildInfo = buildInfo;
}
...
@GET
@Transactional
public Response servePage() {
if (securityProvider.getSubject().isAuthenticated()) {
return Response.seeOther(URI.create("/dashboard")).build();
}
return Response.ok(renderer
.setValue("buildInfo", buildInfo)
.render("login.tpl")).build();
}
}
// ... rest of the code ...
|
dee4c07a60f22bad9e04994350e61d142c73c4a5
|
detekt-rules/src/test/resources/cases/NamingConventions.kt
|
detekt-rules/src/test/resources/cases/NamingConventions.kt
|
package cases
// both valid
val variable = 5
val _variable = 5
// invalid start with _ is optional, but then lowercase!
val V_riable = 5
val _Variable = 5
//valid
fun fileMethod() {
}
//invalid
fun FileMethod() {
}
fun _fileMethod() {
}
@Suppress("unused")
class NamingConventions {
//invalid
val C_lassVariable = 5
//valid
val _classVariable = 5
val classVariable = 5
fun classMethod(){
}
//invalid
fun _classmethod(){
}
fun Classmethod(){
}
}
//invalid
class _NamingConventions{}
class namingConventions{}
object Bla {
val STUFF = "stuff"
}
|
@file:Suppress("unused")
package cases
// both valid
val variable = 5
val _variable = 5
// invalid start with _ is optional, but then lowercase!
val V_riable = 5
val _Variable = 5
//valid
fun fileMethod() {
}
//invalid
fun FileMethod() {
}
fun _fileMethod() {
}
class NamingConventions {
//invalid
val C_lassVariable = 5
//valid
val _classVariable = 5
val classVariable = 5
fun classMethod(){
}
//invalid
fun _classmethod(){
}
fun Classmethod(){
}
}
//invalid
class _NamingConventions{}
class namingConventions{}
object Bla {
val STUFF = "stuff"
}
|
Use file suppress in naming convention test case
|
Use file suppress in naming convention test case
|
Kotlin
|
apache-2.0
|
rock3r/detekt,arturbosch/detekt,Mauin/detekt,Mauin/detekt,Mauin/detekt,Mauin/detekt,rock3r/detekt,arturbosch/detekt,arturbosch/detekt,rock3r/detekt,MyDogTom/detekt
|
kotlin
|
## Code Before:
package cases
// both valid
val variable = 5
val _variable = 5
// invalid start with _ is optional, but then lowercase!
val V_riable = 5
val _Variable = 5
//valid
fun fileMethod() {
}
//invalid
fun FileMethod() {
}
fun _fileMethod() {
}
@Suppress("unused")
class NamingConventions {
//invalid
val C_lassVariable = 5
//valid
val _classVariable = 5
val classVariable = 5
fun classMethod(){
}
//invalid
fun _classmethod(){
}
fun Classmethod(){
}
}
//invalid
class _NamingConventions{}
class namingConventions{}
object Bla {
val STUFF = "stuff"
}
## Instruction:
Use file suppress in naming convention test case
## Code After:
@file:Suppress("unused")
package cases
// both valid
val variable = 5
val _variable = 5
// invalid start with _ is optional, but then lowercase!
val V_riable = 5
val _Variable = 5
//valid
fun fileMethod() {
}
//invalid
fun FileMethod() {
}
fun _fileMethod() {
}
class NamingConventions {
//invalid
val C_lassVariable = 5
//valid
val _classVariable = 5
val classVariable = 5
fun classMethod(){
}
//invalid
fun _classmethod(){
}
fun Classmethod(){
}
}
//invalid
class _NamingConventions{}
class namingConventions{}
object Bla {
val STUFF = "stuff"
}
|
# ... existing code ...
@file:Suppress("unused")
package cases
// both valid
# ... modified code ...
fun _fileMethod() {
}
class NamingConventions {
//invalid
# ... rest of the code ...
|
3fd7c331273f9fadacae1fcb0ff51b9817b009e3
|
telethon/network/connection/tcpfull.py
|
telethon/network/connection/tcpfull.py
|
import struct
from zlib import crc32
from .connection import Connection
from ...errors import InvalidChecksumError
class ConnectionTcpFull(Connection):
"""
Default Telegram mode. Sends 12 additional bytes and
needs to calculate the CRC value of the packet itself.
"""
def __init__(self, ip, port, *, loop):
super().__init__(ip, port, loop=loop)
self._send_counter = 0
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
# total length, sequence number, packet and checksum (CRC32)
length = len(data) + 12
data = struct.pack('<ii', length, self._send_counter) + data
crc = struct.pack('<I', crc32(data))
self._send_counter += 1
self._writer.write(data + crc)
async def _recv(self):
packet_len_seq = await self._reader.readexactly(8) # 4 and 4
packet_len, seq = struct.unpack('<ii', packet_len_seq)
body = await self._reader.readexactly(packet_len - 8)
checksum = struct.unpack('<I', body[-4:])[0]
body = body[:-4]
valid_checksum = crc32(packet_len_seq + body)
if checksum != valid_checksum:
raise InvalidChecksumError(checksum, valid_checksum)
return body
|
import struct
from zlib import crc32
from .connection import Connection
from ...errors import InvalidChecksumError
class ConnectionTcpFull(Connection):
"""
Default Telegram mode. Sends 12 additional bytes and
needs to calculate the CRC value of the packet itself.
"""
def __init__(self, ip, port, *, loop):
super().__init__(ip, port, loop=loop)
self._send_counter = 0
async def connect(self):
await super().connect()
self._send_counter = 0 # Important or Telegram won't reply
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
# total length, sequence number, packet and checksum (CRC32)
length = len(data) + 12
data = struct.pack('<ii', length, self._send_counter) + data
crc = struct.pack('<I', crc32(data))
self._send_counter += 1
self._writer.write(data + crc)
async def _recv(self):
packet_len_seq = await self._reader.readexactly(8) # 4 and 4
packet_len, seq = struct.unpack('<ii', packet_len_seq)
body = await self._reader.readexactly(packet_len - 8)
checksum = struct.unpack('<I', body[-4:])[0]
body = body[:-4]
valid_checksum = crc32(packet_len_seq + body)
if checksum != valid_checksum:
raise InvalidChecksumError(checksum, valid_checksum)
return body
|
Fix automatic reconnect (e.g. on bad auth key)
|
Fix automatic reconnect (e.g. on bad auth key)
This took more time than it should have to debug.
|
Python
|
mit
|
LonamiWebs/Telethon,expectocode/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon
|
python
|
## Code Before:
import struct
from zlib import crc32
from .connection import Connection
from ...errors import InvalidChecksumError
class ConnectionTcpFull(Connection):
"""
Default Telegram mode. Sends 12 additional bytes and
needs to calculate the CRC value of the packet itself.
"""
def __init__(self, ip, port, *, loop):
super().__init__(ip, port, loop=loop)
self._send_counter = 0
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
# total length, sequence number, packet and checksum (CRC32)
length = len(data) + 12
data = struct.pack('<ii', length, self._send_counter) + data
crc = struct.pack('<I', crc32(data))
self._send_counter += 1
self._writer.write(data + crc)
async def _recv(self):
packet_len_seq = await self._reader.readexactly(8) # 4 and 4
packet_len, seq = struct.unpack('<ii', packet_len_seq)
body = await self._reader.readexactly(packet_len - 8)
checksum = struct.unpack('<I', body[-4:])[0]
body = body[:-4]
valid_checksum = crc32(packet_len_seq + body)
if checksum != valid_checksum:
raise InvalidChecksumError(checksum, valid_checksum)
return body
## Instruction:
Fix automatic reconnect (e.g. on bad auth key)
This took more time than it should have to debug.
## Code After:
import struct
from zlib import crc32
from .connection import Connection
from ...errors import InvalidChecksumError
class ConnectionTcpFull(Connection):
"""
Default Telegram mode. Sends 12 additional bytes and
needs to calculate the CRC value of the packet itself.
"""
def __init__(self, ip, port, *, loop):
super().__init__(ip, port, loop=loop)
self._send_counter = 0
async def connect(self):
await super().connect()
self._send_counter = 0 # Important or Telegram won't reply
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
# total length, sequence number, packet and checksum (CRC32)
length = len(data) + 12
data = struct.pack('<ii', length, self._send_counter) + data
crc = struct.pack('<I', crc32(data))
self._send_counter += 1
self._writer.write(data + crc)
async def _recv(self):
packet_len_seq = await self._reader.readexactly(8) # 4 and 4
packet_len, seq = struct.unpack('<ii', packet_len_seq)
body = await self._reader.readexactly(packet_len - 8)
checksum = struct.unpack('<I', body[-4:])[0]
body = body[:-4]
valid_checksum = crc32(packet_len_seq + body)
if checksum != valid_checksum:
raise InvalidChecksumError(checksum, valid_checksum)
return body
|
// ... existing code ...
def __init__(self, ip, port, *, loop):
super().__init__(ip, port, loop=loop)
self._send_counter = 0
async def connect(self):
await super().connect()
self._send_counter = 0 # Important or Telegram won't reply
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
// ... rest of the code ...
|
2be9da941fbbf17a54abd79ecae80d0245c1912e
|
moulinette/utils/stream.py
|
moulinette/utils/stream.py
|
from threading import Thread
from Queue import Queue, Empty
# Read from a stream ---------------------------------------------------
class NonBlockingStreamReader:
"""A non-blocking stream reader
Open a separate thread which reads lines from the stream whenever data
becomes available and stores the data in a queue.
Based on: http://eyalarubas.com/python-subproc-nonblock.html
Keyword arguments:
- stream -- The stream to read from
"""
def __init__(self, stream):
self._s = stream
self._q = Queue()
def _populateQueue(stream, queue):
"""Collect lines from the stream and put them in the queue"""
while True:
line = stream.readline()
if line:
queue.put(line)
else:
break
self._t = Thread(target=_populateQueue, args=(self._s, self._q))
self._t.daemon = True
# Start collecting lines from the stream
self._t.start()
def readline(self, block=False, timeout=None):
"""Read line from the stream
Attempt to pull from the queue the data and return it. If no data is
available or timeout has expired, it returns None.
Keyword arguments:
- block -- If True, block if necessary until data is available
- timeout -- The number of seconds to block
"""
try:
return self._q.get(block=timeout is not None,
timeout=timeout)
except Empty:
return None
def close(self):
"""Close the stream"""
try:
self._s.close()
except IOError:
pass
|
import threading
import Queue
# Read from a stream ---------------------------------------------------
class AsynchronousFileReader(threading.Thread):
"""
Helper class to implement asynchronous reading of a file
in a separate thread. Pushes read lines on a queue to
be consumed in another thread.
Based on:
http://stefaanlippens.net/python-asynchronous-subprocess-pipe-reading
"""
def __init__(self, fd, queue):
assert isinstance(queue, Queue.Queue)
assert callable(fd.readline)
threading.Thread.__init__(self)
self._fd = fd
self._queue = queue
def run(self):
"""The body of the tread: read lines and put them on the queue."""
for line in iter(self._fd.readline, ''):
self._queue.put(line)
def eof(self):
"""Check whether there is no more content to expect."""
return not self.is_alive() and self._queue.empty()
def join(self, timeout=None, close=True):
"""Close the file and join the thread."""
if close:
self._fd.close()
threading.Thread.join(self, timeout)
def start_async_file_reading(fd):
"""Helper which instantiate and run an AsynchronousFileReader."""
queue = Queue.Queue()
reader = AsynchronousFileReader(fd, queue)
reader.start()
return (reader, queue)
|
Use a new asynchronous file reader helper
|
[ref] Use a new asynchronous file reader helper
|
Python
|
agpl-3.0
|
YunoHost/moulinette
|
python
|
## Code Before:
from threading import Thread
from Queue import Queue, Empty
# Read from a stream ---------------------------------------------------
class NonBlockingStreamReader:
"""A non-blocking stream reader
Open a separate thread which reads lines from the stream whenever data
becomes available and stores the data in a queue.
Based on: http://eyalarubas.com/python-subproc-nonblock.html
Keyword arguments:
- stream -- The stream to read from
"""
def __init__(self, stream):
self._s = stream
self._q = Queue()
def _populateQueue(stream, queue):
"""Collect lines from the stream and put them in the queue"""
while True:
line = stream.readline()
if line:
queue.put(line)
else:
break
self._t = Thread(target=_populateQueue, args=(self._s, self._q))
self._t.daemon = True
# Start collecting lines from the stream
self._t.start()
def readline(self, block=False, timeout=None):
"""Read line from the stream
Attempt to pull from the queue the data and return it. If no data is
available or timeout has expired, it returns None.
Keyword arguments:
- block -- If True, block if necessary until data is available
- timeout -- The number of seconds to block
"""
try:
return self._q.get(block=timeout is not None,
timeout=timeout)
except Empty:
return None
def close(self):
"""Close the stream"""
try:
self._s.close()
except IOError:
pass
## Instruction:
[ref] Use a new asynchronous file reader helper
## Code After:
import threading
import Queue
# Read from a stream ---------------------------------------------------
class AsynchronousFileReader(threading.Thread):
"""
Helper class to implement asynchronous reading of a file
in a separate thread. Pushes read lines on a queue to
be consumed in another thread.
Based on:
http://stefaanlippens.net/python-asynchronous-subprocess-pipe-reading
"""
def __init__(self, fd, queue):
assert isinstance(queue, Queue.Queue)
assert callable(fd.readline)
threading.Thread.__init__(self)
self._fd = fd
self._queue = queue
def run(self):
"""The body of the tread: read lines and put them on the queue."""
for line in iter(self._fd.readline, ''):
self._queue.put(line)
def eof(self):
"""Check whether there is no more content to expect."""
return not self.is_alive() and self._queue.empty()
def join(self, timeout=None, close=True):
"""Close the file and join the thread."""
if close:
self._fd.close()
threading.Thread.join(self, timeout)
def start_async_file_reading(fd):
"""Helper which instantiate and run an AsynchronousFileReader."""
queue = Queue.Queue()
reader = AsynchronousFileReader(fd, queue)
reader.start()
return (reader, queue)
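A usage sketch, not part of the commit, for the helper above; the subprocess command and the moulinette.utils.stream import path are assumptions, and the polling sleep only keeps the loop simple.
import subprocess
import time
from moulinette.utils.stream import start_async_file_reading
proc = subprocess.Popen(['ping', '-c', '3', 'localhost'], stdout=subprocess.PIPE)
reader, queue = start_async_file_reading(proc.stdout)
while not reader.eof():
    while not queue.empty():
        print(queue.get().rstrip())   # consume lines as soon as they arrive
    time.sleep(0.1)
reader.join()   # closes proc.stdout and joins the reader thread
proc.wait()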
|
// ... existing code ...
import threading
import Queue
# Read from a stream ---------------------------------------------------
class AsynchronousFileReader(threading.Thread):
"""
Helper class to implement asynchronous reading of a file
in a separate thread. Pushes read lines on a queue to
be consumed in another thread.
Based on:
http://stefaanlippens.net/python-asynchronous-subprocess-pipe-reading
"""
def __init__(self, fd, queue):
assert isinstance(queue, Queue.Queue)
assert callable(fd.readline)
threading.Thread.__init__(self)
self._fd = fd
self._queue = queue
def run(self):
"""The body of the tread: read lines and put them on the queue."""
for line in iter(self._fd.readline, ''):
self._queue.put(line)
def eof(self):
"""Check whether there is no more content to expect."""
return not self.is_alive() and self._queue.empty()
def join(self, timeout=None, close=True):
"""Close the file and join the thread."""
if close:
self._fd.close()
threading.Thread.join(self, timeout)
def start_async_file_reading(fd):
"""Helper which instantiate and run an AsynchronousFileReader."""
queue = Queue.Queue()
reader = AsynchronousFileReader(fd, queue)
reader.start()
return (reader, queue)
// ... rest of the code ...
|
3d5d52f7d529183bd56da43df2503a53fe3b6fc8
|
oauth2/_compat.py
|
oauth2/_compat.py
|
try:
TEXT = unicode
except NameError: #pragma NO COVER Py3k
TEXT = str
STRING_TYPES = (str, bytes)
else: #pragma NO COVER Python2
STRING_TYPES = (unicode, bytes)
def u(x, encoding='ascii'):
if isinstance(x, TEXT): #pragma NO COVER
return x
try:
return x.decode(encoding)
except AttributeError: #pragma NO COVER
raise ValueError('WTF: %s' % x)
try:
import urlparse
except ImportError: #pragma NO COVER Py3k
from urllib.parse import parse_qs
from urllib.parse import parse_qsl
from urllib.parse import quote
from urllib.parse import unquote
from urllib.parse import unquote_to_bytes
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import urlunparse
else: #pragma NO COVER Python2
from urlparse import parse_qs
from urlparse import parse_qsl
from urllib import quote
from urllib import unquote
from urllib import urlencode
from urlparse import urlparse
from urlparse import urlunparse
unquote_to_bytes = unquote
|
try:
TEXT = unicode
except NameError: #pragma NO COVER Py3k
TEXT = str
STRING_TYPES = (str, bytes)
def b(x, encoding='ascii'):
return bytes(x, encoding)
else: #pragma NO COVER Python2
STRING_TYPES = (unicode, bytes)
def b(x, encoding='ascii'):
if isinstance(x, unicode):
x = x.encode(encoding)
return x
def u(x, encoding='ascii'):
if isinstance(x, TEXT): #pragma NO COVER
return x
try:
return x.decode(encoding)
except AttributeError: #pragma NO COVER
raise ValueError('WTF: %s' % x)
try:
import urlparse
except ImportError: #pragma NO COVER Py3k
from urllib.parse import parse_qs
from urllib.parse import parse_qsl
from urllib.parse import quote
from urllib.parse import unquote
from urllib.parse import unquote_to_bytes
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import urlunparse
else: #pragma NO COVER Python2
from urlparse import parse_qs
from urlparse import parse_qsl
from urllib import quote
from urllib import unquote
from urllib import urlencode
from urlparse import urlparse
from urlparse import urlunparse
unquote_to_bytes = unquote
|
Add a 'b()' utility for forcing encoding to bytes.
|
Add a 'b()' utility for forcing encoding to bytes.
In Python2, the 'bytes()' builtin doesn't take an encoding argument.
|
Python
|
mit
|
CentricWebEstate/python-oauth2,squirro/python-oauth2,arthurian/python-oauth2,CestusMagnus/python-oauth2,joestump/python-oauth2,jackiekazil/python-oauth2,simplegeo/python-oauth2,edworboys/python-oauth2
|
python
|
## Code Before:
try:
TEXT = unicode
except NameError: #pragma NO COVER Py3k
TEXT = str
STRING_TYPES = (str, bytes)
else: #pragma NO COVER Python2
STRING_TYPES = (unicode, bytes)
def u(x, encoding='ascii'):
if isinstance(x, TEXT): #pragma NO COVER
return x
try:
return x.decode(encoding)
except AttributeError: #pragma NO COVER
raise ValueError('WTF: %s' % x)
try:
import urlparse
except ImportError: #pragma NO COVER Py3k
from urllib.parse import parse_qs
from urllib.parse import parse_qsl
from urllib.parse import quote
from urllib.parse import unquote
from urllib.parse import unquote_to_bytes
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import urlunparse
else: #pragma NO COVER Python2
from urlparse import parse_qs
from urlparse import parse_qsl
from urllib import quote
from urllib import unquote
from urllib import urlencode
from urlparse import urlparse
from urlparse import urlunparse
unquote_to_bytes = unquote
## Instruction:
Add a 'b()' utility for forcing encoding to bytes.
In Python2, the 'bytes()' builtin doesn't take an encoding argument.
## Code After:
try:
TEXT = unicode
except NameError: #pragma NO COVER Py3k
TEXT = str
STRING_TYPES = (str, bytes)
def b(x, encoding='ascii'):
return bytes(x, encoding)
else: #pragma NO COVER Python2
STRING_TYPES = (unicode, bytes)
def b(x, encoding='ascii'):
if isinstance(x, unicode):
x = x.encode(encoding)
return x
def u(x, encoding='ascii'):
if isinstance(x, TEXT): #pragma NO COVER
return x
try:
return x.decode(encoding)
except AttributeError: #pragma NO COVER
raise ValueError('WTF: %s' % x)
try:
import urlparse
except ImportError: #pragma NO COVER Py3k
from urllib.parse import parse_qs
from urllib.parse import parse_qsl
from urllib.parse import quote
from urllib.parse import unquote
from urllib.parse import unquote_to_bytes
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import urlunparse
else: #pragma NO COVER Python2
from urlparse import parse_qs
from urlparse import parse_qsl
from urllib import quote
from urllib import unquote
from urllib import urlencode
from urlparse import urlparse
from urlparse import urlunparse
unquote_to_bytes = unquote
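A brief usage sketch, not part of the commit, assuming the module is importable as oauth2._compat; it shows the text/bytes round trip behaving the same on Python 2 and 3.
from oauth2._compat import b, u
raw = b(u'caf\xe9', encoding='utf-8')   # text in, bytes out on both Python 2 and 3
text = u(raw, encoding='utf-8')         # and back to text again
assert isinstance(raw, bytes) and text == u'caf\xe9'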
|
// ... existing code ...
except NameError: #pragma NO COVER Py3k
TEXT = str
STRING_TYPES = (str, bytes)
def b(x, encoding='ascii'):
return bytes(x, encoding)
else: #pragma NO COVER Python2
STRING_TYPES = (unicode, bytes)
def b(x, encoding='ascii'):
if isinstance(x, unicode):
x = x.encode(encoding)
return x
def u(x, encoding='ascii'):
if isinstance(x, TEXT): #pragma NO COVER
// ... rest of the code ...
|
b17472c86ffca7811246080cf3b4b3f3b84e36b1
|
common/src/tests/common/components/test_command.py
|
common/src/tests/common/components/test_command.py
|
import unittest
from gosa.common.components.command import *
class CommandTestCase(unittest.TestCase):
"""Docs"""
@Command(__help__="TEST")
def test_command(self):
pass
"""Docs"""
@Command()
def test_command2(self):
pass
# agent and client terms still in use in command.py
|
import unittest
from gosa.common.components.command import *
class CommandTestCase(unittest.TestCase):
@Command(__help__="TEST")
def test_command(self):
pass
@Command()
def test_command2(self):
"""Docs"""
pass
# agent and client terms still in use in command.py
|
Fix in tests: Docstring at wrong location
|
Fix in tests: Docstring at wrong location
|
Python
|
lgpl-2.1
|
gonicus/gosa,gonicus/gosa,gonicus/gosa,gonicus/gosa
|
python
|
## Code Before:
import unittest
from gosa.common.components.command import *
class CommandTestCase(unittest.TestCase):
"""Docs"""
@Command(__help__="TEST")
def test_command(self):
pass
"""Docs"""
@Command()
def test_command2(self):
pass
# agent and client terms still in use in command.py
## Instruction:
Fix in tests: Docstring at wrong location
## Code After:
import unittest
from gosa.common.components.command import *
class CommandTestCase(unittest.TestCase):
@Command(__help__="TEST")
def test_command(self):
pass
@Command()
def test_command2(self):
"""Docs"""
pass
# agent and client terms still in use in command.py
|
// ... existing code ...
from gosa.common.components.command import *
class CommandTestCase(unittest.TestCase):
@Command(__help__="TEST")
def test_command(self):
pass
@Command()
def test_command2(self):
"""Docs"""
pass
# agent and client terms still in use in command.py
// ... rest of the code ...
|
5d21ad5ae63addac0892242fe774250a2934fc87
|
awx/lib/metrics.py
|
awx/lib/metrics.py
|
from __future__ import absolute_import
import logging
from functools import wraps
from django_statsd.clients import statsd
logger = logging.getLogger(__name__)
def task_timer(fn):
@wraps(fn)
def __wrapped__(self, *args, **kwargs):
statsd.incr('tasks.{}.{}.count'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__))
with statsd.timer('tasks.{}.{}.timer'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__)):
return fn(self, *args, **kwargs)
return __wrapped__
class BaseTimer(object):
def __init__(self, name, prefix=None):
self.name = name.rsplit('.', 1)[-1]
if prefix:
self.name = '{}.{}'.format(prefix, self.name)
def __call__(self, fn):
@wraps(fn)
def __wrapped__(obj, *args, **kwargs):
statsd.incr('{}.{}.count'.format(
self.name,
fn.__name__
))
with statsd.timer('{}.{}.timer'.format(
self.name,
fn.__name__
)):
return fn(obj, *args, **kwargs)
return __wrapped__
|
from __future__ import absolute_import
import logging
from functools import wraps
from django_statsd.clients import statsd
logger = logging.getLogger(__name__)
def task_timer(fn):
@wraps(fn)
def __wrapped__(self, *args, **kwargs):
statsd.incr('tasks.{0}.{1}.count'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__))
with statsd.timer('tasks.{0}.{1}.timer'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__)):
return fn(self, *args, **kwargs)
return __wrapped__
class BaseTimer(object):
def __init__(self, name, prefix=None):
self.name = name.rsplit('.', 1)[-1]
if prefix:
self.name = '{0}.{1}'.format(prefix, self.name)
def __call__(self, fn):
@wraps(fn)
def __wrapped__(obj, *args, **kwargs):
statsd.incr('{0}.{1}.count'.format(
self.name,
fn.__name__
))
with statsd.timer('{0}.{1}.timer'.format(
self.name,
fn.__name__
)):
return fn(obj, *args, **kwargs)
return __wrapped__
|
Fix up statsd work to support python 2.6
|
Fix up statsd work to support python 2.6
Format specifiers must include field specifier
|
Python
|
apache-2.0
|
snahelou/awx,wwitzel3/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,snahelou/awx,wwitzel3/awx
|
python
|
## Code Before:
from __future__ import absolute_import
import logging
from functools import wraps
from django_statsd.clients import statsd
logger = logging.getLogger(__name__)
def task_timer(fn):
@wraps(fn)
def __wrapped__(self, *args, **kwargs):
statsd.incr('tasks.{}.{}.count'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__))
with statsd.timer('tasks.{}.{}.timer'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__)):
return fn(self, *args, **kwargs)
return __wrapped__
class BaseTimer(object):
def __init__(self, name, prefix=None):
self.name = name.rsplit('.', 1)[-1]
if prefix:
self.name = '{}.{}'.format(prefix, self.name)
def __call__(self, fn):
@wraps(fn)
def __wrapped__(obj, *args, **kwargs):
statsd.incr('{}.{}.count'.format(
self.name,
fn.__name__
))
with statsd.timer('{}.{}.timer'.format(
self.name,
fn.__name__
)):
return fn(obj, *args, **kwargs)
return __wrapped__
## Instruction:
Fix up statsd work to support python 2.6
Format specifiers must include field specifier
## Code After:
from __future__ import absolute_import
import logging
from functools import wraps
from django_statsd.clients import statsd
logger = logging.getLogger(__name__)
def task_timer(fn):
@wraps(fn)
def __wrapped__(self, *args, **kwargs):
statsd.incr('tasks.{0}.{1}.count'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__))
with statsd.timer('tasks.{0}.{1}.timer'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__)):
return fn(self, *args, **kwargs)
return __wrapped__
class BaseTimer(object):
def __init__(self, name, prefix=None):
self.name = name.rsplit('.', 1)[-1]
if prefix:
self.name = '{0}.{1}'.format(prefix, self.name)
def __call__(self, fn):
@wraps(fn)
def __wrapped__(obj, *args, **kwargs):
statsd.incr('{0}.{1}.count'.format(
self.name,
fn.__name__
))
with statsd.timer('{0}.{1}.timer'.format(
self.name,
fn.__name__
)):
return fn(obj, *args, **kwargs)
return __wrapped__
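An illustrative aside rather than part of the commit: Python 2.6's str.format rejects auto-numbered fields, which is why every '{}.{}' above became '{0}.{1}'.
'{0}.{1}.count'.format('tasks', 'run_job')   # 'tasks.run_job.count' on 2.6, 2.7 and 3.x
'{}.{}.count'.format('tasks', 'run_job')     # ValueError on Python 2.6: zero length field name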
|
# ... existing code ...
def task_timer(fn):
@wraps(fn)
def __wrapped__(self, *args, **kwargs):
statsd.incr('tasks.{0}.{1}.count'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__))
with statsd.timer('tasks.{0}.{1}.timer'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__)):
return fn(self, *args, **kwargs)
# ... modified code ...
def __init__(self, name, prefix=None):
self.name = name.rsplit('.', 1)[-1]
if prefix:
self.name = '{0}.{1}'.format(prefix, self.name)
def __call__(self, fn):
@wraps(fn)
def __wrapped__(obj, *args, **kwargs):
statsd.incr('{0}.{1}.count'.format(
self.name,
fn.__name__
))
with statsd.timer('{0}.{1}.timer'.format(
self.name,
fn.__name__
)):
# ... rest of the code ...
|
b0ae4cb386411ae8ae5fd27b19ddb415d0772cf3
|
democracy_club/apps/everyelection/forms.py
|
democracy_club/apps/everyelection/forms.py
|
from django.forms import (ModelForm, CheckboxSelectMultiple,
MultipleChoiceField)
from .models import AuthorityElection, AuthorityElectionPosition
class AuthorityAreaForm(ModelForm):
def __init__(self, user, *args, **kwargs):
super().__init__(*args, **kwargs)
self.user = user
# import ipdb; ipdb.set_trace().
self.fields['areas'] = MultipleChoiceField(
choices=[
(a.pk, a.name) for a in self.instance.authority.child_areas],
label="Wards",
widget=CheckboxSelectMultiple
)
class Meta:
model = AuthorityElection
fields = []
def clean(self, *args, **kwargs):
for area in self.cleaned_data['areas']:
AuthorityElectionPosition.objects.get_or_create(
authority_election=self.instance,
user=self.user,
area_id=area
)
return super().clean(*args, **kwargs)
|
from django.forms import (ModelForm, CheckboxSelectMultiple,
MultipleChoiceField)
from .models import AuthorityElection, AuthorityElectionPosition
class AuthorityAreaForm(ModelForm):
def __init__(self, user, *args, **kwargs):
super().__init__(*args, **kwargs)
self.user = user
# import ipdb; ipdb.set_trace().
self.fields['areas'] = MultipleChoiceField(
choices=[
(a.pk, a.name) for a in self.instance.authority.child_areas],
label="Wards",
widget=CheckboxSelectMultiple
)
class Meta:
model = AuthorityElection
fields = []
def clean(self, *args, **kwargs):
if 'areas' in self.cleaned_data:
for area in self.cleaned_data['areas']:
AuthorityElectionPosition.objects.get_or_create(
authority_election=self.instance,
user=self.user,
area_id=area
)
return super().clean(*args, **kwargs)
|
Check that at least one area has been checked
|
Check that at least one area has been checked
|
Python
|
bsd-3-clause
|
DemocracyClub/Website,DemocracyClub/Website,DemocracyClub/Website,DemocracyClub/Website
|
python
|
## Code Before:
from django.forms import (ModelForm, CheckboxSelectMultiple,
MultipleChoiceField)
from .models import AuthorityElection, AuthorityElectionPosition
class AuthorityAreaForm(ModelForm):
def __init__(self, user, *args, **kwargs):
super().__init__(*args, **kwargs)
self.user = user
# import ipdb; ipdb.set_trace().
self.fields['areas'] = MultipleChoiceField(
choices=[
(a.pk, a.name) for a in self.instance.authority.child_areas],
label="Wards",
widget=CheckboxSelectMultiple
)
class Meta:
model = AuthorityElection
fields = []
def clean(self, *args, **kwargs):
for area in self.cleaned_data['areas']:
AuthorityElectionPosition.objects.get_or_create(
authority_election=self.instance,
user=self.user,
area_id=area
)
return super().clean(*args, **kwargs)
## Instruction:
Check that at least one area has been checked
## Code After:
from django.forms import (ModelForm, CheckboxSelectMultiple,
MultipleChoiceField)
from .models import AuthorityElection, AuthorityElectionPosition
class AuthorityAreaForm(ModelForm):
def __init__(self, user, *args, **kwargs):
super().__init__(*args, **kwargs)
self.user = user
# import ipdb; ipdb.set_trace().
self.fields['areas'] = MultipleChoiceField(
choices=[
(a.pk, a.name) for a in self.instance.authority.child_areas],
label="Wards",
widget=CheckboxSelectMultiple
)
class Meta:
model = AuthorityElection
fields = []
def clean(self, *args, **kwargs):
if 'areas' in self.cleaned_data:
for area in self.cleaned_data['areas']:
AuthorityElectionPosition.objects.get_or_create(
authority_election=self.instance,
user=self.user,
area_id=area
)
return super().clean(*args, **kwargs)
|
# ... existing code ...
fields = []
def clean(self, *args, **kwargs):
if 'areas' in self.cleaned_data:
for area in self.cleaned_data['areas']:
AuthorityElectionPosition.objects.get_or_create(
authority_election=self.instance,
user=self.user,
area_id=area
)
return super().clean(*args, **kwargs)
# ... rest of the code ...
|
79fc87489595eebbc6c9f40d9d79a74af4e7494d
|
scripts/testdynamic.py
|
scripts/testdynamic.py
|
#!/usr/bin/env python
# -*- coding: utf-8
import Ice, IcePy, sys, tempfile
ice = Ice.initialize(sys.argv)
proxy = ice.stringToProxy('Meta:tcp -h 127.0.0.1 -p 6502')
slice = IcePy.Operation('getSlice', Ice.OperationMode.Idempotent, Ice.OperationMode.Idempotent, True, (), (), (), IcePy._t_string, ()).invoke(proxy, ((), None))
slicefile = tempfile.NamedTemporaryFile(suffix = '.ice')
slicefile.write(slice)
slicefile.flush()
Ice.loadSlice(slicefile.name)
slicefile.close()
import Murmur
meta = Murmur.MetaPrx.checkedCast(proxy)
print meta.getVersion()
|
#!/usr/bin/env python
# -*- coding: utf-8
import Ice, IcePy, sys, tempfile
ice = Ice.initialize(sys.argv)
proxy = ice.stringToProxy('Meta:tcp -h 127.0.0.1 -p 6502')
try:
slice = IcePy.Operation('getSlice', Ice.OperationMode.Idempotent, Ice.OperationMode.Idempotent, True, (), (), (), IcePy._t_string, ()).invoke(proxy, ((), None))
slicefile = tempfile.NamedTemporaryFile(suffix = '.ice')
slicefile.write(slice)
slicefile.flush()
Ice.loadSlice(slicefile.name)
slicefile.close()
print 'Using dynamic slice'
except:
Ice.loadSlice('Murmur.ice')
print 'Using bundled slice'
import Murmur
meta = Murmur.MetaPrx.checkedCast(proxy)
print meta.getVersion()
|
Expand dynamic slice-fetch example to show fallback
|
Expand dynamic slice-fetch example to show fallback
|
Python
|
bsd-3-clause
|
mkrautz/mumble-sbcelt,austinliou/mumble,unascribed/mumble,LuAPi/mumble,Githlar/mumble,chiefdome/mumble-code,panaschieren/mumble-test,ccpgames/mumble,unascribed/mumble,Zopieux/mumble,SuperNascher/mumble,Zopieux/mumble,Lartza/mumble,feld/mumble,feld/mumble,arrai/mumble-record,Lartza/mumble,unascribed/mumble,ccpgames/mumble,chancegarcia/mumble,Zopieux/mumble,feld/mumble,bheart/mumble,Keridos/mumble,chancegarcia/mumble,mbax/mumble,unascribed/mumble,Githlar/mumble,mkrautz/mumble-sbcelt,LuAPi/mumble,arrai/mumble-record,austinliou/mumble,Keridos/mumble,niko20010/mumble,richard227/mumble,mbax/mumble,panaschieren/mumble-test,austinliou/mumble,LuAPi/mumble,arrai/mumble-record,austinliou/mumble,Natenom/mumble,Zopieux/mumble,niko20010/mumble,SuperNascher/mumble,bheart/mumble,Githlar/mumble,Natenom/mumble,chancegarcia/mumble,bheart/mumble,mkrautz/mumble-sbcelt,SuperNascher/mumble,bheart/mumble,LuAPi/mumble,Githlar/mumble,richard227/mumble,feld/mumble,SuperNascher/mumble,Lartza/mumble,LuAPi/mumble,Lartza/mumble,mbax/mumble,Natenom/mumble,Zopieux/mumble,niko20010/mumble,Lartza/mumble,mkrautz/mumble-sbcelt,SuperNascher/mumble,mbax/mumble,LuAPi/mumble,arrai/mumble-record,Zopieux/mumble,chancegarcia/mumble,arrai/mumble-record,panaschieren/mumble-test,mkrautz/mumble-sbcelt,Keridos/mumble,Githlar/mumble,richard227/mumble,unascribed/mumble,niko20010/mumble,chiefdome/mumble-code,mkrautz/mumble-sbcelt,ccpgames/mumble,niko20010/mumble,unascribed/mumble,Keridos/mumble,Lartza/mumble,niko20010/mumble,mkrautz/mumble-sbcelt,niko20010/mumble,chiefdome/mumble-code,Githlar/mumble,LuAPi/mumble,mbax/mumble,feld/mumble,Keridos/mumble,LuAPi/mumble,chiefdome/mumble-code,arrai/mumble-record,unascribed/mumble,austinliou/mumble,ccpgames/mumble,Zopieux/mumble,Natenom/mumble,SuperNascher/mumble,Natenom/mumble,chiefdome/mumble-code,Keridos/mumble,austinliou/mumble,Natenom/mumble,feld/mumble,mbax/mumble,SuperNascher/mumble,chiefdome/mumble-code,arrai/mumble-record,panaschieren/mumble-test,Keridos/mumble,SuperNascher/mumble,SuperNascher/mumble,ccpgames/mumble,chancegarcia/mumble,richard227/mumble,panaschieren/mumble-test,ccpgames/mumble,arrai/mumble-record,bheart/mumble,mkrautz/mumble-sbcelt,Lartza/mumble,feld/mumble,unascribed/mumble,ccpgames/mumble,Githlar/mumble,panaschieren/mumble-test,panaschieren/mumble-test,richard227/mumble,feld/mumble,chancegarcia/mumble,chancegarcia/mumble,chiefdome/mumble-code,bheart/mumble,chancegarcia/mumble,ccpgames/mumble,mbax/mumble,bheart/mumble,richard227/mumble,Keridos/mumble,mbax/mumble,Zopieux/mumble,niko20010/mumble,bheart/mumble,Lartza/mumble,chiefdome/mumble-code,LuAPi/mumble,panaschieren/mumble-test,chancegarcia/mumble,richard227/mumble,austinliou/mumble,Natenom/mumble,Githlar/mumble,Natenom/mumble,austinliou/mumble,richard227/mumble
|
python
|
## Code Before:
#!/usr/bin/env python
# -*- coding: utf-8
import Ice, IcePy, sys, tempfile
ice = Ice.initialize(sys.argv)
proxy = ice.stringToProxy('Meta:tcp -h 127.0.0.1 -p 6502')
slice = IcePy.Operation('getSlice', Ice.OperationMode.Idempotent, Ice.OperationMode.Idempotent, True, (), (), (), IcePy._t_string, ()).invoke(proxy, ((), None))
slicefile = tempfile.NamedTemporaryFile(suffix = '.ice')
slicefile.write(slice)
slicefile.flush()
Ice.loadSlice(slicefile.name)
slicefile.close()
import Murmur
meta = Murmur.MetaPrx.checkedCast(proxy)
print meta.getVersion()
## Instruction:
Expand dynamic slice-fetch example to show fallback
## Code After:
#!/usr/bin/env python
# -*- coding: utf-8
import Ice, IcePy, sys, tempfile
ice = Ice.initialize(sys.argv)
proxy = ice.stringToProxy('Meta:tcp -h 127.0.0.1 -p 6502')
try:
slice = IcePy.Operation('getSlice', Ice.OperationMode.Idempotent, Ice.OperationMode.Idempotent, True, (), (), (), IcePy._t_string, ()).invoke(proxy, ((), None))
slicefile = tempfile.NamedTemporaryFile(suffix = '.ice')
slicefile.write(slice)
slicefile.flush()
Ice.loadSlice(slicefile.name)
slicefile.close()
print 'Using dynamic slice'
except:
Ice.loadSlice('Murmur.ice')
print 'Using bundled slice'
import Murmur
meta = Murmur.MetaPrx.checkedCast(proxy)
print meta.getVersion()
|
...
ice = Ice.initialize(sys.argv)
proxy = ice.stringToProxy('Meta:tcp -h 127.0.0.1 -p 6502')
try:
slice = IcePy.Operation('getSlice', Ice.OperationMode.Idempotent, Ice.OperationMode.Idempotent, True, (), (), (), IcePy._t_string, ()).invoke(proxy, ((), None))
slicefile = tempfile.NamedTemporaryFile(suffix = '.ice')
slicefile.write(slice)
slicefile.flush()
Ice.loadSlice(slicefile.name)
slicefile.close()
print 'Using dynamic slice'
except:
Ice.loadSlice('Murmur.ice')
print 'Using bundled slice'
import Murmur
...
|
c2b0f66d5760d61444b4909e40c45993780cd473
|
examples/champion.py
|
examples/champion.py
|
import cassiopeia as cass
from cassiopeia.core import Champion
def test_cass():
#annie = Champion(name="Annie", region="NA")
annie = Champion(name="Annie")
print(annie.name)
print(annie.title)
print(annie.title)
for spell in annie.spells:
print(spell.name, spell.keywords)
print(annie.info.difficulty)
print(annie.passive.name)
#print(annie.recommended_itemsets[0].item_sets[0].items)
print(annie.free_to_play)
print(annie._Ghost__all_loaded)
print(annie)
return
print()
#ziggs = cass.get_champion(region="NA", "Ziggs")
ziggs = cass.get_champion("Renekton")
print(ziggs.name)
print(ziggs.region)
#print(ziggs.recommended_itemset[0].item_sets[0].items)
print(ziggs.free_to_play)
for spell in ziggs.spells:
for var in spell.variables:
print(spell.name, var)
print(ziggs._Ghost__all_loaded)
if __name__ == "__main__":
test_cass()
|
import cassiopeia as cass
from cassiopeia.core import Champion
def test_cass():
#annie = Champion(name="Annie", region="NA")
annie = Champion(name="Annie")
print(annie.name)
print(annie.title)
print(annie.title)
for spell in annie.spells:
print(spell.name, spell.keywords)
print(annie.info.difficulty)
print(annie.passive.name)
#print(annie.recommended_itemsets[0].item_sets[0].items)
print(annie.free_to_play)
print(annie._Ghost__all_loaded)
print(annie)
print()
#ziggs = cass.get_champion(region="NA", "Ziggs")
ziggs = cass.get_champion("Ziggs")
print(ziggs.name)
print(ziggs.region)
#print(ziggs.recommended_itemset[0].item_sets[0].items)
print(ziggs.free_to_play)
for spell in ziggs.spells:
for var in spell.variables:
print(spell.name, var)
print(ziggs._Ghost__all_loaded)
if __name__ == "__main__":
test_cass()
|
Remove `return`, get Ziggs instead of Renekton, since we're saving as Ziggs
|
Remove `return`, get Ziggs instead of Renekton, since we're saving as Ziggs
|
Python
|
mit
|
robrua/cassiopeia,meraki-analytics/cassiopeia,10se1ucgo/cassiopeia
|
python
|
## Code Before:
import cassiopeia as cass
from cassiopeia.core import Champion
def test_cass():
#annie = Champion(name="Annie", region="NA")
annie = Champion(name="Annie")
print(annie.name)
print(annie.title)
print(annie.title)
for spell in annie.spells:
print(spell.name, spell.keywords)
print(annie.info.difficulty)
print(annie.passive.name)
#print(annie.recommended_itemsets[0].item_sets[0].items)
print(annie.free_to_play)
print(annie._Ghost__all_loaded)
print(annie)
return
print()
#ziggs = cass.get_champion(region="NA", "Ziggs")
ziggs = cass.get_champion("Renekton")
print(ziggs.name)
print(ziggs.region)
#print(ziggs.recommended_itemset[0].item_sets[0].items)
print(ziggs.free_to_play)
for spell in ziggs.spells:
for var in spell.variables:
print(spell.name, var)
print(ziggs._Ghost__all_loaded)
if __name__ == "__main__":
test_cass()
## Instruction:
Remove `return`, get Ziggs instead of Renekton, since we're saving as Ziggs
## Code After:
import cassiopeia as cass
from cassiopeia.core import Champion
def test_cass():
#annie = Champion(name="Annie", region="NA")
annie = Champion(name="Annie")
print(annie.name)
print(annie.title)
print(annie.title)
for spell in annie.spells:
print(spell.name, spell.keywords)
print(annie.info.difficulty)
print(annie.passive.name)
#print(annie.recommended_itemsets[0].item_sets[0].items)
print(annie.free_to_play)
print(annie._Ghost__all_loaded)
print(annie)
print()
#ziggs = cass.get_champion(region="NA", "Ziggs")
ziggs = cass.get_champion("Ziggs")
print(ziggs.name)
print(ziggs.region)
#print(ziggs.recommended_itemset[0].item_sets[0].items)
print(ziggs.free_to_play)
for spell in ziggs.spells:
for var in spell.variables:
print(spell.name, var)
print(ziggs._Ghost__all_loaded)
if __name__ == "__main__":
test_cass()
|
# ... existing code ...
print(annie.free_to_play)
print(annie._Ghost__all_loaded)
print(annie)
print()
#ziggs = cass.get_champion(region="NA", "Ziggs")
ziggs = cass.get_champion("Ziggs")
print(ziggs.name)
print(ziggs.region)
#print(ziggs.recommended_itemset[0].item_sets[0].items)
# ... rest of the code ...
|
5c074950663d2e508fee0e015472e8460bf5b183
|
rootpy/plotting/canvas.py
|
rootpy/plotting/canvas.py
|
import ctypes, ctypes.util
ctypes.cdll.LoadLibrary(ctypes.util.find_library("Gui"))
import ROOT
from ..core import Object
from .. import rootpy_globals as _globals
from .. import defaults, QROOT
class _PadBase(Object):
def _post_init(self):
self.members = []
_globals.pad = self
def Clear(self, *args, **kwargs):
self.members = []
self.ROOT_base.Clear(self, *args, **kwargs)
def OwnMembers(self):
for thing in self.GetListOfPrimitives():
if thing not in self.members:
self.members.append(thing)
def cd(self, *args):
_globals.pad = self
return self.ROOT_base.cd(self, *args)
class Pad(_PadBase, QROOT.TPad):
def __init__(self, *args, **kwargs):
ROOT.TPad.__init__(self, *args, **kwargs)
self._post_init()
class Canvas(_PadBase, QROOT.TCanvas):
def __init__(self,
width=defaults.CANVAS_WIDTH,
height=defaults.CANVAS_HEIGHT,
xpos=0, ypos=0, name=None, title=None):
Object.__init__(self, name, title, xpos, ypos, width, height)
self._post_init()
|
import ROOT
from ..core import Object
from .. import rootpy_globals as _globals
from .. import defaults, QROOT
class _PadBase(Object):
def _post_init(self):
self.members = []
_globals.pad = self
def Clear(self, *args, **kwargs):
self.members = []
self.ROOT_base.Clear(self, *args, **kwargs)
def OwnMembers(self):
for thing in self.GetListOfPrimitives():
if thing not in self.members:
self.members.append(thing)
def cd(self, *args):
_globals.pad = self
return self.ROOT_base.cd(self, *args)
class Pad(_PadBase, QROOT.TPad):
def __init__(self, *args, **kwargs):
ROOT.TPad.__init__(self, *args, **kwargs)
self._post_init()
class Canvas(_PadBase, QROOT.TCanvas):
def __init__(self,
width=defaults.CANVAS_WIDTH,
height=defaults.CANVAS_HEIGHT,
xpos=0, ypos=0, name=None, title=None):
Object.__init__(self, name, title, xpos, ypos, width, height)
self._post_init()
|
Remove code which should never have made it in
|
Remove code which should never have made it in
|
Python
|
bsd-3-clause
|
rootpy/rootpy,kreczko/rootpy,kreczko/rootpy,kreczko/rootpy,rootpy/rootpy,ndawe/rootpy,rootpy/rootpy,ndawe/rootpy,ndawe/rootpy
|
python
|
## Code Before:
import ctypes, ctypes.util
ctypes.cdll.LoadLibrary(ctypes.util.find_library("Gui"))
import ROOT
from ..core import Object
from .. import rootpy_globals as _globals
from .. import defaults, QROOT
class _PadBase(Object):
def _post_init(self):
self.members = []
_globals.pad = self
def Clear(self, *args, **kwargs):
self.members = []
self.ROOT_base.Clear(self, *args, **kwargs)
def OwnMembers(self):
for thing in self.GetListOfPrimitives():
if thing not in self.members:
self.members.append(thing)
def cd(self, *args):
_globals.pad = self
return self.ROOT_base.cd(self, *args)
class Pad(_PadBase, QROOT.TPad):
def __init__(self, *args, **kwargs):
ROOT.TPad.__init__(self, *args, **kwargs)
self._post_init()
class Canvas(_PadBase, QROOT.TCanvas):
def __init__(self,
width=defaults.CANVAS_WIDTH,
height=defaults.CANVAS_HEIGHT,
xpos=0, ypos=0, name=None, title=None):
Object.__init__(self, name, title, xpos, ypos, width, height)
self._post_init()
## Instruction:
Remove code which should never have made it in
## Code After:
import ROOT
from ..core import Object
from .. import rootpy_globals as _globals
from .. import defaults, QROOT
class _PadBase(Object):
def _post_init(self):
self.members = []
_globals.pad = self
def Clear(self, *args, **kwargs):
self.members = []
self.ROOT_base.Clear(self, *args, **kwargs)
def OwnMembers(self):
for thing in self.GetListOfPrimitives():
if thing not in self.members:
self.members.append(thing)
def cd(self, *args):
_globals.pad = self
return self.ROOT_base.cd(self, *args)
class Pad(_PadBase, QROOT.TPad):
def __init__(self, *args, **kwargs):
ROOT.TPad.__init__(self, *args, **kwargs)
self._post_init()
class Canvas(_PadBase, QROOT.TCanvas):
def __init__(self,
width=defaults.CANVAS_WIDTH,
height=defaults.CANVAS_HEIGHT,
xpos=0, ypos=0, name=None, title=None):
Object.__init__(self, name, title, xpos, ypos, width, height)
self._post_init()
|
// ... existing code ...
import ROOT
// ... rest of the code ...
|
e1e046b87d4ba577e8325434f39cf2241a268be2
|
src/test/java/io/iron/ironmq/IronMQTest.java
|
src/test/java/io/iron/ironmq/IronMQTest.java
|
package io.iron.ironmq;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
public class IronMQTest {
private String projectId;
private String token;
@Before public void setup() {
projectId = System.getenv("IRON_PROJECT_ID");
token = System.getenv("IRON_TOKEN");
Assume.assumeTrue(projectId != null && token != null);
}
@Test public void testClient() throws IOException {
Client c = new Client(projectId, token, Cloud.ironAWSUSEast);
Queue q = c.queue("test-queue");
q.clear();
Assert.assertEquals(0, q.getSize());
final String body = "Hello, IronMQ!";
String id = q.push(body);
Assert.assertEquals(1, q.getSize());
Message msg = q.get();
Assert.assertEquals(body, msg.getBody());
Assert.assertEquals(id, msg.getId());
q.deleteMessage(msg);
}
@Test(expected=HTTPException.class) public void testErrorResponse() throws IOException {
// intentionally invalid project/token combination
Client c = new Client("4444444444444", "aaaaaa", Cloud.ironAWSUSEast);
Queue q = c.queue("test-queue");
q.push("test");
}
}
|
package io.iron.ironmq;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
public class IronMQTest {
@Test public void testClient() throws IOException {
Client c = new Client();
Queue q = c.queue("test-queue");
q.clear();
Assert.assertEquals(0, q.getSize());
final String body = "Hello, IronMQ!";
String id = q.push(body);
Assert.assertEquals(1, q.getSize());
Message msg = q.get();
Assert.assertEquals(body, msg.getBody());
Assert.assertEquals(id, msg.getId());
q.deleteMessage(msg);
}
@Test(expected=HTTPException.class) public void testErrorResponse() throws IOException {
// intentionally invalid project/token combination
Client c = new Client("4444444444444", "aaaaaa", Cloud.ironAWSUSEast);
Queue q = c.queue("test-queue");
q.push("test");
}
}
|
Make test use configuration logic
|
Make test use configuration logic
|
Java
|
bsd-2-clause
|
iron-io/iron_mq_java,iron-io/iron_mq_java
|
java
|
## Code Before:
package io.iron.ironmq;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
public class IronMQTest {
private String projectId;
private String token;
@Before public void setup() {
projectId = System.getenv("IRON_PROJECT_ID");
token = System.getenv("IRON_TOKEN");
Assume.assumeTrue(projectId != null && token != null);
}
@Test public void testClient() throws IOException {
Client c = new Client(projectId, token, Cloud.ironAWSUSEast);
Queue q = c.queue("test-queue");
q.clear();
Assert.assertEquals(0, q.getSize());
final String body = "Hello, IronMQ!";
String id = q.push(body);
Assert.assertEquals(1, q.getSize());
Message msg = q.get();
Assert.assertEquals(body, msg.getBody());
Assert.assertEquals(id, msg.getId());
q.deleteMessage(msg);
}
@Test(expected=HTTPException.class) public void testErrorResponse() throws IOException {
// intentionally invalid project/token combination
Client c = new Client("4444444444444", "aaaaaa", Cloud.ironAWSUSEast);
Queue q = c.queue("test-queue");
q.push("test");
}
}
## Instruction:
Make test use configuration logic
## Code After:
package io.iron.ironmq;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
public class IronMQTest {
@Test public void testClient() throws IOException {
Client c = new Client();
Queue q = c.queue("test-queue");
q.clear();
Assert.assertEquals(0, q.getSize());
final String body = "Hello, IronMQ!";
String id = q.push(body);
Assert.assertEquals(1, q.getSize());
Message msg = q.get();
Assert.assertEquals(body, msg.getBody());
Assert.assertEquals(id, msg.getId());
q.deleteMessage(msg);
}
@Test(expected=HTTPException.class) public void testErrorResponse() throws IOException {
// intentionally invalid project/token combination
Client c = new Client("4444444444444", "aaaaaa", Cloud.ironAWSUSEast);
Queue q = c.queue("test-queue");
q.push("test");
}
}
|
// ... existing code ...
import org.junit.Test;
public class IronMQTest {
@Test public void testClient() throws IOException {
Client c = new Client();
Queue q = c.queue("test-queue");
q.clear();
// ... rest of the code ...
|
e42d34e2e3163488daff15c5b584d5f3757d162f
|
unit_test/memory_unit_test.py
|
unit_test/memory_unit_test.py
|
import memory
import head
# import write_heads
from keras import backend as K
number_of_memory_locations = 6
memory_vector_size = 3
memory_t = memory.initial(number_of_memory_locations, memory_vector_size)
weight_t = K.random_binomial((number_of_memory_locations, 1), 0.2)
read_vector = head.reading(memory_t, weight_t)
print memory_t.shape
print weight_t.shape
print read_vector
|
from keras import backend as K
import theano.tensor as T
import theano
import memory
import head
#
# number_of_memory_locations = 6
# memory_vector_size = 3
#
# memory_t = memory.initial(number_of_memory_locations, memory_vector_size)
#
# weight_t = K.random_binomial((number_of_memory_locations, 1), 0.2)
#
# read_vector = head.reading(memory_t, weight_t)
#
# print memory_t.shape
# print weight_t.shape
# print read_vector
#
def logistic(x):
s = 1 / (1 + K.exp(x))
log = theano.function([x], s)
return log
# return s
x = [[0, 1], [-1, -2]]
print logistic(x)
|
Update code of NTM based on Keras.
|
Update code of NTM based on Keras.
|
Python
|
mit
|
SigmaQuan/NTM-Keras
|
python
|
## Code Before:
import memory
import head
# import write_heads
from keras import backend as K
number_of_memory_locations = 6
memory_vector_size = 3
memory_t = memory.initial(number_of_memory_locations, memory_vector_size)
weight_t = K.random_binomial((number_of_memory_locations, 1), 0.2)
read_vector = head.reading(memory_t, weight_t)
print memory_t.shape
print weight_t.shape
print read_vector
## Instruction:
Update code of NTM based on Keras.
## Code After:
from keras import backend as K
import theano.tensor as T
import theano
import memory
import head
#
# number_of_memory_locations = 6
# memory_vector_size = 3
#
# memory_t = memory.initial(number_of_memory_locations, memory_vector_size)
#
# weight_t = K.random_binomial((number_of_memory_locations, 1), 0.2)
#
# read_vector = head.reading(memory_t, weight_t)
#
# print memory_t.shape
# print weight_t.shape
# print read_vector
#
def logistic(x):
s = 1 / (1 + K.exp(x))
log = theano.function([x], s)
return log
# return s
x = [[0, 1], [-1, -2]]
print logistic(x)
|
# ... existing code ...
from keras import backend as K
import theano.tensor as T
import theano
import memory
import head
#
# number_of_memory_locations = 6
# memory_vector_size = 3
#
# memory_t = memory.initial(number_of_memory_locations, memory_vector_size)
#
# weight_t = K.random_binomial((number_of_memory_locations, 1), 0.2)
#
# read_vector = head.reading(memory_t, weight_t)
#
# print memory_t.shape
# print weight_t.shape
# print read_vector
#
def logistic(x):
s = 1 / (1 + K.exp(x))
log = theano.function([x], s)
return log
# return s
x = [[0, 1], [-1, -2]]
print logistic(x)
# ... rest of the code ...
|
db24629b7cc34f9a137c6bc5569dc7a39245fa52
|
thinglang/compiler/sentinels.py
|
thinglang/compiler/sentinels.py
|
from thinglang.compiler.opcodes import MEMBERS, METHODS, FRAME_SIZE, ARGUMENTS
from thinglang.compiler.opcodes import Opcode
class SentinelThingDefinition(Opcode):
"""
Signifies the start of thing definition
"""
ARGS = MEMBERS, METHODS
class SentinelMethodDefinition(Opcode):
"""
Signifies a method definition boundary.
"""
ARGS = FRAME_SIZE, ARGUMENTS
class SentinelMethodEnd(Opcode):
"""
Signifies a method boundary.
"""
pass
class SentinelCodeEnd(Opcode):
"""
Signifies the code section boundary.
"""
pass
class SentinelDataEnd(Opcode):
"""
Signifies the code section boundary.
"""
pass
|
from thinglang.compiler.opcodes import MEMBERS, METHODS, FRAME_SIZE, ARGUMENTS
from thinglang.compiler.opcodes import Opcode
class SentinelImportTableEntry(Opcode):
"""
Signifies an import table entry
"""
class SentinelImportTableEnd(Opcode):
"""
Signifies the end of the import table
"""
class SentinelThingDefinition(Opcode):
"""
Signifies the start of thing definition
"""
ARGS = MEMBERS, METHODS
class SentinelMethodDefinition(Opcode):
"""
Signifies a method definition boundary.
"""
ARGS = FRAME_SIZE, ARGUMENTS
class SentinelMethodEnd(Opcode):
"""
Signifies a method boundary.
"""
pass
class SentinelCodeEnd(Opcode):
"""
Signifies the code section boundary.
"""
pass
class SentinelDataEnd(Opcode):
"""
Signifies the code section boundary.
"""
pass
|
Add serialization sentinels for import table
|
Add serialization sentinels for import table
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
python
|
## Code Before:
from thinglang.compiler.opcodes import MEMBERS, METHODS, FRAME_SIZE, ARGUMENTS
from thinglang.compiler.opcodes import Opcode
class SentinelThingDefinition(Opcode):
"""
Signifies the start of thing definition
"""
ARGS = MEMBERS, METHODS
class SentinelMethodDefinition(Opcode):
"""
Signifies a method definition boundary.
"""
ARGS = FRAME_SIZE, ARGUMENTS
class SentinelMethodEnd(Opcode):
"""
Signifies a method boundary.
"""
pass
class SentinelCodeEnd(Opcode):
"""
Signifies the code section boundary.
"""
pass
class SentinelDataEnd(Opcode):
"""
Signifies the code section boundary.
"""
pass
## Instruction:
Add serialization sentinels for import table
## Code After:
from thinglang.compiler.opcodes import MEMBERS, METHODS, FRAME_SIZE, ARGUMENTS
from thinglang.compiler.opcodes import Opcode
class SentinelImportTableEntry(Opcode):
"""
Signifies an import table entry
"""
class SentinelImportTableEnd(Opcode):
"""
Signifies the end of the import table
"""
class SentinelThingDefinition(Opcode):
"""
Signifies the start of thing definition
"""
ARGS = MEMBERS, METHODS
class SentinelMethodDefinition(Opcode):
"""
Signifies a method definition boundary.
"""
ARGS = FRAME_SIZE, ARGUMENTS
class SentinelMethodEnd(Opcode):
"""
Signifies a method boundary.
"""
pass
class SentinelCodeEnd(Opcode):
"""
Signifies the code section boundary.
"""
pass
class SentinelDataEnd(Opcode):
"""
Signifies the code section boundary.
"""
pass
|
# ... existing code ...
from thinglang.compiler.opcodes import MEMBERS, METHODS, FRAME_SIZE, ARGUMENTS
from thinglang.compiler.opcodes import Opcode
class SentinelImportTableEntry(Opcode):
"""
Signifies an import table entry
"""
class SentinelImportTableEnd(Opcode):
"""
Signifies the end of the import table
"""
class SentinelThingDefinition(Opcode):
# ... rest of the code ...
|
9dd4da3d62312c5184150a967f7e4a3935c7b94e
|
moksha/tests/test_clientsockets.py
|
moksha/tests/test_clientsockets.py
|
import webtest
import moksha.tests.utils as testutils
from moksha.api.widgets.live import get_moksha_socket
from moksha.middleware import make_moksha_middleware
from tw2.core import make_middleware as make_tw2_middleware
class TestClientSocketDumb:
def _setUp(self):
def kernel(config):
def app(environ, start_response):
start_response('200 OK', [('Content-Type', 'text/html')])
socket = get_moksha_socket(config)
return map(str, [socket.display()])
app = make_moksha_middleware(app, config)
app = make_tw2_middleware(app, config)
app = webtest.TestApp(app)
self.app = app
for _setup, name in testutils.make_setup_functions(kernel):
yield _setup, name
def _tearDown(self):
pass
@testutils.crosstest
def test_middleware_wrap(self):
targets = ['moksha_websocket', 'TCPSocket']
response = self.app.get('/')
assert(any([target in response for target in targets]))
|
import webtest
import moksha.tests.utils as testutils
from moksha.api.widgets.live import get_moksha_socket
from moksha.middleware import make_moksha_middleware
from tw2.core import make_middleware as make_tw2_middleware
class TestClientSocketDumb:
def _setUp(self):
def kernel(config):
def app(environ, start_response):
start_response('200 OK', [('Content-Type', 'text/html')])
socket = get_moksha_socket(config)
return map(str, [socket.display()])
app = make_moksha_middleware(app, config)
app = make_tw2_middleware(app, config)
app = webtest.TestApp(app)
self.app = app
for _setup, name in testutils.make_setup_functions(kernel):
yield _setup, name
def _tearDown(self):
pass
@testutils.crosstest
def test_has_socket_str(self):
targets = ['moksha_websocket', 'TCPSocket']
response = self.app.get('/')
assert(any([target in response for target in targets]))
|
Rename test. Fix copy/pasta forgetfulness.
|
Rename test. Fix copy/pasta forgetfulness.
|
Python
|
apache-2.0
|
pombredanne/moksha,mokshaproject/moksha,mokshaproject/moksha,pombredanne/moksha,pombredanne/moksha,mokshaproject/moksha,mokshaproject/moksha,pombredanne/moksha
|
python
|
## Code Before:
import webtest
import moksha.tests.utils as testutils
from moksha.api.widgets.live import get_moksha_socket
from moksha.middleware import make_moksha_middleware
from tw2.core import make_middleware as make_tw2_middleware
class TestClientSocketDumb:
def _setUp(self):
def kernel(config):
def app(environ, start_response):
start_response('200 OK', [('Content-Type', 'text/html')])
socket = get_moksha_socket(config)
return map(str, [socket.display()])
app = make_moksha_middleware(app, config)
app = make_tw2_middleware(app, config)
app = webtest.TestApp(app)
self.app = app
for _setup, name in testutils.make_setup_functions(kernel):
yield _setup, name
def _tearDown(self):
pass
@testutils.crosstest
def test_middleware_wrap(self):
targets = ['moksha_websocket', 'TCPSocket']
response = self.app.get('/')
assert(any([target in response for target in targets]))
## Instruction:
Rename test. Fix copy/pasta forgetfulness.
## Code After:
import webtest
import moksha.tests.utils as testutils
from moksha.api.widgets.live import get_moksha_socket
from moksha.middleware import make_moksha_middleware
from tw2.core import make_middleware as make_tw2_middleware
class TestClientSocketDumb:
def _setUp(self):
def kernel(config):
def app(environ, start_response):
start_response('200 OK', [('Content-Type', 'text/html')])
socket = get_moksha_socket(config)
return map(str, [socket.display()])
app = make_moksha_middleware(app, config)
app = make_tw2_middleware(app, config)
app = webtest.TestApp(app)
self.app = app
for _setup, name in testutils.make_setup_functions(kernel):
yield _setup, name
def _tearDown(self):
pass
@testutils.crosstest
def test_has_socket_str(self):
targets = ['moksha_websocket', 'TCPSocket']
response = self.app.get('/')
assert(any([target in response for target in targets]))
|
...
pass
@testutils.crosstest
def test_has_socket_str(self):
targets = ['moksha_websocket', 'TCPSocket']
response = self.app.get('/')
assert(any([target in response for target in targets]))
...
|
7ec163011151826adc7ed23352250ce4accc2747
|
jre/javatests/com/google/gwt/emultest/AllTests.java
|
jre/javatests/com/google/gwt/emultest/AllTests.java
|
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.gwt.emultest;
import junit.framework.Test;
import junit.framework.TestSuite;
/** TestSuite for all of GWT's emul suites. */
public class AllTests {
public static Test suite() {
TestSuite suite = new TestSuite("All Emul tests");
suite.addTest(BigDecimalSuite.suite());
suite.addTest(BigIntegerSuite.suite());
suite.addTest(CollectionsSuite.suite());
suite.addTest(EmulSuite.suite());
suite.addTest(EmulJava8Suite.suite());
suite.addTest(TreeMapSuiteSub.suite());
suite.addTest(TreeSetSuiteSub.suite());
return suite;
}
}
|
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.gwt.emultest;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;
/** TestSuite for all of GWT's emul suites. */
@RunWith(Suite.class)
@SuiteClasses({
BigDecimalSuite.class,
BigIntegerSuite.class,
CollectionsSuite.class,
EmulSuite.class,
EmulJava8Suite.class,
})
public class AllTests {}
|
Use @RunWith(Suite.class) for emul test suites.
|
Import: Use @RunWith(Suite.class) for emul test suites.
...and update J2CL AllTests accordingly.
I will delete duplicated J2CL suites in another CL.
commit 032e49ff8d12170b00ac3232174f365df0182f4e
Author: Goktug Gokdogan <[email protected]>
Date: Wed Apr 12 20:06:35 2017 -0700
Use @RunWith(Suite.class) for emul test suites.
Change-Id: I2fc94ccf4298714dfee8ba9628f9137e30cdb8a0
PiperOrigin-RevId: 153108807
|
Java
|
apache-2.0
|
google/j2cl,google/j2cl,google/j2cl,google/j2cl,google/j2cl
|
java
|
## Code Before:
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.gwt.emultest;
import junit.framework.Test;
import junit.framework.TestSuite;
/** TestSuite for all of GWT's emul suites. */
public class AllTests {
public static Test suite() {
TestSuite suite = new TestSuite("All Emul tests");
suite.addTest(BigDecimalSuite.suite());
suite.addTest(BigIntegerSuite.suite());
suite.addTest(CollectionsSuite.suite());
suite.addTest(EmulSuite.suite());
suite.addTest(EmulJava8Suite.suite());
suite.addTest(TreeMapSuiteSub.suite());
suite.addTest(TreeSetSuiteSub.suite());
return suite;
}
}
## Instruction:
Import: Use @RunWith(Suite.class) for emul test suites.
...and update J2CL AllTests accordingly.
I will delete duplicated J2CL suites in another CL.
commit 032e49ff8d12170b00ac3232174f365df0182f4e
Author: Goktug Gokdogan <[email protected]>
Date: Wed Apr 12 20:06:35 2017 -0700
Use @RunWith(Suite.class) for emul test suites.
Change-Id: I2fc94ccf4298714dfee8ba9628f9137e30cdb8a0
PiperOrigin-RevId: 153108807
## Code After:
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.gwt.emultest;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;
/** TestSuite for all of GWT's emul suites. */
@RunWith(Suite.class)
@SuiteClasses({
BigDecimalSuite.class,
BigIntegerSuite.class,
CollectionsSuite.class,
EmulSuite.class,
EmulJava8Suite.class,
})
public class AllTests {}
|
// ... existing code ...
*/
package com.google.gwt.emultest;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;
/** TestSuite for all of GWT's emul suites. */
@RunWith(Suite.class)
@SuiteClasses({
BigDecimalSuite.class,
BigIntegerSuite.class,
CollectionsSuite.class,
EmulSuite.class,
EmulJava8Suite.class,
})
public class AllTests {}
// ... rest of the code ...
|
13ba4fba90f6ff654c26daf4a44d77bda3992b1f
|
model/__init__.py
|
model/__init__.py
|
import model.wu.user
from model.wu.user import User
def init_context(app):
model.wu.user.init_context(app)
# todo evaluate a parameter and decide which package to use (wu, hss, test(?))
|
import os
model_name = os.getenv('SIPA_MODEL', 'sample')
module = __import__('{}.{}.user'.format(__name__, model_name),
fromlist='{}.{}'.format(__name__, model_name))
init_context = module.init_context
User = module.User
query_gauge_data = module.query_gauge_data
|
Load model dynamically via envvar 'SIPA_MODEL'
|
Load model dynamically via envvar 'SIPA_MODEL'
|
Python
|
mit
|
lukasjuhrich/sipa,agdsn/sipa,agdsn/sipa,fgrsnau/sipa,agdsn/sipa,agdsn/sipa,MarauderXtreme/sipa,lukasjuhrich/sipa,lukasjuhrich/sipa,lukasjuhrich/sipa,fgrsnau/sipa,fgrsnau/sipa,MarauderXtreme/sipa,MarauderXtreme/sipa
|
python
|
## Code Before:
import model.wu.user
from model.wu.user import User
def init_context(app):
model.wu.user.init_context(app)
# todo evaluate a parameter and decide which package to use (wu, hss, test(?))
## Instruction:
Load model dynamically via envvar 'SIPA_MODEL'
## Code After:
import os
model_name = os.getenv('SIPA_MODEL', 'sample')
module = __import__('{}.{}.user'.format(__name__, model_name),
fromlist='{}.{}'.format(__name__, model_name))
init_context = module.init_context
User = module.User
query_gauge_data = module.query_gauge_data
|
// ... existing code ...
import os
model_name = os.getenv('SIPA_MODEL', 'sample')
module = __import__('{}.{}.user'.format(__name__, model_name),
fromlist='{}.{}'.format(__name__, model_name))
init_context = module.init_context
User = module.User
query_gauge_data = module.query_gauge_data
// ... rest of the code ...
|
f8b35e2a0cf092441efe1350871814fd347d3627
|
tests/classifier/LinearSVC/LinearSVCJavaTest.py
|
tests/classifier/LinearSVC/LinearSVCJavaTest.py
|
from unittest import TestCase
from sklearn.svm.classes import LinearSVC
from ..Classifier import Classifier
from ...language.Java import Java
class LinearSVCJavaTest(Java, Classifier, TestCase):
def setUp(self):
super(LinearSVCJavaTest, self).setUp()
self.mdl = LinearSVC(C=1., random_state=0)
def tearDown(self):
super(LinearSVCJavaTest, self).tearDown()
|
from unittest import TestCase
from sklearn.datasets import load_digits
from sklearn.decomposition import PCA, NMF
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.svm.classes import LinearSVC
from sklearn_porter import Porter
from ..Classifier import Classifier
from ...language.Java import Java
class LinearSVCJavaTest(Java, Classifier, TestCase):
def setUp(self):
super(LinearSVCJavaTest, self).setUp()
self.mdl = LinearSVC(C=1., random_state=0)
def tearDown(self):
super(LinearSVCJavaTest, self).tearDown()
def test_model_within_optimizer(self):
pipe = Pipeline([
('reduce_dim', PCA()),
('classify', LinearSVC())
])
n_features_options = [2, 4, 8]
c_options = [1, 10, 100, 1000]
param_grid = [
{
'reduce_dim': [PCA(iterated_power=7), NMF()],
'reduce_dim__n_components': n_features_options,
'classify__C': c_options
},
{
'reduce_dim': [SelectKBest(chi2)],
'reduce_dim__k': n_features_options,
'classify__C': c_options
},
]
grid = GridSearchCV(pipe, cv=3, n_jobs=1, param_grid=param_grid)
digits = load_digits()
grid.fit(digits.data, digits.target)
try:
Porter(grid, language='java')
except ValueError:
self.assertTrue(False)
else:
self.assertTrue(True)
|
Add test for using optimizers
|
Add test for using optimizers
|
Python
|
bsd-3-clause
|
nok/sklearn-porter
|
python
|
## Code Before:
from unittest import TestCase
from sklearn.svm.classes import LinearSVC
from ..Classifier import Classifier
from ...language.Java import Java
class LinearSVCJavaTest(Java, Classifier, TestCase):
def setUp(self):
super(LinearSVCJavaTest, self).setUp()
self.mdl = LinearSVC(C=1., random_state=0)
def tearDown(self):
super(LinearSVCJavaTest, self).tearDown()
## Instruction:
Add test for using optimizers
## Code After:
from unittest import TestCase
from sklearn.datasets import load_digits
from sklearn.decomposition import PCA, NMF
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.svm.classes import LinearSVC
from sklearn_porter import Porter
from ..Classifier import Classifier
from ...language.Java import Java
class LinearSVCJavaTest(Java, Classifier, TestCase):
def setUp(self):
super(LinearSVCJavaTest, self).setUp()
self.mdl = LinearSVC(C=1., random_state=0)
def tearDown(self):
super(LinearSVCJavaTest, self).tearDown()
def test_model_within_optimizer(self):
pipe = Pipeline([
('reduce_dim', PCA()),
('classify', LinearSVC())
])
n_features_options = [2, 4, 8]
c_options = [1, 10, 100, 1000]
param_grid = [
{
'reduce_dim': [PCA(iterated_power=7), NMF()],
'reduce_dim__n_components': n_features_options,
'classify__C': c_options
},
{
'reduce_dim': [SelectKBest(chi2)],
'reduce_dim__k': n_features_options,
'classify__C': c_options
},
]
grid = GridSearchCV(pipe, cv=3, n_jobs=1, param_grid=param_grid)
digits = load_digits()
grid.fit(digits.data, digits.target)
try:
Porter(grid, language='java')
except ValueError:
self.assertTrue(False)
else:
self.assertTrue(True)
|
// ... existing code ...
from unittest import TestCase
from sklearn.datasets import load_digits
from sklearn.decomposition import PCA, NMF
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.svm.classes import LinearSVC
from sklearn_porter import Porter
from ..Classifier import Classifier
from ...language.Java import Java
// ... modified code ...
def tearDown(self):
super(LinearSVCJavaTest, self).tearDown()
def test_model_within_optimizer(self):
pipe = Pipeline([
('reduce_dim', PCA()),
('classify', LinearSVC())
])
n_features_options = [2, 4, 8]
c_options = [1, 10, 100, 1000]
param_grid = [
{
'reduce_dim': [PCA(iterated_power=7), NMF()],
'reduce_dim__n_components': n_features_options,
'classify__C': c_options
},
{
'reduce_dim': [SelectKBest(chi2)],
'reduce_dim__k': n_features_options,
'classify__C': c_options
},
]
grid = GridSearchCV(pipe, cv=3, n_jobs=1, param_grid=param_grid)
digits = load_digits()
grid.fit(digits.data, digits.target)
try:
Porter(grid, language='java')
except ValueError:
self.assertTrue(False)
else:
self.assertTrue(True)
// ... rest of the code ...
|
c0b76d401b305c1bcd2ed5814a89719d4c6a3d83
|
heat_cfnclient/tests/test_cli.py
|
heat_cfnclient/tests/test_cli.py
|
import testtools
import heat_cfnclient
import os
import subprocess
basepath = os.path.join(heat_cfnclient.__path__[0], os.path.pardir)
class CliTest(testtools.TestCase):
def test_heat_cfn(self):
self.bin_run('heat-cfn')
def test_heat_boto(self):
self.bin_run('heat-boto')
def test_heat_watch(self):
self.bin_run('heat-watch')
def bin_run(self, bin):
fullpath = basepath + '/bin/' + bin
proc = subprocess.Popen(fullpath,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()
if proc.returncode:
print('Error executing %s:\n %s %s ' % (bin, stdout, stderr))
raise subprocess.CalledProcessError(proc.returncode, bin)
|
import testtools
import heat_cfnclient
import os
import subprocess
basepath = os.path.join(heat_cfnclient.__path__[0], os.path.pardir)
@testtools.skip
class CliTest(testtools.TestCase):
def test_heat_cfn(self):
self.bin_run('heat-cfn')
def test_heat_boto(self):
self.bin_run('heat-boto')
def test_heat_watch(self):
self.bin_run('heat-watch')
def bin_run(self, bin):
fullpath = basepath + '/bin/' + bin
proc = subprocess.Popen(fullpath,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()
if proc.returncode:
print('Error executing %s:\n %s %s ' % (bin, stdout, stderr))
raise subprocess.CalledProcessError(proc.returncode, bin)
|
Disable tests until new repo is stable
|
Disable tests until new repo is stable
Change-Id: Ic6932c1028c72b5600d03ab59102d1c1cff1b36c
|
Python
|
apache-2.0
|
openstack-dev/heat-cfnclient
|
python
|
## Code Before:
import testtools
import heat_cfnclient
import os
import subprocess
basepath = os.path.join(heat_cfnclient.__path__[0], os.path.pardir)
class CliTest(testtools.TestCase):
def test_heat_cfn(self):
self.bin_run('heat-cfn')
def test_heat_boto(self):
self.bin_run('heat-boto')
def test_heat_watch(self):
self.bin_run('heat-watch')
def bin_run(self, bin):
fullpath = basepath + '/bin/' + bin
proc = subprocess.Popen(fullpath,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()
if proc.returncode:
print('Error executing %s:\n %s %s ' % (bin, stdout, stderr))
raise subprocess.CalledProcessError(proc.returncode, bin)
## Instruction:
Disable tests until new repo is stable
Change-Id: Ic6932c1028c72b5600d03ab59102d1c1cff1b36c
## Code After:
import testtools
import heat_cfnclient
import os
import subprocess
basepath = os.path.join(heat_cfnclient.__path__[0], os.path.pardir)
@testtools.skip
class CliTest(testtools.TestCase):
def test_heat_cfn(self):
self.bin_run('heat-cfn')
def test_heat_boto(self):
self.bin_run('heat-boto')
def test_heat_watch(self):
self.bin_run('heat-watch')
def bin_run(self, bin):
fullpath = basepath + '/bin/' + bin
proc = subprocess.Popen(fullpath,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()
if proc.returncode:
print('Error executing %s:\n %s %s ' % (bin, stdout, stderr))
raise subprocess.CalledProcessError(proc.returncode, bin)
|
// ... existing code ...
basepath = os.path.join(heat_cfnclient.__path__[0], os.path.pardir)
@testtools.skip
class CliTest(testtools.TestCase):
def test_heat_cfn(self):
// ... rest of the code ...
|
af3525bf174d0774b61464f9cc8ab8441babc7ae
|
examples/flask_alchemy/test_demoapp.py
|
examples/flask_alchemy/test_demoapp.py
|
import os
import unittest
import tempfile
import demoapp
import demoapp_factories
class DemoAppTestCase(unittest.TestCase):
def setUp(self):
demoapp.app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
demoapp.app.config['TESTING'] = True
self.app = demoapp.app.test_client()
self.db = demoapp.db
self.db.create_all()
def tearDown(self):
self.db.drop_all()
def test_user_factory(self):
user = demoapp_factories.UserFactory()
self.db.session.commit()
self.assertIsNotNone(user.id)
self.assertEqual(1, len(demoapp.User.query.all()))
def test_userlog_factory(self):
userlog = demoapp_factories.UserLogFactory()
self.db.session.commit()
self.assertIsNotNone(userlog.id)
self.assertIsNotNone(userlog.user.id)
self.assertEqual(1, len(demoapp.User.query.all()))
self.assertEqual(1, len(demoapp.UserLog.query.all()))
|
import unittest
import demoapp
import demoapp_factories
class DemoAppTestCase(unittest.TestCase):
def setUp(self):
demoapp.app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
demoapp.app.config['TESTING'] = True
self.app = demoapp.app.test_client()
self.db = demoapp.db
self.db.create_all()
def tearDown(self):
self.db.drop_all()
def test_user_factory(self):
user = demoapp_factories.UserFactory()
self.db.session.commit()
self.assertIsNotNone(user.id)
self.assertEqual(1, len(demoapp.User.query.all()))
def test_userlog_factory(self):
userlog = demoapp_factories.UserLogFactory()
self.db.session.commit()
self.assertIsNotNone(userlog.id)
self.assertIsNotNone(userlog.user.id)
self.assertEqual(1, len(demoapp.User.query.all()))
self.assertEqual(1, len(demoapp.UserLog.query.all()))
|
Remove useless imports from flask alchemy demo
|
Remove useless imports from flask alchemy demo
|
Python
|
mit
|
FactoryBoy/factory_boy
|
python
|
## Code Before:
import os
import unittest
import tempfile
import demoapp
import demoapp_factories
class DemoAppTestCase(unittest.TestCase):
def setUp(self):
demoapp.app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
demoapp.app.config['TESTING'] = True
self.app = demoapp.app.test_client()
self.db = demoapp.db
self.db.create_all()
def tearDown(self):
self.db.drop_all()
def test_user_factory(self):
user = demoapp_factories.UserFactory()
self.db.session.commit()
self.assertIsNotNone(user.id)
self.assertEqual(1, len(demoapp.User.query.all()))
def test_userlog_factory(self):
userlog = demoapp_factories.UserLogFactory()
self.db.session.commit()
self.assertIsNotNone(userlog.id)
self.assertIsNotNone(userlog.user.id)
self.assertEqual(1, len(demoapp.User.query.all()))
self.assertEqual(1, len(demoapp.UserLog.query.all()))
## Instruction:
Remove useless imports from flask alchemy demo
## Code After:
import unittest
import demoapp
import demoapp_factories
class DemoAppTestCase(unittest.TestCase):
def setUp(self):
demoapp.app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
demoapp.app.config['TESTING'] = True
self.app = demoapp.app.test_client()
self.db = demoapp.db
self.db.create_all()
def tearDown(self):
self.db.drop_all()
def test_user_factory(self):
user = demoapp_factories.UserFactory()
self.db.session.commit()
self.assertIsNotNone(user.id)
self.assertEqual(1, len(demoapp.User.query.all()))
def test_userlog_factory(self):
userlog = demoapp_factories.UserLogFactory()
self.db.session.commit()
self.assertIsNotNone(userlog.id)
self.assertIsNotNone(userlog.user.id)
self.assertEqual(1, len(demoapp.User.query.all()))
self.assertEqual(1, len(demoapp.UserLog.query.all()))
|
# ... existing code ...
import unittest
import demoapp
import demoapp_factories
class DemoAppTestCase(unittest.TestCase):
# ... rest of the code ...
|
b79d3efea5f31ee74a5960522b0552fc9b43dc43
|
test/Parser/nullability.c
|
test/Parser/nullability.c
|
// RUN: %clang_cc1 -fsyntax-only -std=c99 -Wno-nullability-declspec -pedantic %s -verify
_Nonnull int *ptr; // expected-warning{{type nullability specifier '_Nonnull' is a Clang extension}}
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wnullability-extension"
_Nonnull int *ptr2; // no-warning
#pragma clang diagnostic pop
#if __has_feature(nullability)
# error Nullability should not be supported in C under -pedantic -std=c99
#endif
#if !__has_extension(nullability)
# error Nullability should always be supported as an extension
#endif
|
// RUN: %clang_cc1 -fsyntax-only -std=c99 -Wno-nullability-declspec -pedantic %s -verify
_Nonnull int *ptr; // expected-warning{{type nullability specifier '_Nonnull' is a Clang extension}}
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wnullability-extension"
_Nonnull int *ptr2; // no-warning
#pragma clang diagnostic pop
#if !__has_feature(nullability)
# error Nullability should always be supported
#endif
#if !__has_extension(nullability)
# error Nullability should always be supported as an extension
#endif
|
Fix a test case broken by my previous commit.
|
Fix a test case broken by my previous commit.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@240977 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang
|
c
|
## Code Before:
// RUN: %clang_cc1 -fsyntax-only -std=c99 -Wno-nullability-declspec -pedantic %s -verify
_Nonnull int *ptr; // expected-warning{{type nullability specifier '_Nonnull' is a Clang extension}}
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wnullability-extension"
_Nonnull int *ptr2; // no-warning
#pragma clang diagnostic pop
#if __has_feature(nullability)
# error Nullability should not be supported in C under -pedantic -std=c99
#endif
#if !__has_extension(nullability)
# error Nullability should always be supported as an extension
#endif
## Instruction:
Fix a test case broken by my previous commit.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@240977 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: %clang_cc1 -fsyntax-only -std=c99 -Wno-nullability-declspec -pedantic %s -verify
_Nonnull int *ptr; // expected-warning{{type nullability specifier '_Nonnull' is a Clang extension}}
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wnullability-extension"
_Nonnull int *ptr2; // no-warning
#pragma clang diagnostic pop
#if !__has_feature(nullability)
# error Nullability should always be supported
#endif
#if !__has_extension(nullability)
# error Nullability should always be supported as an extension
#endif
|
# ... existing code ...
_Nonnull int *ptr2; // no-warning
#pragma clang diagnostic pop
#if !__has_feature(nullability)
# error Nullability should always be supported
#endif
#if !__has_extension(nullability)
# ... rest of the code ...
|
6f968a4aa4048163dd55f927a32da2477cd8c1ff
|
tx_salaries/search_indexes.py
|
tx_salaries/search_indexes.py
|
from haystack import indexes
from tx_people.models import Organization
from tx_salaries.models import Employee
class EmployeeIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
content_auto = indexes.EdgeNgramField(model_attr='position__person__name')
compensation = indexes.FloatField(model_attr='compensation', null=True)
title = indexes.CharField(model_attr='title__name', faceted=True)
department = indexes.CharField(model_attr='position__organization__name', faceted=True)
entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True)
def get_model(self):
return Employee
|
from haystack import indexes
from tx_salaries.models import Employee
class EmployeeIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
content_auto = indexes.EdgeNgramField(model_attr='position__person__name')
compensation = indexes.FloatField(model_attr='compensation', null=True)
title = indexes.CharField(model_attr='title__name', faceted=True)
title_slug = indexes.CharField(model_attr='title__stats__slug', faceted=True)
department = indexes.CharField(model_attr='position__organization__name', faceted=True)
department_slug = indexes.CharField(model_attr='position__organization__stats__slug')
entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True)
entity_slug = indexes.CharField(model_attr='position__organization__parent__stats__slug')
def get_model(self):
return Employee
|
Index slugs to reduce search page queries
|
Index slugs to reduce search page queries
|
Python
|
apache-2.0
|
texastribune/tx_salaries,texastribune/tx_salaries
|
python
|
## Code Before:
from haystack import indexes
from tx_people.models import Organization
from tx_salaries.models import Employee
class EmployeeIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
content_auto = indexes.EdgeNgramField(model_attr='position__person__name')
compensation = indexes.FloatField(model_attr='compensation', null=True)
title = indexes.CharField(model_attr='title__name', faceted=True)
department = indexes.CharField(model_attr='position__organization__name', faceted=True)
entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True)
def get_model(self):
return Employee
## Instruction:
Index slugs to reduce search page queries
## Code After:
from haystack import indexes
from tx_salaries.models import Employee
class EmployeeIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
content_auto = indexes.EdgeNgramField(model_attr='position__person__name')
compensation = indexes.FloatField(model_attr='compensation', null=True)
title = indexes.CharField(model_attr='title__name', faceted=True)
title_slug = indexes.CharField(model_attr='title__stats__slug', faceted=True)
department = indexes.CharField(model_attr='position__organization__name', faceted=True)
department_slug = indexes.CharField(model_attr='position__organization__stats__slug')
entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True)
entity_slug = indexes.CharField(model_attr='position__organization__parent__stats__slug')
def get_model(self):
return Employee
|
# ... existing code ...
from haystack import indexes
from tx_salaries.models import Employee
# ... modified code ...
content_auto = indexes.EdgeNgramField(model_attr='position__person__name')
compensation = indexes.FloatField(model_attr='compensation', null=True)
title = indexes.CharField(model_attr='title__name', faceted=True)
title_slug = indexes.CharField(model_attr='title__stats__slug', faceted=True)
department = indexes.CharField(model_attr='position__organization__name', faceted=True)
department_slug = indexes.CharField(model_attr='position__organization__stats__slug')
entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True)
entity_slug = indexes.CharField(model_attr='position__organization__parent__stats__slug')
def get_model(self):
return Employee
# ... rest of the code ...
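A short usage sketch, separate from the record itself: with the slug fields stored in the index, a search view can build facet counts and detail-page links directly from Haystack results instead of issuing one ORM query per row. The SearchQuerySet calls follow Haystack's standard API; the query term and the way results are consumed here are illustrative assumptions, not code from this repository.

from haystack.query import SearchQuerySet
from tx_salaries.models import Employee

# Facet on the fields declared faceted=True above; the *_slug values are
# stored in the index, so no extra database hit is needed per result.
results = (SearchQuerySet().models(Employee)
           .filter(content='accountant')
           .facet('title')
           .facet('department'))

for hit in results[:10]:
    # hit.title_slug / hit.department_slug come straight from the index
    # and can be dropped into the detail-page URL without touching the ORM.
    print(hit.title, hit.title_slug, hit.department_slug)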
|
56e2bea0798ae3afbc50d53947e505e7df9edba3
|
config/invariant_checks.py
|
config/invariant_checks.py
|
from sts.invariant_checker import InvariantChecker
def check_for_loops_or_connectivity(simulation):
from sts.invariant_checker import InvariantChecker
result = InvariantChecker.check_loops(simulation)
if result:
return result
result = InvariantChecker.python_check_connectivity(simulation)
if not result:
print "Connectivity established - bailing out"
import sys
sys.exit(0)
return []
# Note: make sure to add new custom invariant checks to this dictionary!
name_to_invariant_check = {
"check_for_loops_or_connectivity" : check_for_loops_or_connectivity,
"InvariantChecker.check_liveness" : InvariantChecker.check_liveness,
"InvariantChecker.check_loops" : InvariantChecker.check_loops,
"InvariantChecker.python_check_connectivity" : InvariantChecker.python_check_connectivity,
"InvariantChecker.check_connectivity" : InvariantChecker.check_connectivity,
"InvariantChecker.check_blackholes" : InvariantChecker.check_blackholes,
"InvariantChecker.check_correspondence" : InvariantChecker.check_correspondence,
}
|
from sts.invariant_checker import InvariantChecker
import sys
def bail_on_connectivity(simulation):
result = InvariantChecker.python_check_connectivity(simulation)
if not result:
print "Connectivity established - bailing out"
sys.exit(0)
return []
def check_for_loops_or_connectivity(simulation):
result = InvariantChecker.check_loops(simulation)
if result:
return result
return bail_on_connectivity(simulation)
def check_for_loops_blackholes_or_connectivity(simulation):
for check in [InvariantChecker.check_loops, InvariantChecker.check_blackholes]:
result = check(simulation)
if result:
return result
return bail_on_connectivity(simulation)
# Note: make sure to add new custom invariant checks to this dictionary!
name_to_invariant_check = {
"check_for_loops_or_connectivity" : check_for_loops_or_connectivity,
"check_for_loops_blackholes_or_connectivity" : check_for_loops_blackholes_or_connectivity,
"InvariantChecker.check_liveness" : InvariantChecker.check_liveness,
"InvariantChecker.check_loops" : InvariantChecker.check_loops,
"InvariantChecker.python_check_connectivity" : InvariantChecker.python_check_connectivity,
"InvariantChecker.check_connectivity" : InvariantChecker.check_connectivity,
"InvariantChecker.check_blackholes" : InvariantChecker.check_blackholes,
"InvariantChecker.check_correspondence" : InvariantChecker.check_correspondence,
}
|
Add a new invariant check: check blackholes *or* loops
|
Add a new invariant check: check blackholes *or* loops
|
Python
|
apache-2.0
|
ucb-sts/sts,jmiserez/sts,jmiserez/sts,ucb-sts/sts
|
python
|
## Code Before:
from sts.invariant_checker import InvariantChecker
def check_for_loops_or_connectivity(simulation):
from sts.invariant_checker import InvariantChecker
result = InvariantChecker.check_loops(simulation)
if result:
return result
result = InvariantChecker.python_check_connectivity(simulation)
if not result:
print "Connectivity established - bailing out"
import sys
sys.exit(0)
return []
# Note: make sure to add new custom invariant checks to this dictionary!
name_to_invariant_check = {
"check_for_loops_or_connectivity" : check_for_loops_or_connectivity,
"InvariantChecker.check_liveness" : InvariantChecker.check_liveness,
"InvariantChecker.check_loops" : InvariantChecker.check_loops,
"InvariantChecker.python_check_connectivity" : InvariantChecker.python_check_connectivity,
"InvariantChecker.check_connectivity" : InvariantChecker.check_connectivity,
"InvariantChecker.check_blackholes" : InvariantChecker.check_blackholes,
"InvariantChecker.check_correspondence" : InvariantChecker.check_correspondence,
}
## Instruction:
Add a new invariant check: check blackholes *or* loops
## Code After:
from sts.invariant_checker import InvariantChecker
import sys
def bail_on_connectivity(simulation):
result = InvariantChecker.python_check_connectivity(simulation)
if not result:
print "Connectivity established - bailing out"
sys.exit(0)
return []
def check_for_loops_or_connectivity(simulation):
result = InvariantChecker.check_loops(simulation)
if result:
return result
return bail_on_connectivity(simulation)
def check_for_loops_blackholes_or_connectivity(simulation):
for check in [InvariantChecker.check_loops, InvariantChecker.check_blackholes]:
result = check(simulation)
if result:
return result
return bail_on_connectivity(simulation)
# Note: make sure to add new custom invariant checks to this dictionary!
name_to_invariant_check = {
"check_for_loops_or_connectivity" : check_for_loops_or_connectivity,
"check_for_loops_blackholes_or_connectivity" : check_for_loops_blackholes_or_connectivity,
"InvariantChecker.check_liveness" : InvariantChecker.check_liveness,
"InvariantChecker.check_loops" : InvariantChecker.check_loops,
"InvariantChecker.python_check_connectivity" : InvariantChecker.python_check_connectivity,
"InvariantChecker.check_connectivity" : InvariantChecker.check_connectivity,
"InvariantChecker.check_blackholes" : InvariantChecker.check_blackholes,
"InvariantChecker.check_correspondence" : InvariantChecker.check_correspondence,
}
|
...
from sts.invariant_checker import InvariantChecker
import sys
def bail_on_connectivity(simulation):
result = InvariantChecker.python_check_connectivity(simulation)
if not result:
print "Connectivity established - bailing out"
sys.exit(0)
return []
def check_for_loops_or_connectivity(simulation):
result = InvariantChecker.check_loops(simulation)
if result:
return result
return bail_on_connectivity(simulation)
def check_for_loops_blackholes_or_connectivity(simulation):
for check in [InvariantChecker.check_loops, InvariantChecker.check_blackholes]:
result = check(simulation)
if result:
return result
return bail_on_connectivity(simulation)
# Note: make sure to add new custom invariant checks to this dictionary!
name_to_invariant_check = {
"check_for_loops_or_connectivity" : check_for_loops_or_connectivity,
"check_for_loops_blackholes_or_connectivity" : check_for_loops_blackholes_or_connectivity,
"InvariantChecker.check_liveness" : InvariantChecker.check_liveness,
"InvariantChecker.check_loops" : InvariantChecker.check_loops,
"InvariantChecker.python_check_connectivity" : InvariantChecker.python_check_connectivity,
...
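A minimal dispatch sketch, assuming the registry above is consumed by a harness that carries the check name as a plain string; the function name and the simulation argument are assumptions for illustration, not code shown in this repository.

def run_named_invariant_check(check_name, simulation):
    # Resolve the callable registered under the configured name.
    invariant_check = name_to_invariant_check[check_name]
    # Every check returns a (possibly empty) list of violations;
    # bail_on_connectivity exits the process once end-to-end
    # connectivity is re-established, so an empty list here means
    # no loops or blackholes were observed on this round.
    return invariant_check(simulation)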
|
e209f568a4f70c65a7077ba210c119372c20204d
|
libaums/src/main/java/com/github/mjdev/libaums/usb/UsbCommunicationFactory.java
|
libaums/src/main/java/com/github/mjdev/libaums/usb/UsbCommunicationFactory.java
|
package com.github.mjdev.libaums.usb;
import android.hardware.usb.UsbDeviceConnection;
import android.hardware.usb.UsbEndpoint;
import android.os.Build;
import android.util.Log;
/**
* Created by magnusja on 21/12/16.
*/
public class UsbCommunicationFactory {
enum UnderlyingUsbCommunication {
USB_REQUEST,
DEVICE_CONNECTION
}
private static final String TAG = UsbCommunicationFactory.class.getSimpleName();
private static UnderlyingUsbCommunication underlyingUsbCommunication = UnderlyingUsbCommunication.USB_REQUEST;
public static UsbCommunication createUsbCommunication(UsbDeviceConnection deviceConnection, UsbEndpoint outEndpoint, UsbEndpoint inEndpoint) {
UsbCommunication communication;
if (underlyingUsbCommunication == UnderlyingUsbCommunication.DEVICE_CONNECTION) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
communication = new JellyBeanMr2Communication(deviceConnection, outEndpoint, inEndpoint);
} else {
Log.i(TAG, "using workaround usb communication");
communication = new HoneyCombMr1Communication(deviceConnection, outEndpoint, inEndpoint);
}
} else {
communication = new UsbRequestCommunication(deviceConnection, outEndpoint, inEndpoint);
}
return communication;
}
public static void setUnderlyingUsbCommunication(UnderlyingUsbCommunication underlyingUsbCommunication) {
UsbCommunicationFactory.underlyingUsbCommunication = underlyingUsbCommunication;
}
}
|
package com.github.mjdev.libaums.usb;
import android.hardware.usb.UsbDeviceConnection;
import android.hardware.usb.UsbEndpoint;
import android.os.Build;
import android.util.Log;
/**
* Created by magnusja on 21/12/16.
*/
public class UsbCommunicationFactory {
enum UnderlyingUsbCommunication {
USB_REQUEST_ASYNC,
DEVICE_CONNECTION_SYNC
}
private static final String TAG = UsbCommunicationFactory.class.getSimpleName();
private static UnderlyingUsbCommunication underlyingUsbCommunication = UnderlyingUsbCommunication.DEVICE_CONNECTION_SYNC;
public static UsbCommunication createUsbCommunication(UsbDeviceConnection deviceConnection, UsbEndpoint outEndpoint, UsbEndpoint inEndpoint) {
UsbCommunication communication;
if (underlyingUsbCommunication == UnderlyingUsbCommunication.DEVICE_CONNECTION_SYNC) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
communication = new JellyBeanMr2Communication(deviceConnection, outEndpoint, inEndpoint);
} else {
Log.i(TAG, "using workaround usb communication");
communication = new HoneyCombMr1Communication(deviceConnection, outEndpoint, inEndpoint);
}
} else {
communication = new UsbRequestCommunication(deviceConnection, outEndpoint, inEndpoint);
}
return communication;
}
public static void setUnderlyingUsbCommunication(UnderlyingUsbCommunication underlyingUsbCommunication) {
UsbCommunicationFactory.underlyingUsbCommunication = underlyingUsbCommunication;
}
}
|
Reset to device connection usb communication
|
Reset to device connection usb communication
|
Java
|
apache-2.0
|
magnusja/libaums,magnusja/libaums,mjdev/libaums,magnusja/libaums
|
java
|
## Code Before:
package com.github.mjdev.libaums.usb;
import android.hardware.usb.UsbDeviceConnection;
import android.hardware.usb.UsbEndpoint;
import android.os.Build;
import android.util.Log;
/**
* Created by magnusja on 21/12/16.
*/
public class UsbCommunicationFactory {
enum UnderlyingUsbCommunication {
USB_REQUEST,
DEVICE_CONNECTION
}
private static final String TAG = UsbCommunicationFactory.class.getSimpleName();
private static UnderlyingUsbCommunication underlyingUsbCommunication = UnderlyingUsbCommunication.USB_REQUEST;
public static UsbCommunication createUsbCommunication(UsbDeviceConnection deviceConnection, UsbEndpoint outEndpoint, UsbEndpoint inEndpoint) {
UsbCommunication communication;
if (underlyingUsbCommunication == UnderlyingUsbCommunication.DEVICE_CONNECTION) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
communication = new JellyBeanMr2Communication(deviceConnection, outEndpoint, inEndpoint);
} else {
Log.i(TAG, "using workaround usb communication");
communication = new HoneyCombMr1Communication(deviceConnection, outEndpoint, inEndpoint);
}
} else {
communication = new UsbRequestCommunication(deviceConnection, outEndpoint, inEndpoint);
}
return communication;
}
public static void setUnderlyingUsbCommunication(UnderlyingUsbCommunication underlyingUsbCommunication) {
UsbCommunicationFactory.underlyingUsbCommunication = underlyingUsbCommunication;
}
}
## Instruction:
Reset to device connection usb communication
## Code After:
package com.github.mjdev.libaums.usb;
import android.hardware.usb.UsbDeviceConnection;
import android.hardware.usb.UsbEndpoint;
import android.os.Build;
import android.util.Log;
/**
* Created by magnusja on 21/12/16.
*/
public class UsbCommunicationFactory {
enum UnderlyingUsbCommunication {
USB_REQUEST_ASYNC,
DEVICE_CONNECTION_SYNC
}
private static final String TAG = UsbCommunicationFactory.class.getSimpleName();
private static UnderlyingUsbCommunication underlyingUsbCommunication = UnderlyingUsbCommunication.DEVICE_CONNECTION_SYNC;
public static UsbCommunication createUsbCommunication(UsbDeviceConnection deviceConnection, UsbEndpoint outEndpoint, UsbEndpoint inEndpoint) {
UsbCommunication communication;
if (underlyingUsbCommunication == UnderlyingUsbCommunication.DEVICE_CONNECTION_SYNC) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
communication = new JellyBeanMr2Communication(deviceConnection, outEndpoint, inEndpoint);
} else {
Log.i(TAG, "using workaround usb communication");
communication = new HoneyCombMr1Communication(deviceConnection, outEndpoint, inEndpoint);
}
} else {
communication = new UsbRequestCommunication(deviceConnection, outEndpoint, inEndpoint);
}
return communication;
}
public static void setUnderlyingUsbCommunication(UnderlyingUsbCommunication underlyingUsbCommunication) {
UsbCommunicationFactory.underlyingUsbCommunication = underlyingUsbCommunication;
}
}
|
// ... existing code ...
public class UsbCommunicationFactory {
enum UnderlyingUsbCommunication {
USB_REQUEST_ASYNC,
DEVICE_CONNECTION_SYNC
}
private static final String TAG = UsbCommunicationFactory.class.getSimpleName();
private static UnderlyingUsbCommunication underlyingUsbCommunication = UnderlyingUsbCommunication.DEVICE_CONNECTION_SYNC;
public static UsbCommunication createUsbCommunication(UsbDeviceConnection deviceConnection, UsbEndpoint outEndpoint, UsbEndpoint inEndpoint) {
UsbCommunication communication;
if (underlyingUsbCommunication == UnderlyingUsbCommunication.DEVICE_CONNECTION_SYNC) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
communication = new JellyBeanMr2Communication(deviceConnection, outEndpoint, inEndpoint);
} else {
// ... rest of the code ...
|
086610926fb12b35881c06d40c295be81ddc3173
|
include/llvm/CodeGen/Passes.h
|
include/llvm/CodeGen/Passes.h
|
//===-- Passes.h - Target independent code generation passes ----*- C++ -*-===//
//
// This file defines interfaces to access the target independent code generation
// passes provided by the LLVM backend.
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_CODEGEN_PASSES_H
#define LLVM_CODEGEN_PASSES_H
class FunctionPass;
class PassInfo;
// PHIElimination pass - This pass eliminates machine instruction PHI nodes by
// inserting copy instructions. This destroys SSA information, but is the
// desired input for some register allocators. This pass is "required" by these
// register allocator like this: AU.addRequiredID(PHIEliminationID);
//
extern const PassInfo *PHIEliminationID;
/// SimpleRegisterAllocation Pass - This pass converts the input machine code
/// from SSA form to use explicit registers by spilling every register. Wow,
/// great policy huh?
///
FunctionPass *createSimpleRegisterAllocator();
/// LocalRegisterAllocation Pass - This pass register allocates the input code a
/// basic block at a time, yielding code better than the simple register
/// allocator, but not as good as a global allocator.
///
FunctionPass *createLocalRegisterAllocator();
/// PrologEpilogCodeInserter Pass - This pass inserts prolog and epilog code,
/// and eliminates abstract frame references.
///
FunctionPass *createPrologEpilogCodeInserter();
#endif
|
//===-- Passes.h - Target independent code generation passes ----*- C++ -*-===//
//
// This file defines interfaces to access the target independent code generation
// passes provided by the LLVM backend.
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_CODEGEN_PASSES_H
#define LLVM_CODEGEN_PASSES_H
class FunctionPass;
class PassInfo;
// PHIElimination pass - This pass eliminates machine instruction PHI nodes by
// inserting copy instructions. This destroys SSA information, but is the
// desired input for some register allocators. This pass is "required" by these
// register allocator like this: AU.addRequiredID(PHIEliminationID);
//
extern const PassInfo *PHIEliminationID;
/// SimpleRegisterAllocation Pass - This pass converts the input machine code
/// from SSA form to use explicit registers by spilling every register. Wow,
/// great policy huh?
///
FunctionPass *createSimpleRegisterAllocator();
/// LocalRegisterAllocation Pass - This pass register allocates the input code a
/// basic block at a time, yielding code better than the simple register
/// allocator, but not as good as a global allocator.
///
FunctionPass *createLocalRegisterAllocator();
/// PrologEpilogCodeInserter Pass - This pass inserts prolog and epilog code,
/// and eliminates abstract frame references.
///
FunctionPass *createPrologEpilogCodeInserter();
/// getRegisterAllocator - This creates an instance of the register allocator
/// for the Sparc.
FunctionPass *getRegisterAllocator(TargetMachine &T);
#endif
|
Include the sparc register in this file
|
Include the sparc register in this file
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@8794 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
bsd-2-clause
|
chubbymaggie/asap,llvm-mirror/llvm,llvm-mirror/llvm,dslab-epfl/asap,chubbymaggie/asap,apple/swift-llvm,GPUOpen-Drivers/llvm,chubbymaggie/asap,dslab-epfl/asap,apple/swift-llvm,llvm-mirror/llvm,chubbymaggie/asap,chubbymaggie/asap,apple/swift-llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,llvm-mirror/llvm,dslab-epfl/asap,GPUOpen-Drivers/llvm,apple/swift-llvm,dslab-epfl/asap,dslab-epfl/asap,apple/swift-llvm,apple/swift-llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,dslab-epfl/asap,chubbymaggie/asap,apple/swift-llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,dslab-epfl/asap
|
c
|
## Code Before:
//===-- Passes.h - Target independent code generation passes ----*- C++ -*-===//
//
// This file defines interfaces to access the target independent code generation
// passes provided by the LLVM backend.
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_CODEGEN_PASSES_H
#define LLVM_CODEGEN_PASSES_H
class FunctionPass;
class PassInfo;
// PHIElimination pass - This pass eliminates machine instruction PHI nodes by
// inserting copy instructions. This destroys SSA information, but is the
// desired input for some register allocators. This pass is "required" by these
// register allocator like this: AU.addRequiredID(PHIEliminationID);
//
extern const PassInfo *PHIEliminationID;
/// SimpleRegisterAllocation Pass - This pass converts the input machine code
/// from SSA form to use explicit registers by spilling every register. Wow,
/// great policy huh?
///
FunctionPass *createSimpleRegisterAllocator();
/// LocalRegisterAllocation Pass - This pass register allocates the input code a
/// basic block at a time, yielding code better than the simple register
/// allocator, but not as good as a global allocator.
///
FunctionPass *createLocalRegisterAllocator();
/// PrologEpilogCodeInserter Pass - This pass inserts prolog and epilog code,
/// and eliminates abstract frame references.
///
FunctionPass *createPrologEpilogCodeInserter();
#endif
## Instruction:
Include the sparc register in this file
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@8794 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
//===-- Passes.h - Target independent code generation passes ----*- C++ -*-===//
//
// This file defines interfaces to access the target independent code generation
// passes provided by the LLVM backend.
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_CODEGEN_PASSES_H
#define LLVM_CODEGEN_PASSES_H
class FunctionPass;
class PassInfo;
// PHIElimination pass - This pass eliminates machine instruction PHI nodes by
// inserting copy instructions. This destroys SSA information, but is the
// desired input for some register allocators. This pass is "required" by these
// register allocator like this: AU.addRequiredID(PHIEliminationID);
//
extern const PassInfo *PHIEliminationID;
/// SimpleRegisterAllocation Pass - This pass converts the input machine code
/// from SSA form to use explicit registers by spilling every register. Wow,
/// great policy huh?
///
FunctionPass *createSimpleRegisterAllocator();
/// LocalRegisterAllocation Pass - This pass register allocates the input code a
/// basic block at a time, yielding code better than the simple register
/// allocator, but not as good as a global allocator.
///
FunctionPass *createLocalRegisterAllocator();
/// PrologEpilogCodeInserter Pass - This pass inserts prolog and epilog code,
/// and eliminates abstract frame references.
///
FunctionPass *createPrologEpilogCodeInserter();
/// getRegisterAllocator - This creates an instance of the register allocator
/// for the Sparc.
FunctionPass *getRegisterAllocator(TargetMachine &T);
#endif
|
// ... existing code ...
///
FunctionPass *createPrologEpilogCodeInserter();
/// getRegisterAllocator - This creates an instance of the register allocator
/// for the Sparc.
FunctionPass *getRegisterAllocator(TargetMachine &T);
#endif
// ... rest of the code ...
|