| commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos | ndiff | instruction | content | fuzzy_diff |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 40-40 | stringlengths 4-118 | stringlengths 4-118 | stringlengths 10-2.94k | stringlengths 21-3.18k | stringlengths 16-444 | stringlengths 17-2.63k | stringclasses 1 value | stringclasses 13 values | stringlengths 5-43k | stringlengths 52-3.32k | stringlengths 16-444 | stringlengths 133-4.32k | stringlengths 16-3.18k |
bf6f77d90c3749983eb0b5358fb2f9fedb7d53da
|
app/main.py
|
app/main.py
|
import spark
import motion
from bot import process_command
from config import config
from flask import Flask
from flask import request
from flask import jsonify
from threading import Thread
import time
import sys

app = Flask(__name__)

def on_motion_detected():
    print("motion detected!")

def run_motion_detection():
    print("hello")
    motion.detector_on(on_motion_detected)

def run_flask_server():
    app.run(host='0.0.0.0', port=8181)

@app.route("/", methods=["post"])
def index():
    # Parse request
    webhook_req = request.get_json()
    message = spark.get_message(message_id=webhook_req['data']['id'], bearer=config["bearer"])
    if message["personEmail"] != config["bot_email"]:
        res = process_command(message["command"])
        if res["response_required"]:
            spark.send_message(message["roomId"], res["data"], config["bearer"])
    return jsonify({})

if __name__ == "__main__":
    motion_thread = Thread(target = run_motion_detection)
    flask_thread = Thread(target = run_flask_server)
    motion_thread.daemon = True
    flask_thread.daemon = True
    motion_thread.start()
    flask_thread.start()
|
import spark
import motion
from bot import process_command
from config import config
from flask import Flask
from flask import request
from flask import jsonify
from threading import Thread
import time
import sys

app = Flask(__name__)

def on_motion_detected():
    print("motion detected!")

def run_motion_detection():
    print("hello")
    motion.detector_on(on_motion_detected)

def run_flask_server():
    app.run(host='0.0.0.0', port=8181)

@app.route("/", methods=["post"])
def index():
    # Parse request
    webhook_req = request.get_json()
    message = spark.get_message(message_id=webhook_req['data']['id'], bearer=config["bearer"])
    if message["personEmail"] != config["bot_email"]:
        res = process_command(message["command"])
        if res["response_required"]:
            spark.send_message(message["roomId"], res["data"], config["bearer"])
    return jsonify({})

if __name__ == "__main__":
    motion_thread = Thread(target = run_motion_detection)
    motion_thread.daemon = True
    motion_thread.start()
    app.run(host='0.0.0.0', port=8080)
|
Use Python 3.6 compatible API for threading
|
Use Python 3.6 compatible API for threading
|
Python
|
mit
|
alwye/spark-pi,alwye/spark-pi
|
import spark
import motion
from bot import process_command
from config import config
from flask import Flask
from flask import request
from flask import jsonify
from threading import Thread
import time
import sys
app = Flask(__name__)
def on_motion_detected():
    print("motion detected!")
def run_motion_detection():
    print("hello")
    motion.detector_on(on_motion_detected)
def run_flask_server():
    app.run(host='0.0.0.0', port=8181)
@app.route("/", methods=["post"])
def index():
    # Parse request
    webhook_req = request.get_json()
    message = spark.get_message(message_id=webhook_req['data']['id'], bearer=config["bearer"])
    if message["personEmail"] != config["bot_email"]:
        res = process_command(message["command"])
        if res["response_required"]:
            spark.send_message(message["roomId"], res["data"], config["bearer"])
    return jsonify({})
if __name__ == "__main__":
    motion_thread = Thread(target = run_motion_detection)
-     flask_thread = Thread(target = run_flask_server)
    motion_thread.daemon = True
-     flask_thread.daemon = True
    motion_thread.start()
-     flask_thread.start()
+     app.run(host='0.0.0.0', port=8080)
|
Use Python 3.6 compatible API for threading
|
## Code Before:
import spark
import motion
from bot import process_command
from config import config
from flask import Flask
from flask import request
from flask import jsonify
from threading import Thread
import time
import sys

app = Flask(__name__)

def on_motion_detected():
    print("motion detected!")

def run_motion_detection():
    print("hello")
    motion.detector_on(on_motion_detected)

def run_flask_server():
    app.run(host='0.0.0.0', port=8181)

@app.route("/", methods=["post"])
def index():
    # Parse request
    webhook_req = request.get_json()
    message = spark.get_message(message_id=webhook_req['data']['id'], bearer=config["bearer"])
    if message["personEmail"] != config["bot_email"]:
        res = process_command(message["command"])
        if res["response_required"]:
            spark.send_message(message["roomId"], res["data"], config["bearer"])
    return jsonify({})

if __name__ == "__main__":
    motion_thread = Thread(target = run_motion_detection)
    flask_thread = Thread(target = run_flask_server)
    motion_thread.daemon = True
    flask_thread.daemon = True
    motion_thread.start()
    flask_thread.start()
## Instruction:
Use Python 3.6 compatible API for threading
## Code After:
import spark
import motion
from bot import process_command
from config import config
from flask import Flask
from flask import request
from flask import jsonify
from threading import Thread
import time
import sys

app = Flask(__name__)

def on_motion_detected():
    print("motion detected!")

def run_motion_detection():
    print("hello")
    motion.detector_on(on_motion_detected)

def run_flask_server():
    app.run(host='0.0.0.0', port=8181)

@app.route("/", methods=["post"])
def index():
    # Parse request
    webhook_req = request.get_json()
    message = spark.get_message(message_id=webhook_req['data']['id'], bearer=config["bearer"])
    if message["personEmail"] != config["bot_email"]:
        res = process_command(message["command"])
        if res["response_required"]:
            spark.send_message(message["roomId"], res["data"], config["bearer"])
    return jsonify({})

if __name__ == "__main__":
    motion_thread = Thread(target = run_motion_detection)
    motion_thread.daemon = True
    motion_thread.start()
    app.run(host='0.0.0.0', port=8080)
|
# ... existing code ...
    motion_thread = Thread(target = run_motion_detection)
    motion_thread.daemon = True
    motion_thread.start()
    app.run(host='0.0.0.0', port=8080)
# ... rest of the code ...
|
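The row above drops the daemonized Flask thread and hands the blocking server loop back to the main thread. As a hedged aside, here is a minimal stdlib-only sketch of the surviving pattern; the worker body is a stand-in, not the repo's motion code:

```python
# Minimal sketch, not taken from the repo: one daemon thread for background
# work while the main thread owns the blocking server loop.
from threading import Thread
import time

def background_worker():
    while True:
        time.sleep(1)  # stand-in for motion-detection polling

# Setting .daemon before .start() (as in the row above) works on every
# Python version; Thread(..., daemon=True) is the equivalent keyword form.
worker = Thread(target=background_worker)
worker.daemon = True
worker.start()

time.sleep(2)  # stand-in for the blocking app.run(...) call
```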
e9031ab6091e3b9d7866c300c8e21b9e81e7e935
|
api/collections/urls.py
|
api/collections/urls.py
|
from django.conf.urls import url

from api.collections import views
from website import settings

urlpatterns = []

# Routes only active in local/staging environments
if settings.DEV_MODE:
    urlpatterns.extend([
        url(r'^$', views.CollectionList.as_view(), name='collection-list'),
        url(r'^(?P<collection_id>\w+)/$', views.CollectionDetail.as_view(), name='collection-detail'),
        url(r'^(?P<collection_id>\w+)/linked_nodes/$', views.LinkedNodesList.as_view(), name='linked-nodes'),
        url(r'^(?P<collection_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
        url(r'^(?P<collection_id>\w+)/node_links/(?P<node_link_id>\w+)/', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
    ])
|
from django.conf.urls import url

from api.collections import views

urlpatterns = [
    url(r'^$', views.CollectionList.as_view(), name='collection-list'),
    url(r'^(?P<collection_id>\w+)/$', views.CollectionDetail.as_view(), name='collection-detail'),
    url(r'^(?P<collection_id>\w+)/linked_nodes/$', views.LinkedNodesList.as_view(), name='linked-nodes'),
    url(r'^(?P<collection_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
    url(r'^(?P<collection_id>\w+)/node_links/(?P<node_link_id>\w+)/', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
|
Remove DEV ONLY on the sub view since the super already has it
|
Remove DEV ONLY on the sub view since the super already has it
|
Python
|
apache-2.0
|
cslzchen/osf.io,leb2dg/osf.io,kch8qx/osf.io,TomBaxter/osf.io,wearpants/osf.io,laurenrevere/osf.io,chrisseto/osf.io,cslzchen/osf.io,mluo613/osf.io,rdhyee/osf.io,mluke93/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,billyhunt/osf.io,mluo613/osf.io,caseyrollins/osf.io,zachjanicki/osf.io,jnayak1/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,ticklemepierce/osf.io,alexschiller/osf.io,aaxelb/osf.io,acshi/osf.io,kwierman/osf.io,chennan47/osf.io,ZobairAlijan/osf.io,mluke93/osf.io,HalcyonChimera/osf.io,billyhunt/osf.io,cslzchen/osf.io,jnayak1/osf.io,alexschiller/osf.io,GageGaskins/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,samanehsan/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,GageGaskins/osf.io,kwierman/osf.io,CenterForOpenScience/osf.io,danielneis/osf.io,brianjgeiger/osf.io,monikagrabowska/osf.io,crcresearch/osf.io,asanfilippo7/osf.io,TomBaxter/osf.io,caneruguz/osf.io,samanehsan/osf.io,mattclark/osf.io,binoculars/osf.io,alexschiller/osf.io,danielneis/osf.io,abought/osf.io,cwisecarver/osf.io,adlius/osf.io,pattisdr/osf.io,mfraezz/osf.io,saradbowman/osf.io,TomHeatwole/osf.io,kch8qx/osf.io,erinspace/osf.io,emetsger/osf.io,chrisseto/osf.io,acshi/osf.io,kwierman/osf.io,cwisecarver/osf.io,doublebits/osf.io,samchrisinger/osf.io,samchrisinger/osf.io,caseyrygt/osf.io,erinspace/osf.io,brianjgeiger/osf.io,emetsger/osf.io,abought/osf.io,monikagrabowska/osf.io,Johnetordoff/osf.io,felliott/osf.io,KAsante95/osf.io,mluke93/osf.io,crcresearch/osf.io,KAsante95/osf.io,GageGaskins/osf.io,RomanZWang/osf.io,Ghalko/osf.io,SSJohns/osf.io,asanfilippo7/osf.io,wearpants/osf.io,billyhunt/osf.io,Nesiehr/osf.io,aaxelb/osf.io,doublebits/osf.io,GageGaskins/osf.io,billyhunt/osf.io,caneruguz/osf.io,caseyrollins/osf.io,felliott/osf.io,ZobairAlijan/osf.io,acshi/osf.io,leb2dg/osf.io,ticklemepierce/osf.io,ticklemepierce/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,zachjanicki/osf.io,samchrisinger/osf.io,doublebits/osf.io,abought/osf.io,Nesiehr/osf.io,mluo613/osf.io,RomanZWang/osf.io,mluo613/osf.io,brandonPurvis/osf.io,wearpants/osf.io,leb2dg/osf.io,chennan47/osf.io,sloria/osf.io,mfraezz/osf.io,sloria/osf.io,doublebits/osf.io,caneruguz/osf.io,kwierman/osf.io,samanehsan/osf.io,chrisseto/osf.io,zamattiac/osf.io,ticklemepierce/osf.io,Nesiehr/osf.io,binoculars/osf.io,TomHeatwole/osf.io,mluo613/osf.io,Ghalko/osf.io,laurenrevere/osf.io,baylee-d/osf.io,mluke93/osf.io,hmoco/osf.io,KAsante95/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,felliott/osf.io,amyshi188/osf.io,zamattiac/osf.io,brandonPurvis/osf.io,samchrisinger/osf.io,zamattiac/osf.io,caseyrygt/osf.io,TomHeatwole/osf.io,icereval/osf.io,baylee-d/osf.io,erinspace/osf.io,ZobairAlijan/osf.io,acshi/osf.io,SSJohns/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,doublebits/osf.io,SSJohns/osf.io,cwisecarver/osf.io,caneruguz/osf.io,DanielSBrown/osf.io,ZobairAlijan/osf.io,samanehsan/osf.io,aaxelb/osf.io,rdhyee/osf.io,amyshi188/osf.io,chrisseto/osf.io,asanfilippo7/osf.io,caseyrollins/osf.io,kch8qx/osf.io,caseyrygt/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,zamattiac/osf.io,rdhyee/osf.io,icereval/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,saradbowman/osf.io,cslzchen/osf.io,RomanZWang/osf.io,GageGaskins/osf.io,emetsger/osf.io,KAsante95/osf.io,mfraezz/osf.io,Ghalko/osf.io,brandonPurvis/osf.io,sloria/osf.io,billyhunt/osf.io,laurenrevere/osf.io,pattisdr/osf.io,emetsger/osf.io,alexschiller/osf.io,abought/osf.io,zachjanicki/osf.io,icereval/osf.io,jnayak1/osf.io,danielneis/osf.io,RomanZWang/osf.io,mattclark/osf.io,danielneis/osf.io,asanfilippo7/osf.io,monikagrabowska/osf.io,adlius/osf.io,mattclark/osf.io,rdhyee/osf.io,hmoco/osf.io,brandonPurvis/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,brandonPurvis/osf.io,RomanZWang/osf.io,kch8qx/osf.io,baylee-d/osf.io,caseyrygt/osf.io,acshi/osf.io,cwisecarver/osf.io,Ghalko/osf.io,aaxelb/osf.io,KAsante95/osf.io,kch8qx/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,zachjanicki/osf.io,wearpants/osf.io,jnayak1/osf.io,binoculars/osf.io,monikagrabowska/osf.io,felliott/osf.io,hmoco/osf.io,SSJohns/osf.io,TomBaxter/osf.io,DanielSBrown/osf.io,crcresearch/osf.io
|
from django.conf.urls import url
from api.collections import views
- from website import settings
- urlpatterns = []
+ urlpatterns = [
+     url(r'^$', views.CollectionList.as_view(), name='collection-list'),
+     url(r'^(?P<collection_id>\w+)/$', views.CollectionDetail.as_view(), name='collection-detail'),
+     url(r'^(?P<collection_id>\w+)/linked_nodes/$', views.LinkedNodesList.as_view(), name='linked-nodes'),
+     url(r'^(?P<collection_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
+     url(r'^(?P<collection_id>\w+)/node_links/(?P<node_link_id>\w+)/', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
+ ]
- # Routes only active in local/staging environments
- if settings.DEV_MODE:
-     urlpatterns.extend([
-         url(r'^$', views.CollectionList.as_view(), name='collection-list'),
-         url(r'^(?P<collection_id>\w+)/$', views.CollectionDetail.as_view(), name='collection-detail'),
-         url(r'^(?P<collection_id>\w+)/linked_nodes/$', views.LinkedNodesList.as_view(), name='linked-nodes'),
-         url(r'^(?P<collection_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
-         url(r'^(?P<collection_id>\w+)/node_links/(?P<node_link_id>\w+)/', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
-     ])
-
|
Remove DEV ONLY on the sub view since the super already has it
|
## Code Before:
from django.conf.urls import url

from api.collections import views
from website import settings

urlpatterns = []

# Routes only active in local/staging environments
if settings.DEV_MODE:
    urlpatterns.extend([
        url(r'^$', views.CollectionList.as_view(), name='collection-list'),
        url(r'^(?P<collection_id>\w+)/$', views.CollectionDetail.as_view(), name='collection-detail'),
        url(r'^(?P<collection_id>\w+)/linked_nodes/$', views.LinkedNodesList.as_view(), name='linked-nodes'),
        url(r'^(?P<collection_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
        url(r'^(?P<collection_id>\w+)/node_links/(?P<node_link_id>\w+)/', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
    ])
## Instruction:
Remove DEV ONLY on the sub view since the super already has it
## Code After:
from django.conf.urls import url

from api.collections import views

urlpatterns = [
    url(r'^$', views.CollectionList.as_view(), name='collection-list'),
    url(r'^(?P<collection_id>\w+)/$', views.CollectionDetail.as_view(), name='collection-detail'),
    url(r'^(?P<collection_id>\w+)/linked_nodes/$', views.LinkedNodesList.as_view(), name='linked-nodes'),
    url(r'^(?P<collection_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
    url(r'^(?P<collection_id>\w+)/node_links/(?P<node_link_id>\w+)/', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
|
# ... existing code ...
from api.collections import views
urlpatterns = [
    url(r'^$', views.CollectionList.as_view(), name='collection-list'),
    url(r'^(?P<collection_id>\w+)/$', views.CollectionDetail.as_view(), name='collection-detail'),
    url(r'^(?P<collection_id>\w+)/linked_nodes/$', views.LinkedNodesList.as_view(), name='linked-nodes'),
    url(r'^(?P<collection_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
    url(r'^(?P<collection_id>\w+)/node_links/(?P<node_link_id>\w+)/', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
# ... rest of the code ...
|
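A hedged illustration of why the child module can drop its own `DEV_MODE` guard: if the parent URLconf only `include()`s `api.collections.urls` when the flag is set, the sub-patterns are already unreachable outside dev. The parent file below is an assumption for illustration, not OSF's actual root URLconf:

```python
# Hypothetical parent urls.py; gating the include() here makes a second
# DEV_MODE check inside api/collections/urls.py redundant.
from django.conf.urls import include, url  # Django <2.0 style, as in the row
from website import settings  # assumption: same flag the child used to import

urlpatterns = []

if settings.DEV_MODE:
    urlpatterns.append(
        url(r'^collections/', include('api.collections.urls')),
    )
```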
d1c18841d8a028f76283b9779da61d482df75973
|
plumeria/plugins/youtube.py
|
plumeria/plugins/youtube.py
|
from plumeria.api.youtube import YouTube
from plumeria.command import commands, CommandError, channel_only
from plumeria.message import Response
from plumeria.util.ratelimit import rate_limit

youtube = YouTube()

@commands.register('youtube', 'yt', 'ytsearch', cost=2, category='Search')
@channel_only
@rate_limit()
async def yt(message):
    """
    Search YouTube for a video.

    Example::

        /yt knuckle puck copacetic

    """
    if len(message.content.strip()):
        videos = await youtube.search(message.content)
        if len(videos):
            return Response(videos[0].url)
        else:
            raise CommandError("No video found!")
|
from plumeria.api.youtube import YouTube
from plumeria.command import commands, CommandError, channel_only
from plumeria.message import Response
from plumeria.util.ratelimit import rate_limit

youtube = YouTube()

@commands.register('youtube', 'yt', 'ytsearch', cost=2, category='Search')
@rate_limit()
async def yt(message):
    """
    Search YouTube for a video.

    Example::

        /yt knuckle puck copacetic

    """
    if len(message.content.strip()):
        videos = await youtube.search(message.content)
        if len(videos):
            return Response(videos[0].url)
        else:
            raise CommandError("No video found!")
|
Allow YouTube plugin to be used in PMs.
|
Allow YouTube plugin to be used in PMs.
|
Python
|
mit
|
sk89q/Plumeria,sk89q/Plumeria,sk89q/Plumeria
|
from plumeria.api.youtube import YouTube
from plumeria.command import commands, CommandError, channel_only
from plumeria.message import Response
from plumeria.util.ratelimit import rate_limit
youtube = YouTube()
@commands.register('youtube', 'yt', 'ytsearch', cost=2, category='Search')
- @channel_only
@rate_limit()
async def yt(message):
    """
    Search YouTube for a video.
    Example::
        /yt knuckle puck copacetic
    """
    if len(message.content.strip()):
        videos = await youtube.search(message.content)
        if len(videos):
            return Response(videos[0].url)
        else:
            raise CommandError("No video found!")
|
Allow YouTube plugin to be used in PMs.
|
## Code Before:
from plumeria.api.youtube import YouTube
from plumeria.command import commands, CommandError, channel_only
from plumeria.message import Response
from plumeria.util.ratelimit import rate_limit

youtube = YouTube()

@commands.register('youtube', 'yt', 'ytsearch', cost=2, category='Search')
@channel_only
@rate_limit()
async def yt(message):
    """
    Search YouTube for a video.

    Example::

        /yt knuckle puck copacetic

    """
    if len(message.content.strip()):
        videos = await youtube.search(message.content)
        if len(videos):
            return Response(videos[0].url)
        else:
            raise CommandError("No video found!")
## Instruction:
Allow YouTube plugin to be used in PMs.
## Code After:
from plumeria.api.youtube import YouTube
from plumeria.command import commands, CommandError, channel_only
from plumeria.message import Response
from plumeria.util.ratelimit import rate_limit

youtube = YouTube()

@commands.register('youtube', 'yt', 'ytsearch', cost=2, category='Search')
@rate_limit()
async def yt(message):
    """
    Search YouTube for a video.

    Example::

        /yt knuckle puck copacetic

    """
    if len(message.content.strip()):
        videos = await youtube.search(message.content)
        if len(videos):
            return Response(videos[0].url)
        else:
            raise CommandError("No video found!")
|
// ... existing code ...
@commands.register('youtube', 'yt', 'ytsearch', cost=2, category='Search')
@rate_limit()
// ... rest of the code ...
|
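For context, a sketch of what a guard like `@channel_only` plausibly does; this is an assumption about plumeria's internals, not its actual code. Removing it from the command is what lets the handler run for private messages:

```python
# Hypothetical guard decorator: reject messages that arrive outside a channel.
import functools

class CommandError(Exception):
    pass

def channel_only(func):
    @functools.wraps(func)
    async def wrapper(message, *args, **kwargs):
        # assumption: PMs carry no channel attribute in this bot framework
        if getattr(message, "channel", None) is None:
            raise CommandError("This command only works in a channel.")
        return await func(message, *args, **kwargs)
    return wrapper
```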
66cdb36231ff1192a8a2e6b15c4b8d524cfbff6d
|
powerline/renderers/pango_markup.py
|
powerline/renderers/pango_markup.py
|
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE

from xmlrpclib import escape as _escape

class PangoMarkupRenderer(Renderer):
    '''Powerline Pango markup segment renderer.'''

    @staticmethod
    def hlstyle(*args, **kwargs):
        # We don't need to explicitly reset attributes, so skip those calls
        return ''

    def hl(self, contents, fg=None, bg=None, attr=None):
        '''Highlight a segment.'''
        awesome_attr = []
        if fg is not None:
            if fg is not False and fg[1] is not False:
                awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
        if bg is not None:
            if bg is not False and bg[1] is not False:
                awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
        if attr is not None and attr is not False:
            if attr & ATTR_BOLD:
                awesome_attr += ['font_weight="bold"']
            if attr & ATTR_ITALIC:
                awesome_attr += ['font_style="italic"']
            if attr & ATTR_UNDERLINE:
                awesome_attr += ['underline="single"']
        return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'

    escape = staticmethod(_escape)

renderer = PangoMarkupRenderer
|
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE

from xml.sax.saxutils import escape as _escape

class PangoMarkupRenderer(Renderer):
    '''Powerline Pango markup segment renderer.'''

    @staticmethod
    def hlstyle(*args, **kwargs):
        # We don't need to explicitly reset attributes, so skip those calls
        return ''

    def hl(self, contents, fg=None, bg=None, attr=None):
        '''Highlight a segment.'''
        awesome_attr = []
        if fg is not None:
            if fg is not False and fg[1] is not False:
                awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
        if bg is not None:
            if bg is not False and bg[1] is not False:
                awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
        if attr is not None and attr is not False:
            if attr & ATTR_BOLD:
                awesome_attr += ['font_weight="bold"']
            if attr & ATTR_ITALIC:
                awesome_attr += ['font_style="italic"']
            if attr & ATTR_UNDERLINE:
                awesome_attr += ['underline="single"']
        return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'

    escape = staticmethod(_escape)

renderer = PangoMarkupRenderer
|
Use xml.sax.saxutils.escape in place of xmlrpclib.escape
|
Use xml.sax.saxutils.escape in place of xmlrpclib.escape
The latter is not available in Python 3.
|
Python
|
mit
|
dragon788/powerline,xxxhycl2010/powerline,cyrixhero/powerline,s0undt3ch/powerline,S0lll0s/powerline,EricSB/powerline,prvnkumar/powerline,blindFS/powerline,QuLogic/powerline,lukw00/powerline,dragon788/powerline,firebitsbr/powerline,Liangjianghao/powerline,DoctorJellyface/powerline,wfscheper/powerline,wfscheper/powerline,QuLogic/powerline,magus424/powerline,DoctorJellyface/powerline,junix/powerline,keelerm84/powerline,Luffin/powerline,xxxhycl2010/powerline,seanfisk/powerline,blindFS/powerline,areteix/powerline,darac/powerline,s0undt3ch/powerline,junix/powerline,seanfisk/powerline,cyrixhero/powerline,bezhermoso/powerline,xfumihiro/powerline,EricSB/powerline,xxxhycl2010/powerline,DoctorJellyface/powerline,lukw00/powerline,magus424/powerline,s0undt3ch/powerline,russellb/powerline,bartvm/powerline,areteix/powerline,Liangjianghao/powerline,wfscheper/powerline,S0lll0s/powerline,seanfisk/powerline,IvanAli/powerline,junix/powerline,bartvm/powerline,bezhermoso/powerline,EricSB/powerline,QuLogic/powerline,xfumihiro/powerline,Luffin/powerline,firebitsbr/powerline,russellb/powerline,bartvm/powerline,kenrachynski/powerline,bezhermoso/powerline,prvnkumar/powerline,Liangjianghao/powerline,darac/powerline,darac/powerline,kenrachynski/powerline,lukw00/powerline,prvnkumar/powerline,cyrixhero/powerline,magus424/powerline,IvanAli/powerline,areteix/powerline,Luffin/powerline,firebitsbr/powerline,keelerm84/powerline,russellb/powerline,S0lll0s/powerline,xfumihiro/powerline,blindFS/powerline,dragon788/powerline,IvanAli/powerline,kenrachynski/powerline
|
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
- from xmlrpclib import escape as _escape
+ from xml.sax.saxutils import escape as _escape
class PangoMarkupRenderer(Renderer):
    '''Powerline Pango markup segment renderer.'''
    @staticmethod
    def hlstyle(*args, **kwargs):
        # We don't need to explicitly reset attributes, so skip those calls
        return ''
    def hl(self, contents, fg=None, bg=None, attr=None):
        '''Highlight a segment.'''
        awesome_attr = []
        if fg is not None:
            if fg is not False and fg[1] is not False:
                awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
        if bg is not None:
            if bg is not False and bg[1] is not False:
                awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
        if attr is not None and attr is not False:
            if attr & ATTR_BOLD:
                awesome_attr += ['font_weight="bold"']
            if attr & ATTR_ITALIC:
                awesome_attr += ['font_style="italic"']
            if attr & ATTR_UNDERLINE:
                awesome_attr += ['underline="single"']
        return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'
    escape = staticmethod(_escape)
renderer = PangoMarkupRenderer
|
Use xml.sax.saxutils.escape in place of xmlrpclib.escape
|
## Code Before:
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE

from xmlrpclib import escape as _escape

class PangoMarkupRenderer(Renderer):
    '''Powerline Pango markup segment renderer.'''

    @staticmethod
    def hlstyle(*args, **kwargs):
        # We don't need to explicitly reset attributes, so skip those calls
        return ''

    def hl(self, contents, fg=None, bg=None, attr=None):
        '''Highlight a segment.'''
        awesome_attr = []
        if fg is not None:
            if fg is not False and fg[1] is not False:
                awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
        if bg is not None:
            if bg is not False and bg[1] is not False:
                awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
        if attr is not None and attr is not False:
            if attr & ATTR_BOLD:
                awesome_attr += ['font_weight="bold"']
            if attr & ATTR_ITALIC:
                awesome_attr += ['font_style="italic"']
            if attr & ATTR_UNDERLINE:
                awesome_attr += ['underline="single"']
        return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'

    escape = staticmethod(_escape)

renderer = PangoMarkupRenderer
## Instruction:
Use xml.sax.saxutils.escape in place of xmlrpclib.escape
## Code After:
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE

from xml.sax.saxutils import escape as _escape

class PangoMarkupRenderer(Renderer):
    '''Powerline Pango markup segment renderer.'''

    @staticmethod
    def hlstyle(*args, **kwargs):
        # We don't need to explicitly reset attributes, so skip those calls
        return ''

    def hl(self, contents, fg=None, bg=None, attr=None):
        '''Highlight a segment.'''
        awesome_attr = []
        if fg is not None:
            if fg is not False and fg[1] is not False:
                awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
        if bg is not None:
            if bg is not False and bg[1] is not False:
                awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
        if attr is not None and attr is not False:
            if attr & ATTR_BOLD:
                awesome_attr += ['font_weight="bold"']
            if attr & ATTR_ITALIC:
                awesome_attr += ['font_style="italic"']
            if attr & ATTR_UNDERLINE:
                awesome_attr += ['underline="single"']
        return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'

    escape = staticmethod(_escape)

renderer = PangoMarkupRenderer
|
# ... existing code ...
from xml.sax.saxutils import escape as _escape
# ... rest of the code ...
|
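The replacement import is plain stdlib and works identically on Python 2 and 3, whereas `xmlrpclib` was renamed to `xmlrpc.client` in Python 3, breaking the old import path. A quick, verifiable example of the function being swapped in:

```python
from xml.sax.saxutils import escape

# Escapes &, < and > by default...
print(escape('bold & <i>'))                 # bold &amp; &lt;i&gt;
# ...and extra entities can be supplied, e.g. for attribute values.
print(escape('say "hi"', {'"': '&quot;'}))  # say &quot;hi&quot;
```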
321a0cea6a71e29a3f00116c52c1056d7dcfef7e
|
daskfunk/__init__.py
|
daskfunk/__init__.py
|
from __future__ import absolute_import, division, print_function
from .core import compile
from ._info import __version__
|
from __future__ import absolute_import, division, print_function
from .core import compile
|
Remove import of deleted file
|
Remove import of deleted file
|
Python
|
mit
|
Savvysherpa/dask-funk
|
from __future__ import absolute_import, division, print_function
from .core import compile
- from ._info import __version__
|
Remove import of deleted file
|
## Code Before:
from __future__ import absolute_import, division, print_function
from .core import compile
from ._info import __version__
## Instruction:
Remove import of deleted file
## Code After:
from __future__ import absolute_import, division, print_function
from .core import compile
|
# ... existing code ...
from .core import compile
# ... rest of the code ...
|
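If a version attribute is still wanted after deleting the `_info` module, one common substitute is to read it from the installed distribution's metadata. This is a hedged alternative, not what the commit does (the commit simply drops the import):

```python
# Hypothetical replacement for a deleted _info module; the distribution name
# "dask-funk" is an assumption based on the repo name.
try:
    from importlib.metadata import version, PackageNotFoundError  # Python 3.8+
except ImportError:
    from importlib_metadata import version, PackageNotFoundError  # backport

try:
    __version__ = version("dask-funk")
except PackageNotFoundError:
    __version__ = "unknown"
```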
342e6134a63c5b575ae8e4348a54f61350bca2da
|
parser/crimeparser/pipelinesEnricher.py
|
parser/crimeparser/pipelinesEnricher.py
|
from geopy import Nominatim
from geopy.extra.rate_limiter import RateLimiter

class GeoCodePipeline(object):

    def open_spider(self, spider):
        geolocator = Nominatim(timeout=5)
        self.__geocodeFunc = RateLimiter(geolocator.geocode, min_delay_seconds=2)

    def process_item(self, item, spider):
        for crime in item["crimes"]:
            place = crime["place"]
            latitude, longitude = self.__geocode_address(place)
            crime["latitude"] = latitude
            crime["longitude"] = longitude
        return item

    def __geocode_address(self, place):
        if place is None:
            return None, None
        location = self.__geocodeFunc(place)
        if location is not None:
            return location.latitude, location.longitude
        else:
            return None, None
|
from geopy import Nominatim, Photon
from geopy.extra.rate_limiter import RateLimiter

class GeoCodePipeline(object):

    def open_spider(self, spider):
        geolocator = Photon(timeout=5)
        self.__geocodeFunc = RateLimiter(geolocator.geocode, min_delay_seconds=2)

    def process_item(self, item, spider):
        for crime in item["crimes"]:
            place = crime["place"]
            latitude, longitude = self.__geocode_address(place)
            crime["latitude"] = latitude
            crime["longitude"] = longitude
        return item

    def __geocode_address(self, place):
        if place is None:
            return None, None
        location = self.__geocodeFunc(place)
        if location is not None:
            return location.latitude, location.longitude
        else:
            return None, None
|
Use Photon instead of Nominatim for geocoding
|
Use Photon instead of Nominatim for geocoding
Photon is more fault-tolerant to spelling mistakes.
|
Python
|
mit
|
aberklotz/crimereport,aberklotz/crimereport,aberklotz/crimereport
|
- from geopy import Nominatim
+ from geopy import Nominatim, Photon
from geopy.extra.rate_limiter import RateLimiter
class GeoCodePipeline(object):
    def open_spider(self, spider):
-         geolocator = Nominatim(timeout=5)
+         geolocator = Photon(timeout=5)
        self.__geocodeFunc = RateLimiter(geolocator.geocode, min_delay_seconds=2)
    def process_item(self, item, spider):
        for crime in item["crimes"]:
            place = crime["place"]
            latitude, longitude = self.__geocode_address(place)
            crime["latitude"] = latitude
            crime["longitude"] = longitude
        return item
    def __geocode_address(self, place):
        if place is None:
            return None, None
        location = self.__geocodeFunc(place)
        if location is not None:
            return location.latitude, location.longitude
        else:
            return None, None
|
Use Photon instead of Nominatim for geocoding
|
## Code Before:
from geopy import Nominatim
from geopy.extra.rate_limiter import RateLimiter

class GeoCodePipeline(object):

    def open_spider(self, spider):
        geolocator = Nominatim(timeout=5)
        self.__geocodeFunc = RateLimiter(geolocator.geocode, min_delay_seconds=2)

    def process_item(self, item, spider):
        for crime in item["crimes"]:
            place = crime["place"]
            latitude, longitude = self.__geocode_address(place)
            crime["latitude"] = latitude
            crime["longitude"] = longitude
        return item

    def __geocode_address(self, place):
        if place is None:
            return None, None
        location = self.__geocodeFunc(place)
        if location is not None:
            return location.latitude, location.longitude
        else:
            return None, None
## Instruction:
Use Photon instead of Nominatim for geocoding
## Code After:
from geopy import Nominatim, Photon
from geopy.extra.rate_limiter import RateLimiter

class GeoCodePipeline(object):

    def open_spider(self, spider):
        geolocator = Photon(timeout=5)
        self.__geocodeFunc = RateLimiter(geolocator.geocode, min_delay_seconds=2)

    def process_item(self, item, spider):
        for crime in item["crimes"]:
            place = crime["place"]
            latitude, longitude = self.__geocode_address(place)
            crime["latitude"] = latitude
            crime["longitude"] = longitude
        return item

    def __geocode_address(self, place):
        if place is None:
            return None, None
        location = self.__geocodeFunc(place)
        if location is not None:
            return location.latitude, location.longitude
        else:
            return None, None
|
# ... existing code ...
from geopy import Nominatim, Photon
from geopy.extra.rate_limiter import RateLimiter
# ... modified code ...
    def open_spider(self, spider):
        geolocator = Photon(timeout=5)
        self.__geocodeFunc = RateLimiter(geolocator.geocode, min_delay_seconds=2)
# ... rest of the code ...
|
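Since geopy geocoders share the same `geocode()` call shape, the swap is a one-line constructor change; only Photon's fuzzier matching behavior differs. A minimal sketch under that assumption (results depend on the live Photon service):

```python
from geopy.geocoders import Photon
from geopy.extra.rate_limiter import RateLimiter

geolocator = Photon(timeout=5)
geocode = RateLimiter(geolocator.geocode, min_delay_seconds=2)

# Photon does fuzzy matching, so slightly misspelled place names can still
# resolve; Nominatim is stricter about exact tokens.
location = geocode("Dresden, Germany")
if location is not None:
    print(location.latitude, location.longitude)
```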
570264014456ea0405af28feb92af7639fb7b7e3
|
metaopt/invoker/util/determine_package.py
|
metaopt/invoker/util/determine_package.py
|
from __future__ import division, print_function, with_statement

import inspect
import os

def determine_package(some_object):
    """
    Resolves a call by object to a call by package.

    - Determine absolute package name of the given object.
    - When the task gets executed, the worker process will import it.
    """
    # expand the module's path to an absolute import
    filename = inspect.getsourcefile(some_object)
    module_path, module_filename = os.path.split(filename)
    module_name, _ = os.path.splitext(module_filename)

    prefix = []
    for directory in module_path.split(os.sep)[::-1]:
        prefix.append(directory)
        candidate = ".".join(prefix[::-1] + [module_name])
        try:
            __import__(name=candidate, globals=globals(), locals=locals(),
                       fromlist=[], level=0)
            some_object = candidate
            return some_object
        except ImportError:
            pass

    raise ImportError("Could not determine the package of the given " +
                      "object. This should not happen.")
|
from __future__ import division, print_function, with_statement

import inspect
import os

def determine_package(some_object):
    """
    Resolves a call by object to a call by package.

    - Determine absolute package name of the given object.
    - When the task gets executed, the worker process will import it.
    """
    # expand the module's path to an absolute import
    filename = inspect.getsourcefile(some_object)
    module_path, module_filename = os.path.split(filename)
    module_name, _ = os.path.splitext(module_filename)

    prefix = []
    for directory in module_path.split(os.sep)[::-1]:
        prefix.append(directory)
        candidate = ".".join(prefix[::-1] + [module_name])

        if candidate.startswith("."):
            candidate = candidate[1:]

        try:
            __import__(name=candidate, globals=globals(), locals=locals(),
                       fromlist=[], level=0)
            some_object = candidate
            return some_object
        except ImportError:
            pass

    raise ImportError("Could not determine the package of the given " +
                      "object. This should not happen.")
|
Fix a bug (?) in determine_package
|
Fix a bug (?) in determine_package
Candidates have their first character removed if it is ".".
|
Python
|
bsd-3-clause
|
cigroup-ol/metaopt,cigroup-ol/metaopt,cigroup-ol/metaopt
|
from __future__ import division, print_function, with_statement
import inspect
import os
def determine_package(some_object):
    """
    Resolves a call by object to a call by package.
    - Determine absolute package name of the given object.
    - When the task gets executed, the worker process will import it.
    """
    # expand the module's path to an absolute import
    filename = inspect.getsourcefile(some_object)
    module_path, module_filename = os.path.split(filename)
    module_name, _ = os.path.splitext(module_filename)
    prefix = []
    for directory in module_path.split(os.sep)[::-1]:
        prefix.append(directory)
        candidate = ".".join(prefix[::-1] + [module_name])
+
+         if candidate.startswith("."):
+             candidate = candidate[1:]
+
        try:
            __import__(name=candidate, globals=globals(), locals=locals(),
                       fromlist=[], level=0)
            some_object = candidate
            return some_object
        except ImportError:
            pass
    raise ImportError("Could not determine the package of the given " +
                      "object. This should not happen.")
|
Fix a bug (?) in determine_package
|
## Code Before:
from __future__ import division, print_function, with_statement

import inspect
import os

def determine_package(some_object):
    """
    Resolves a call by object to a call by package.

    - Determine absolute package name of the given object.
    - When the task gets executed, the worker process will import it.
    """
    # expand the module's path to an absolute import
    filename = inspect.getsourcefile(some_object)
    module_path, module_filename = os.path.split(filename)
    module_name, _ = os.path.splitext(module_filename)

    prefix = []
    for directory in module_path.split(os.sep)[::-1]:
        prefix.append(directory)
        candidate = ".".join(prefix[::-1] + [module_name])
        try:
            __import__(name=candidate, globals=globals(), locals=locals(),
                       fromlist=[], level=0)
            some_object = candidate
            return some_object
        except ImportError:
            pass

    raise ImportError("Could not determine the package of the given " +
                      "object. This should not happen.")
## Instruction:
Fix a bug (?) in determine_package
## Code After:
from __future__ import division, print_function, with_statement

import inspect
import os

def determine_package(some_object):
    """
    Resolves a call by object to a call by package.

    - Determine absolute package name of the given object.
    - When the task gets executed, the worker process will import it.
    """
    # expand the module's path to an absolute import
    filename = inspect.getsourcefile(some_object)
    module_path, module_filename = os.path.split(filename)
    module_name, _ = os.path.splitext(module_filename)

    prefix = []
    for directory in module_path.split(os.sep)[::-1]:
        prefix.append(directory)
        candidate = ".".join(prefix[::-1] + [module_name])

        if candidate.startswith("."):
            candidate = candidate[1:]

        try:
            __import__(name=candidate, globals=globals(), locals=locals(),
                       fromlist=[], level=0)
            some_object = candidate
            return some_object
        except ImportError:
            pass

    raise ImportError("Could not determine the package of the given " +
                      "object. This should not happen.")
|
// ... existing code ...
candidate = ".".join(prefix[::-1] + [module_name])
if candidate.startswith("."):
candidate = candidate[1:]
try:
// ... rest of the code ...
|
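Why the guard is needed: an absolute path starts with `os.sep`, so splitting it yields a leading empty string, and joining with `"."` then produces a dotted name with a leading dot, which `__import__` cannot resolve as an absolute name. A small worked example:

```python
import os

# "/opt/metaopt".split(os.sep) -> ['', 'opt', 'metaopt'] on POSIX
prefix = ["", "opt", "metaopt"]
candidate = ".".join(prefix + ["util"])
print(candidate)            # .opt.metaopt.util  (leading dot from the '')

if candidate.startswith("."):
    candidate = candidate[1:]
print(candidate)            # opt.metaopt.util
```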
96855ef5baee62f63887d942854c065ad6943f87
|
micropress/forms.py
|
micropress/forms.py
|
from django import forms

from micropress.models import Article, Section, Press

class ArticleForm(forms.ModelForm):
    section = forms.ModelChoiceField(Section.objects.all(), empty_label=None)

    class Meta:
        model = Article
        fields = ('title', 'slug', 'byline', 'section', 'body', 'markup_type')

class CreatePressForm(forms.ModelForm):
    class Meta:
        model = Press
        exclude = ('content_type', 'object_id', 'realm')
|
from django import forms

from micropress.models import Article, Section, Press

class ArticleForm(forms.ModelForm):
    section = forms.ModelChoiceField(Section.objects.all(), empty_label=None)

    class Meta:
        model = Article
        fields = ('title', 'slug', 'byline', 'section', 'body', 'markup_type')

    def clean(self):
        cleaned_data = self.cleaned_data
        slug = cleaned_data.get('slug', '')
        press = self.instance.press
        if Article.objects.filter(press=press, slug=slug).exists():
            raise forms.ValidationError(
                "Article with slug '{0}' already exists.".format(slug))
        return cleaned_data

class CreatePressForm(forms.ModelForm):
    class Meta:
        model = Press
        exclude = ('content_type', 'object_id', 'realm')
|
Validate that Article.slug and press are unique_together.
|
Validate that Article.slug and press are unique_together.
|
Python
|
mit
|
jbradberry/django-micro-press,jbradberry/django-micro-press
|
from django import forms
from micropress.models import Article, Section, Press
class ArticleForm(forms.ModelForm):
    section = forms.ModelChoiceField(Section.objects.all(), empty_label=None)
    class Meta:
        model = Article
        fields = ('title', 'slug', 'byline', 'section', 'body', 'markup_type')
+     def clean(self):
+         cleaned_data = self.cleaned_data
+         slug = cleaned_data.get('slug', '')
+         press = self.instance.press
+         if Article.objects.filter(press=press, slug=slug).exists():
+             raise forms.ValidationError(
+                 "Article with slug '{0}' already exists.".format(slug))
+         return cleaned_data
+
class CreatePressForm(forms.ModelForm):
    class Meta:
        model = Press
        exclude = ('content_type', 'object_id', 'realm')
|
Validate that Article.slug and press are unique_together.
|
## Code Before:
from django import forms

from micropress.models import Article, Section, Press

class ArticleForm(forms.ModelForm):
    section = forms.ModelChoiceField(Section.objects.all(), empty_label=None)

    class Meta:
        model = Article
        fields = ('title', 'slug', 'byline', 'section', 'body', 'markup_type')

class CreatePressForm(forms.ModelForm):
    class Meta:
        model = Press
        exclude = ('content_type', 'object_id', 'realm')
## Instruction:
Validate that Article.slug and press are unique_together.
## Code After:
from django import forms

from micropress.models import Article, Section, Press

class ArticleForm(forms.ModelForm):
    section = forms.ModelChoiceField(Section.objects.all(), empty_label=None)

    class Meta:
        model = Article
        fields = ('title', 'slug', 'byline', 'section', 'body', 'markup_type')

    def clean(self):
        cleaned_data = self.cleaned_data
        slug = cleaned_data.get('slug', '')
        press = self.instance.press
        if Article.objects.filter(press=press, slug=slug).exists():
            raise forms.ValidationError(
                "Article with slug '{0}' already exists.".format(slug))
        return cleaned_data

class CreatePressForm(forms.ModelForm):
    class Meta:
        model = Press
        exclude = ('content_type', 'object_id', 'realm')
|
...
    def clean(self):
        cleaned_data = self.cleaned_data
        slug = cleaned_data.get('slug', '')
        press = self.instance.press
        if Article.objects.filter(press=press, slug=slug).exists():
            raise forms.ValidationError(
                "Article with slug '{0}' already exists.".format(slug))
        return cleaned_data
...
|
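For comparison, the same constraint is usually declared on the model, where Django enforces it at the database level; the hand-written `clean()` above presumably exists because `press` is set on the instance rather than exposed as a form field. A hedged sketch of the model-level spelling, with illustrative fields only:

```python
from django.db import models

class Article(models.Model):  # hypothetical minimal model for illustration
    press = models.ForeignKey('Press', on_delete=models.CASCADE)
    slug = models.SlugField()

    class Meta:
        # one slug per press; mirrors the form-level check in the row above
        unique_together = (('press', 'slug'),)
```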
3af95029c3f784e17247abcd0123156ff9384513
|
pronto/serializers/base.py
|
pronto/serializers/base.py
|
import abc
import io
import typing
from typing import BinaryIO, ClassVar

from ..ontology import Ontology

class BaseSerializer(abc.ABC):

    format: ClassVar[str] = NotImplemented

    def __init__(self, ont: Ontology):
        self.ont = ont

    @abc.abstractmethod
    def dump(self, file: BinaryIO, encoding: str = "utf-8") -> None:
        return NotImplemented

    def dumps(self) -> str:
        s = io.BytesIO()
        self.dump(s)
        return s.getvalue().decode('utf-8')
|
import abc
import io
import typing
from typing import BinaryIO, ClassVar

from ..ontology import Ontology

class BaseSerializer(abc.ABC):

    format: ClassVar[str] = NotImplemented

    def __init__(self, ont: Ontology):
        self.ont = ont

    @abc.abstractmethod
    def dump(self, file: BinaryIO) -> None:
        return NotImplemented

    def dumps(self) -> str:
        s = io.BytesIO()
        self.dump(s)
        return s.getvalue().decode('utf-8')
|
Fix signature of `BaseSerializer.dump` to remove `encoding` argument
|
Fix signature of `BaseSerializer.dump` to remove `encoding` argument
|
Python
|
mit
|
althonos/pronto
|
import abc
import io
import typing
from typing import BinaryIO, ClassVar
from ..ontology import Ontology
class BaseSerializer(abc.ABC):
    format: ClassVar[str] = NotImplemented
    def __init__(self, ont: Ontology):
        self.ont = ont
    @abc.abstractmethod
-     def dump(self, file: BinaryIO, encoding: str = "utf-8") -> None:
+     def dump(self, file: BinaryIO) -> None:
        return NotImplemented
    def dumps(self) -> str:
        s = io.BytesIO()
        self.dump(s)
        return s.getvalue().decode('utf-8')
|
Fix signature of `BaseSerializer.dump` to remove `encoding` argument
|
## Code Before:
import abc
import io
import typing
from typing import BinaryIO, ClassVar

from ..ontology import Ontology

class BaseSerializer(abc.ABC):

    format: ClassVar[str] = NotImplemented

    def __init__(self, ont: Ontology):
        self.ont = ont

    @abc.abstractmethod
    def dump(self, file: BinaryIO, encoding: str = "utf-8") -> None:
        return NotImplemented

    def dumps(self) -> str:
        s = io.BytesIO()
        self.dump(s)
        return s.getvalue().decode('utf-8')
## Instruction:
Fix signature of `BaseSerializer.dump` to remove `encoding` argument
## Code After:
import abc
import io
import typing
from typing import BinaryIO, ClassVar

from ..ontology import Ontology

class BaseSerializer(abc.ABC):

    format: ClassVar[str] = NotImplemented

    def __init__(self, ont: Ontology):
        self.ont = ont

    @abc.abstractmethod
    def dump(self, file: BinaryIO) -> None:
        return NotImplemented

    def dumps(self) -> str:
        s = io.BytesIO()
        self.dump(s)
        return s.getvalue().decode('utf-8')
|
// ... existing code ...
    @abc.abstractmethod
    def dump(self, file: BinaryIO) -> None:
        return NotImplemented
// ... rest of the code ...
|
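The fix matters because subclasses and callers key off the abstract signature: `dump()` writes bytes and `dumps()` owns the text decoding. A self-contained sketch of the contract after the change, with a throwaway subclass that is not part of pronto:

```python
import abc
import io
from typing import BinaryIO

class BaseSerializer(abc.ABC):
    @abc.abstractmethod
    def dump(self, file: BinaryIO) -> None:
        ...

    def dumps(self) -> str:
        s = io.BytesIO()
        self.dump(s)                          # dump() writes raw bytes...
        return s.getvalue().decode("utf-8")   # ...dumps() handles decoding

class FakeSerializer(BaseSerializer):  # hypothetical subclass for the demo
    def dump(self, file: BinaryIO) -> None:
        file.write("format: fake\n".encode("utf-8"))

print(FakeSerializer().dumps())  # format: fake
```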
9fdd24ed20c553638cde7c67f994ea72da0ef149
|
tests/conftest.py
|
tests/conftest.py
|
import shutil
from pathlib import Path

import pytest

from pandas_profiling.utils.cache import cache_file

@pytest.fixture(scope="function")
def get_data_file(tmpdir):
    def getter(file_name, url):
        source_file = cache_file(file_name, url)
        # Move to temporary directory
        test_path = Path(str(tmpdir))
        shutil.copy(source_file, test_path / file_name)
        return test_path / file_name

    return getter

@pytest.fixture(scope="module")
def test_output_dir(tmpdir_factory):
    test_path = Path(str(tmpdir_factory.mktemp("test")))
    yield test_path
    shutil.rmtree(str(test_path))
|
import shutil
from pathlib import Path

import pytest

from pandas_profiling.utils.cache import cache_file

@pytest.fixture(scope="function")
def get_data_file(tmpdir):
    def getter(file_name, url):
        source_file = cache_file(file_name, url)
        # Move to temporary directory
        test_path = Path(str(tmpdir))
        shutil.copy(str(source_file), str(test_path / file_name))
        return str(test_path / file_name)

    return getter

@pytest.fixture(scope="module")
def test_output_dir(tmpdir_factory):
    test_path = Path(str(tmpdir_factory.mktemp("test")))
    yield test_path
    shutil.rmtree(str(test_path))
|
Convert Path to str for Python 3.5
|
Convert Path to str for Python 3.5
|
Python
|
mit
|
JosPolfliet/pandas-profiling,JosPolfliet/pandas-profiling
|
import shutil
from pathlib import Path
import pytest
from pandas_profiling.utils.cache import cache_file
@pytest.fixture(scope="function")
def get_data_file(tmpdir):
    def getter(file_name, url):
        source_file = cache_file(file_name, url)
        # Move to temporary directory
        test_path = Path(str(tmpdir))
-         shutil.copy(source_file, test_path / file_name)
+         shutil.copy(str(source_file), str(test_path / file_name))
-         return test_path / file_name
+         return str(test_path / file_name)
    return getter
@pytest.fixture(scope="module")
def test_output_dir(tmpdir_factory):
    test_path = Path(str(tmpdir_factory.mktemp("test")))
    yield test_path
    shutil.rmtree(str(test_path))
|
Convert Path to str for Python 3.5
|
## Code Before:
import shutil
from pathlib import Path

import pytest

from pandas_profiling.utils.cache import cache_file

@pytest.fixture(scope="function")
def get_data_file(tmpdir):
    def getter(file_name, url):
        source_file = cache_file(file_name, url)
        # Move to temporary directory
        test_path = Path(str(tmpdir))
        shutil.copy(source_file, test_path / file_name)
        return test_path / file_name

    return getter

@pytest.fixture(scope="module")
def test_output_dir(tmpdir_factory):
    test_path = Path(str(tmpdir_factory.mktemp("test")))
    yield test_path
    shutil.rmtree(str(test_path))
## Instruction:
Convert Path to str for Python 3.5
## Code After:
import shutil
from pathlib import Path

import pytest

from pandas_profiling.utils.cache import cache_file

@pytest.fixture(scope="function")
def get_data_file(tmpdir):
    def getter(file_name, url):
        source_file = cache_file(file_name, url)
        # Move to temporary directory
        test_path = Path(str(tmpdir))
        shutil.copy(str(source_file), str(test_path / file_name))
        return str(test_path / file_name)

    return getter

@pytest.fixture(scope="module")
def test_output_dir(tmpdir_factory):
    test_path = Path(str(tmpdir_factory.mktemp("test")))
    yield test_path
    shutil.rmtree(str(test_path))
|
...
        test_path = Path(str(tmpdir))
        shutil.copy(str(source_file), str(test_path / file_name))
        return str(test_path / file_name)
...
|
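The casts exist because `os.fspath` support (and with it `shutil` accepting `Path` objects) only landed in Python 3.6; on 3.5, `shutil.copy()` wants plain strings. A runnable sketch of the pattern:

```python
import shutil
import tempfile
from pathlib import Path

tmp = Path(tempfile.mkdtemp())
src = tmp / "data.csv"
src.write_text("a,b\n1,2\n")

# On Python 3.5, shutil.copy(src, dst) with Path objects raises TypeError;
# wrapping both sides in str() works on every version.
shutil.copy(str(src), str(tmp / "copy.csv"))
print((tmp / "copy.csv").read_text())
```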
fde84efc866d2276eac5faed0af3df5a672664f5
|
fabfile.py
|
fabfile.py
|
from fabric.api import *
from fabric.colors import *

env.colorize_errors = True
env.hosts = ['sanaprotocolbuilder.me']
env.user = 'root'
env.virtualenv = 'source /usr/local/bin/virtualenvwrapper.sh'
env.project_root = '/opt/sana.protocol_builder'

def test():
    local('python sana_builder/manage.py syncdb')
    local('python sana_builder/manage.py test webapp')

def update_host():
    with cd(env.project_root), prefix(env.virtualenv), prefix('workon sana_protocol_builder'):
        print(green('Pulling latest revision...'))
        run('git pull origin master')

        print(green('Installing dependencies...'))
        run('pip install -qr requirements.txt')

        print(green('Creating database tables...'))
        run('python sana_builder/manage.py syncdb --noinput')

        print(green('Importing fixtures...'))
        run('python sana_builder/manage.py loaddata sana_builder/fixtures/pages.json')

        print(green('Collecting static files...'))
        run('python sana_builder/manage.py collectstatic --noinput')

        print(green('Restarting gunicorn...'))
        run('supervisorctl restart gunicorn')

def travis_deploy():
    update_host()

def local_deploy():
    local('git push origin master')
    update_host()
|
from fabric.api import *
from fabric.colors import *

env.colorize_errors = True
env.hosts = ['sanaprotocolbuilder.me']
env.user = 'root'
env.virtualenv = 'source /usr/local/bin/virtualenvwrapper.sh'
env.project_root = '/opt/sana.protocol_builder'

def test():
    local('python sana_builder/manage.py syncdb --noinput')
    local('python sana_builder/manage.py test webapp --noinput')

def update_host():
    with cd(env.project_root), prefix(env.virtualenv), prefix('workon sana_protocol_builder'):
        print(green('Pulling latest revision...'))
        run('git pull origin master')

        print(green('Installing dependencies...'))
        run('pip install -qr requirements.txt')

        print(green('Creating database tables...'))
        run('python sana_builder/manage.py syncdb --noinput')

        print(green('Importing fixtures...'))
        run('python sana_builder/manage.py loaddata sana_builder/fixtures/pages.json')

        print(green('Collecting static files...'))
        run('python sana_builder/manage.py collectstatic --noinput')

        print(green('Restarting gunicorn...'))
        run('supervisorctl restart gunicorn')

def travis_deploy():
    update_host()

def local_deploy():
    local('git push origin master')
    update_host()
|
Remove input from fab test
|
Remove input from fab test
|
Python
|
bsd-3-clause
|
SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder
|
from fabric.api import *
from fabric.colors import *
env.colorize_errors = True
env.hosts = ['sanaprotocolbuilder.me']
env.user = 'root'
env.virtualenv = 'source /usr/local/bin/virtualenvwrapper.sh'
env.project_root = '/opt/sana.protocol_builder'
def test():
-     local('python sana_builder/manage.py syncdb')
+     local('python sana_builder/manage.py syncdb --noinput')
-     local('python sana_builder/manage.py test webapp')
+     local('python sana_builder/manage.py test webapp --noinput')
def update_host():
    with cd(env.project_root), prefix(env.virtualenv), prefix('workon sana_protocol_builder'):
        print(green('Pulling latest revision...'))
        run('git pull origin master')
        print(green('Installing dependencies...'))
        run('pip install -qr requirements.txt')
        print(green('Creating database tables...'))
        run('python sana_builder/manage.py syncdb --noinput')
        print(green('Importing fixtures...'))
        run('python sana_builder/manage.py loaddata sana_builder/fixtures/pages.json')
        print(green('Collecting static files...'))
        run('python sana_builder/manage.py collectstatic --noinput')
        print(green('Restarting gunicorn...'))
        run('supervisorctl restart gunicorn')
def travis_deploy():
    update_host()
def local_deploy():
    local('git push origin master')
    update_host()
|
Remove input from fab test
|
## Code Before:
from fabric.api import *
from fabric.colors import *

env.colorize_errors = True
env.hosts = ['sanaprotocolbuilder.me']
env.user = 'root'
env.virtualenv = 'source /usr/local/bin/virtualenvwrapper.sh'
env.project_root = '/opt/sana.protocol_builder'

def test():
    local('python sana_builder/manage.py syncdb')
    local('python sana_builder/manage.py test webapp')

def update_host():
    with cd(env.project_root), prefix(env.virtualenv), prefix('workon sana_protocol_builder'):
        print(green('Pulling latest revision...'))
        run('git pull origin master')

        print(green('Installing dependencies...'))
        run('pip install -qr requirements.txt')

        print(green('Creating database tables...'))
        run('python sana_builder/manage.py syncdb --noinput')

        print(green('Importing fixtures...'))
        run('python sana_builder/manage.py loaddata sana_builder/fixtures/pages.json')

        print(green('Collecting static files...'))
        run('python sana_builder/manage.py collectstatic --noinput')

        print(green('Restarting gunicorn...'))
        run('supervisorctl restart gunicorn')

def travis_deploy():
    update_host()

def local_deploy():
    local('git push origin master')
    update_host()
## Instruction:
Remove input from fab test
## Code After:
from fabric.api import *
from fabric.colors import *

env.colorize_errors = True
env.hosts = ['sanaprotocolbuilder.me']
env.user = 'root'
env.virtualenv = 'source /usr/local/bin/virtualenvwrapper.sh'
env.project_root = '/opt/sana.protocol_builder'

def test():
    local('python sana_builder/manage.py syncdb --noinput')
    local('python sana_builder/manage.py test webapp --noinput')

def update_host():
    with cd(env.project_root), prefix(env.virtualenv), prefix('workon sana_protocol_builder'):
        print(green('Pulling latest revision...'))
        run('git pull origin master')

        print(green('Installing dependencies...'))
        run('pip install -qr requirements.txt')

        print(green('Creating database tables...'))
        run('python sana_builder/manage.py syncdb --noinput')

        print(green('Importing fixtures...'))
        run('python sana_builder/manage.py loaddata sana_builder/fixtures/pages.json')

        print(green('Collecting static files...'))
        run('python sana_builder/manage.py collectstatic --noinput')

        print(green('Restarting gunicorn...'))
        run('supervisorctl restart gunicorn')

def travis_deploy():
    update_host()

def local_deploy():
    local('git push origin master')
    update_host()
|
// ... existing code ...
def test():
    local('python sana_builder/manage.py syncdb --noinput')
    local('python sana_builder/manage.py test webapp --noinput')
// ... rest of the code ...
|
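`--noinput` makes both management commands run without prompting, which is what an unattended runner such as Travis needs; otherwise a confirmation prompt blocks forever on a closed stdin. The same idea applies to any fabric task that wraps an interactive Django command; a hedged sketch in the same style as the fabfile above:

```python
# Hypothetical task, not part of the fabfile in the row above.
from fabric.api import local

def reset_static():
    # collectstatic normally asks before overwriting; --noinput answers
    # non-interactively, so CI never hangs waiting on stdin.
    local('python sana_builder/manage.py collectstatic --noinput')
```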
e5c8379c987d2d7ae60d5f9321bb96f278549167
|
apel/parsers/__init__.py
|
apel/parsers/__init__.py
|
'''
Copyright (C) 2012 STFC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Package with Apel parsers.
@author: Will Rogers, Konrad Jopek
'''
LOGGER_ID = 'parser'
from parser import Parser
from blah import BlahParser
from lsf import LSFParser
from pbs import PBSParser
from sge import SGEParser
from slurm import SlurmParser
|
'''
Copyright (C) 2012 STFC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Package with Apel parsers.
@author: Will Rogers, Konrad Jopek
'''
LOGGER_ID = 'parser'
from parser import Parser
from blah import BlahParser
from htcondor import HTCondorParser
from lsf import LSFParser
from pbs import PBSParser
from sge import SGEParser
from slurm import SlurmParser
|
Add HTCondorParser to init imports
|
Add HTCondorParser to init imports
|
Python
|
apache-2.0
|
apel/apel,tofu-rocketry/apel,tofu-rocketry/apel,stfc/apel,apel/apel,stfc/apel
|
'''
Copyright (C) 2012 STFC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Package with Apel parsers.
@author: Will Rogers, Konrad Jopek
'''
LOGGER_ID = 'parser'
from parser import Parser
from blah import BlahParser
+ from htcondor import HTCondorParser
from lsf import LSFParser
from pbs import PBSParser
from sge import SGEParser
from slurm import SlurmParser
|
Add HTCondorParser to init imports
|
## Code Before:
'''
Copyright (C) 2012 STFC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Package with Apel parsers.
@author: Will Rogers, Konrad Jopek
'''
LOGGER_ID = 'parser'
from parser import Parser
from blah import BlahParser
from lsf import LSFParser
from pbs import PBSParser
from sge import SGEParser
from slurm import SlurmParser
## Instruction:
Add HTCondorParser to init imports
## Code After:
'''
Copyright (C) 2012 STFC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Package with Apel parsers.
@author: Will Rogers, Konrad Jopek
'''
LOGGER_ID = 'parser'
from parser import Parser
from blah import BlahParser
from htcondor import HTCondorParser
from lsf import LSFParser
from pbs import PBSParser
from sge import SGEParser
from slurm import SlurmParser
|
# ... existing code ...
from blah import BlahParser
from htcondor import HTCondorParser
from lsf import LSFParser
# ... rest of the code ...
|
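The change above is the standard re-export pattern: the package __init__ gathers every parser behind one import location. A minimal, self-contained sketch of the idea; the classes here are stand-ins, not the real apel parsers.
class Parser(object):
    # stand-in base class for the sketch
    def parse(self, line):
        raise NotImplementedError

class HTCondorParser(Parser):
    # stand-in for the newly re-exported parser
    def parse(self, line):
        return {'raw': line}

# with everything re-exported from __init__, a consumer can select a
# parser by batch-system name from a single import site
PARSERS = {'htcondor': HTCondorParser}

def get_parser(name):
    return PARSERS[name]()

print(get_parser('htcondor').parse('some accounting record'))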
80b05e0cd3d73529d37843d398857289d5717e44
|
wagtail/tests/migrations/0005_auto_20141113_0642.py
|
wagtail/tests/migrations/0005_auto_20141113_0642.py
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('tests', '0004_auto_20141008_0420'),
]
operations = [
migrations.AlterField(
model_name='formfield',
name='choices',
field=models.CharField(help_text='Comma separated list of choices. Only applicable in checkboxes, radio and dropdown.', max_length=512, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='formfield',
name='default_value',
field=models.CharField(help_text='Default value. Comma separated values supported for checkboxes.', max_length=255, blank=True),
preserve_default=True,
),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0002_initial_data'),
('tests', '0004_auto_20141008_0420'),
]
operations = [
migrations.AlterField(
model_name='formfield',
name='choices',
field=models.CharField(help_text='Comma separated list of choices. Only applicable in checkboxes, radio and dropdown.', max_length=512, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='formfield',
name='default_value',
field=models.CharField(help_text='Default value. Comma separated values supported for checkboxes.', max_length=255, blank=True),
preserve_default=True,
),
]
|
Add dependency on wagtailcore migration 0002 (necessary to cleanly merge the other migration 0005 being added in 0.9)
|
Add dependency on wagtailcore migration 0002 (necessary to cleanly merge the other migration 0005 being added in 0.9)
|
Python
|
bsd-3-clause
|
rsalmaso/wagtail,mikedingjan/wagtail,Toshakins/wagtail,dresiu/wagtail,nilnvoid/wagtail,iansprice/wagtail,kurtw/wagtail,takeflight/wagtail,thenewguy/wagtail,dresiu/wagtail,nutztherookie/wagtail,thenewguy/wagtail,mikedingjan/wagtail,mixxorz/wagtail,takeflight/wagtail,torchbox/wagtail,JoshBarr/wagtail,nealtodd/wagtail,jorge-marques/wagtail,takeshineshiro/wagtail,jnns/wagtail,chrxr/wagtail,inonit/wagtail,nrsimha/wagtail,jordij/wagtail,jorge-marques/wagtail,inonit/wagtail,Pennebaker/wagtail,nimasmi/wagtail,thenewguy/wagtail,taedori81/wagtail,tangentlabs/wagtail,benjaoming/wagtail,jnns/wagtail,mjec/wagtail,WQuanfeng/wagtail,nilnvoid/wagtail,m-sanders/wagtail,mixxorz/wagtail,kurtrwall/wagtail,mayapurmedia/wagtail,chrxr/wagtail,kaedroho/wagtail,quru/wagtail,rjsproxy/wagtail,jorge-marques/wagtail,bjesus/wagtail,benjaoming/wagtail,chrxr/wagtail,nutztherookie/wagtail,kaedroho/wagtail,takeshineshiro/wagtail,kaedroho/wagtail,mephizzle/wagtail,timorieber/wagtail,gogobook/wagtail,mjec/wagtail,timorieber/wagtail,kurtrwall/wagtail,rv816/wagtail,kaedroho/wagtail,janusnic/wagtail,gogobook/wagtail,Toshakins/wagtail,nimasmi/wagtail,takeshineshiro/wagtail,Toshakins/wagtail,KimGlazebrook/wagtail-experiment,KimGlazebrook/wagtail-experiment,tangentlabs/wagtail,inonit/wagtail,davecranwell/wagtail,wagtail/wagtail,gogobook/wagtail,darith27/wagtail,wagtail/wagtail,rsalmaso/wagtail,mjec/wagtail,davecranwell/wagtail,chimeno/wagtail,iho/wagtail,jorge-marques/wagtail,gasman/wagtail,quru/wagtail,serzans/wagtail,nutztherookie/wagtail,gasman/wagtail,iho/wagtail,takeshineshiro/wagtail,quru/wagtail,rsalmaso/wagtail,taedori81/wagtail,kurtrwall/wagtail,zerolab/wagtail,Pennebaker/wagtail,taedori81/wagtail,wagtail/wagtail,iansprice/wagtail,kurtw/wagtail,marctc/wagtail,m-sanders/wagtail,nrsimha/wagtail,Toshakins/wagtail,Tivix/wagtail,bjesus/wagtail,nimasmi/wagtail,chimeno/wagtail,nilnvoid/wagtail,chrxr/wagtail,zerolab/wagtail,serzans/wagtail,jordij/wagtail,hamsterbacke23/wagtail,nrsimha/wagtail,quru/wagtail,Pennebaker/wagtail,nilnvoid/wagtail,FlipperPA/wagtail,WQuanfeng/wagtail,jnns/wagtail,serzans/wagtail,rjsproxy/wagtail,timorieber/wagtail,gasman/wagtail,kurtrwall/wagtail,hanpama/wagtail,KimGlazebrook/wagtail-experiment,mayapurmedia/wagtail,FlipperPA/wagtail,marctc/wagtail,kurtw/wagtail,iho/wagtail,tangentlabs/wagtail,FlipperPA/wagtail,Pennebaker/wagtail,zerolab/wagtail,mixxorz/wagtail,iho/wagtail,rjsproxy/wagtail,jnns/wagtail,wagtail/wagtail,rjsproxy/wagtail,timorieber/wagtail,nimasmi/wagtail,mephizzle/wagtail,davecranwell/wagtail,Klaudit/wagtail,taedori81/wagtail,mikedingjan/wagtail,rv816/wagtail,torchbox/wagtail,hanpama/wagtail,stevenewey/wagtail,Klaudit/wagtail,mikedingjan/wagtail,janusnic/wagtail,bjesus/wagtail,darith27/wagtail,takeflight/wagtail,mjec/wagtail,wagtail/wagtail,marctc/wagtail,thenewguy/wagtail,nutztherookie/wagtail,JoshBarr/wagtail,hamsterbacke23/wagtail,JoshBarr/wagtail,nealtodd/wagtail,takeflight/wagtail,stevenewey/wagtail,hamsterbacke23/wagtail,hanpama/wagtail,mephizzle/wagtail,mephizzle/wagtail,davecranwell/wagtail,nealtodd/wagtail,m-sanders/wagtail,WQuanfeng/wagtail,FlipperPA/wagtail,stevenewey/wagtail,torchbox/wagtail,dresiu/wagtail,torchbox/wagtail,mayapurmedia/wagtail,hanpama/wagtail,gasman/wagtail,Tivix/wagtail,chimeno/wagtail,hamsterbacke23/wagtail,janusnic/wagtail,taedori81/wagtail,Tivix/wagtail,rsalmaso/wagtail,iansprice/wagtail,zerolab/wagtail,darith27/wagtail,m-sanders/wagtail,janusnic/wagtail,KimGlazebrook/wagtail-experiment,zerolab/wagtail,chimeno/wagtail,Klaudit/wagtail,benjaoming/wagtail,Tivix/
wagtail,dresiu/wagtail,bjesus/wagtail,marctc/wagtail,mixxorz/wagtail,rsalmaso/wagtail,Klaudit/wagtail,JoshBarr/wagtail,kurtw/wagtail,jordij/wagtail,rv816/wagtail,WQuanfeng/wagtail,rv816/wagtail,mixxorz/wagtail,gogobook/wagtail,stevenewey/wagtail,dresiu/wagtail,chimeno/wagtail,jorge-marques/wagtail,thenewguy/wagtail,benjaoming/wagtail,serzans/wagtail,iansprice/wagtail,nealtodd/wagtail,kaedroho/wagtail,mayapurmedia/wagtail,gasman/wagtail,nrsimha/wagtail,inonit/wagtail,tangentlabs/wagtail,darith27/wagtail,jordij/wagtail
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
+ ('wagtailcore', '0002_initial_data'),
('tests', '0004_auto_20141008_0420'),
]
operations = [
migrations.AlterField(
model_name='formfield',
name='choices',
field=models.CharField(help_text='Comma separated list of choices. Only applicable in checkboxes, radio and dropdown.', max_length=512, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='formfield',
name='default_value',
field=models.CharField(help_text='Default value. Comma separated values supported for checkboxes.', max_length=255, blank=True),
preserve_default=True,
),
]
|
Add dependency on wagtailcore migration 0002 (necessary to cleanly merge the other migration 0005 being added in 0.9)
|
## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('tests', '0004_auto_20141008_0420'),
]
operations = [
migrations.AlterField(
model_name='formfield',
name='choices',
field=models.CharField(help_text='Comma separated list of choices. Only applicable in checkboxes, radio and dropdown.', max_length=512, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='formfield',
name='default_value',
field=models.CharField(help_text='Default value. Comma separated values supported for checkboxes.', max_length=255, blank=True),
preserve_default=True,
),
]
## Instruction:
Add dependency on wagtailcore migration 0002 (necessary to cleanly merge the other migration 0005 being added in 0.9)
## Code After:
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0002_initial_data'),
('tests', '0004_auto_20141008_0420'),
]
operations = [
migrations.AlterField(
model_name='formfield',
name='choices',
field=models.CharField(help_text='Comma separated list of choices. Only applicable in checkboxes, radio and dropdown.', max_length=512, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='formfield',
name='default_value',
field=models.CharField(help_text='Default value. Comma separated values supported for checkboxes.', max_length=255, blank=True),
preserve_default=True,
),
]
|
# ... existing code ...
dependencies = [
('wagtailcore', '0002_initial_data'),
('tests', '0004_auto_20141008_0420'),
# ... rest of the code ...
|
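For context on why the one-line change works: Django orders migrations by the `dependencies` graph, so the cross-app entry forces wagtailcore's 0002 to be applied first. An illustrative, standalone migration module (not part of wagtail) showing the shape:
from django.db import migrations

class Migration(migrations.Migration):
    # each tuple names an (app_label, migration_name) that must already
    # be applied before this migration runs
    dependencies = [
        ('wagtailcore', '0002_initial_data'),   # cross-app dependency
        ('tests', '0004_auto_20141008_0420'),   # same-app predecessor
    ]
    operations = []  # ordering-only migration for the example
Without the cross-app entry the graph leaves the two apps' migrations unordered relative to each other, which appears to be the merge problem the commit message describes.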
569c056e016131ec4325185ee9fe814018d5e1fe
|
server/bands/__init__.py
|
server/bands/__init__.py
|
from flask import session, redirect, url_for, g, jsonify, Response
from flask.views import MethodView
from server.models import Band
class RestrictedBandPage(MethodView):
def dispatch_request(self, *args, **kwargs):
if not 'bandId' in session:
return redirect(url_for('bands.session.index'))
else:
self.band = Band.query.get(session['bandId'])
if not self.band:
return redirect(url_for('bands.session.index'))
else:
g.band = self.band
return super(RestrictedBandPage, self).dispatch_request(*args, **kwargs)
class AjaxException(Exception):
errors = []
def __init__(self, *args):
super(Exception, self).__init__()
self.errors = args
AJAX_SUCCESS = Response(200)
class AjaxForm(MethodView):
def post(self):
if self.form.validate_on_submit():
try:
result = self.on_submit()
if type(result) is Response:
return result
else:
return jsonify(result)
except AjaxException as e:
errors = self.form.errors
if len(e.errors) > 0:
errors['general'] = e.errors
return jsonify(errors=errors), 400
else:
return jsonify(errors=self.form.errors), 400
|
from flask import session, redirect, url_for, g, jsonify, Response
from flask.views import MethodView
from server.models import Band
class RestrictedBandPage(MethodView):
def dispatch_request(self, *args, **kwargs):
if not 'bandId' in session:
return redirect(url_for('bands.session.index'))
else:
self.band = Band.query.get(session['bandId'])
if not self.band:
del session['bandId']
return redirect(url_for('bands.session.index'))
else:
g.band = self.band
return super(RestrictedBandPage, self).dispatch_request(*args, **kwargs)
class AjaxException(Exception):
errors = []
def __init__(self, *args):
super(Exception, self).__init__()
self.errors = args
AJAX_SUCCESS = Response(200)
class AjaxForm(MethodView):
def post(self):
if self.form.validate_on_submit():
try:
result = self.on_submit()
if type(result) is Response:
return result
else:
return jsonify(result)
except AjaxException as e:
errors = self.form.errors
if len(e.errors) > 0:
errors['general'] = e.errors
return jsonify(errors=errors), 400
else:
return jsonify(errors=self.form.errors), 400
|
Fix problem where no-longer-existing bands are still available as logged-in sessions
|
Fix problem where no-longer-existing bands are still available as logged-in sessions
|
Python
|
apache-2.0
|
dennisausbremen/tunefish,dennisausbremen/tunefish,dennisausbremen/tunefish
|
from flask import session, redirect, url_for, g, jsonify, Response
from flask.views import MethodView
from server.models import Band
class RestrictedBandPage(MethodView):
def dispatch_request(self, *args, **kwargs):
if not 'bandId' in session:
return redirect(url_for('bands.session.index'))
else:
self.band = Band.query.get(session['bandId'])
if not self.band:
+ del session['bandId']
return redirect(url_for('bands.session.index'))
else:
g.band = self.band
return super(RestrictedBandPage, self).dispatch_request(*args, **kwargs)
class AjaxException(Exception):
errors = []
def __init__(self, *args):
super(Exception, self).__init__()
self.errors = args
AJAX_SUCCESS = Response(200)
class AjaxForm(MethodView):
def post(self):
if self.form.validate_on_submit():
try:
result = self.on_submit()
if type(result) is Response:
return result
else:
return jsonify(result)
except AjaxException as e:
errors = self.form.errors
if len(e.errors) > 0:
errors['general'] = e.errors
return jsonify(errors=errors), 400
else:
return jsonify(errors=self.form.errors), 400
|
Fix problem where no-longer-existing bands are still available as logged-in sessions
|
## Code Before:
from flask import session, redirect, url_for, g, jsonify, Response
from flask.views import MethodView
from server.models import Band
class RestrictedBandPage(MethodView):
def dispatch_request(self, *args, **kwargs):
if not 'bandId' in session:
return redirect(url_for('bands.session.index'))
else:
self.band = Band.query.get(session['bandId'])
if not self.band:
return redirect(url_for('bands.session.index'))
else:
g.band = self.band
return super(RestrictedBandPage, self).dispatch_request(*args, **kwargs)
class AjaxException(Exception):
errors = []
def __init__(self, *args):
super(Exception, self).__init__()
self.errors = args
AJAX_SUCCESS = Response(200)
class AjaxForm(MethodView):
def post(self):
if self.form.validate_on_submit():
try:
result = self.on_submit()
if type(result) is Response:
return result
else:
return jsonify(result)
except AjaxException as e:
errors = self.form.errors
if len(e.errors) > 0:
errors['general'] = e.errors
return jsonify(errors=errors), 400
else:
return jsonify(errors=self.form.errors), 400
## Instruction:
Fix problem where no-longer-existing bands are still available as logged-in sessions
## Code After:
from flask import session, redirect, url_for, g, jsonify, Response
from flask.views import MethodView
from server.models import Band
class RestrictedBandPage(MethodView):
def dispatch_request(self, *args, **kwargs):
if not 'bandId' in session:
return redirect(url_for('bands.session.index'))
else:
self.band = Band.query.get(session['bandId'])
if not self.band:
del session['bandId']
return redirect(url_for('bands.session.index'))
else:
g.band = self.band
return super(RestrictedBandPage, self).dispatch_request(*args, **kwargs)
class AjaxException(Exception):
errors = []
def __init__(self, *args):
super(Exception, self).__init__()
self.errors = args
AJAX_SUCCESS = Response(200)
class AjaxForm(MethodView):
def post(self):
if self.form.validate_on_submit():
try:
result = self.on_submit()
if type(result) is Response:
return result
else:
return jsonify(result)
except AjaxException as e:
errors = self.form.errors
if len(e.errors) > 0:
errors['general'] = e.errors
return jsonify(errors=errors), 400
else:
return jsonify(errors=self.form.errors), 400
|
# ... existing code ...
if not self.band:
del session['bandId']
return redirect(url_for('bands.session.index'))
# ... rest of the code ...
|
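The fix is easiest to see without Flask or SQLAlchemy in the picture. A framework-free sketch of the behaviour, with a plain dict standing in for the session and illustrative names throughout:
session = {'bandId': 42}
existing_band_ids = set()  # band 42 has since been deleted

def dispatch(session):
    if 'bandId' not in session:
        return 'redirect: index'
    if session['bandId'] not in existing_band_ids:
        del session['bandId']   # the line the commit adds: drop the stale id
        return 'redirect: index'
    return 'band page'

print(dispatch(session))  # redirect: index
print(session)            # {} -- later requests short-circuit on the first check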
bb80ef40356be4384b0ddf0e4510865d4d33c654
|
appengine_config.py
|
appengine_config.py
|
import site
import os.path
# add `lib` subdirectory as a site packages directory, so our `main` module can load
# third-party libraries.
site.addsitedir(os.path.join(os.path.dirname(__file__), 'lib'))
|
from google.appengine.ext import vendor
vendor.add('lib')
|
Use a newer method for specifying the vendored packages directory.
|
Use a newer method for specifying the vendored packages directory.
|
Python
|
mit
|
boulder-python/boulderpython.org,boulder-python/boulderpython.org,boulder-python/boulderpython.org,boulder-python/boulderpython.org
|
- import site
- import os.path
+ from google.appengine.ext import vendor
+ vendor.add('lib')
- # add `lib` subdirectory as a site packages directory, so our `main` module can load
- # third-party libraries.
- site.addsitedir(os.path.join(os.path.dirname(__file__), 'lib'))
-
|
Use a newer method for specifying the vendored packages directory.
|
## Code Before:
import site
import os.path
# add `lib` subdirectory as a site packages directory, so our `main` module can load
# third-party libraries.
site.addsitedir(os.path.join(os.path.dirname(__file__), 'lib'))
## Instruction:
Use a newer method for specifying the vendored packages directory.
## Code After:
from google.appengine.ext import vendor
vendor.add('lib')
|
// ... existing code ...
from google.appengine.ext import vendor
vendor.add('lib')
// ... rest of the code ...
|
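`vendor.add` is, to a first approximation, a validated wrapper around the same `site.addsitedir` call the old code spelled out by hand. A rough sketch of the equivalence (this is not the SDK's actual implementation):
import os
import site

def add_lib(folder):
    # roughly what vendor.add('lib') does: resolve the folder against
    # this file's directory and register it as a site directory, so
    # any .pth files inside it are honoured as well
    site.addsitedir(os.path.join(os.path.dirname(os.path.abspath(__file__)), folder))
The usual companion step is installing dependencies into that folder, e.g. `pip install -t lib -r requirements.txt`.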
8ecf97b338dd37eaf5a4e2672e33e27cc40d215d
|
sacred/observers/__init__.py
|
sacred/observers/__init__.py
|
from __future__ import division, print_function, unicode_literals
from sacred.commandline_options import CommandLineOption
from sacred.observers.base import RunObserver
from sacred.observers.file_storage import FileStorageObserver
import sacred.optional as opt
from sacred.observers.tinydb_hashfs import TinyDbObserver
if opt.has_pymongo:
from sacred.observers.mongo import MongoObserver
else:
MongoObserver = opt.MissingDependencyMock('pymongo')
class MongoDbOption(CommandLineOption):
"""To use the MongoObserver you need to install pymongo first."""
arg = 'DB'
@classmethod
def apply(cls, args, run):
raise ImportError('cannot use -m/--mongo_db flag: '
'missing pymongo dependency')
if opt.has_sqlalchemy:
from sacred.observers.sql import SqlObserver
else:
SqlObserver = opt.MissingDependencyMock('sqlalchemy')
class SqlOption(CommandLineOption):
"""To use the SqlObserver you need to install sqlalchemy first."""
arg = 'DB_URL'
@classmethod
def apply(cls, args, run):
raise ImportError('cannot use -s/--sql flag: '
'missing sqlalchemy dependency')
__all__ = ('FileStorageObserver', 'RunObserver', 'MongoObserver',
'SqlObserver', 'TinyDbObserver')
|
from __future__ import division, print_function, unicode_literals
from sacred.commandline_options import CommandLineOption
from sacred.observers.base import RunObserver
from sacred.observers.file_storage import FileStorageObserver
import sacred.optional as opt
from sacred.observers.tinydb_hashfs import TinyDbObserver, TinyDbReader
if opt.has_pymongo:
from sacred.observers.mongo import MongoObserver
else:
MongoObserver = opt.MissingDependencyMock('pymongo')
class MongoDbOption(CommandLineOption):
"""To use the MongoObserver you need to install pymongo first."""
arg = 'DB'
@classmethod
def apply(cls, args, run):
raise ImportError('cannot use -m/--mongo_db flag: '
'missing pymongo dependency')
if opt.has_sqlalchemy:
from sacred.observers.sql import SqlObserver
else:
SqlObserver = opt.MissingDependencyMock('sqlalchemy')
class SqlOption(CommandLineOption):
"""To use the SqlObserver you need to install sqlalchemy first."""
arg = 'DB_URL'
@classmethod
def apply(cls, args, run):
raise ImportError('cannot use -s/--sql flag: '
'missing sqlalchemy dependency')
__all__ = ('FileStorageObserver', 'RunObserver', 'MongoObserver',
'SqlObserver', 'TinyDbObserver', 'TinyDbReader')
|
Add TinyDbReader to observers init
|
Add TinyDbReader to observers init
|
Python
|
mit
|
IDSIA/sacred,IDSIA/sacred
|
from __future__ import division, print_function, unicode_literals
from sacred.commandline_options import CommandLineOption
from sacred.observers.base import RunObserver
from sacred.observers.file_storage import FileStorageObserver
import sacred.optional as opt
- from sacred.observers.tinydb_hashfs import TinyDbObserver
+ from sacred.observers.tinydb_hashfs import TinyDbObserver, TinyDbReader
if opt.has_pymongo:
from sacred.observers.mongo import MongoObserver
else:
MongoObserver = opt.MissingDependencyMock('pymongo')
class MongoDbOption(CommandLineOption):
"""To use the MongoObserver you need to install pymongo first."""
arg = 'DB'
@classmethod
def apply(cls, args, run):
raise ImportError('cannot use -m/--mongo_db flag: '
'missing pymongo dependency')
if opt.has_sqlalchemy:
from sacred.observers.sql import SqlObserver
else:
SqlObserver = opt.MissingDependencyMock('sqlalchemy')
class SqlOption(CommandLineOption):
"""To use the SqlObserver you need to install sqlalchemy first."""
arg = 'DB_URL'
@classmethod
def apply(cls, args, run):
raise ImportError('cannot use -s/--sql flag: '
'missing sqlalchemy dependency')
__all__ = ('FileStorageObserver', 'RunObserver', 'MongoObserver',
- 'SqlObserver', 'TinyDbObserver')
+ 'SqlObserver', 'TinyDbObserver', 'TinyDbReader')
|
Add TinyDbReader to observers init
|
## Code Before:
from __future__ import division, print_function, unicode_literals
from sacred.commandline_options import CommandLineOption
from sacred.observers.base import RunObserver
from sacred.observers.file_storage import FileStorageObserver
import sacred.optional as opt
from sacred.observers.tinydb_hashfs import TinyDbObserver
if opt.has_pymongo:
from sacred.observers.mongo import MongoObserver
else:
MongoObserver = opt.MissingDependencyMock('pymongo')
class MongoDbOption(CommandLineOption):
"""To use the MongoObserver you need to install pymongo first."""
arg = 'DB'
@classmethod
def apply(cls, args, run):
raise ImportError('cannot use -m/--mongo_db flag: '
'missing pymongo dependency')
if opt.has_sqlalchemy:
from sacred.observers.sql import SqlObserver
else:
SqlObserver = opt.MissingDependencyMock('sqlalchemy')
class SqlOption(CommandLineOption):
"""To use the SqlObserver you need to install sqlalchemy first."""
arg = 'DB_URL'
@classmethod
def apply(cls, args, run):
raise ImportError('cannot use -s/--sql flag: '
'missing sqlalchemy dependency')
__all__ = ('FileStorageObserver', 'RunObserver', 'MongoObserver',
'SqlObserver', 'TinyDbObserver')
## Instruction:
Add TinyDbReader to observers init
## Code After:
from __future__ import division, print_function, unicode_literals
from sacred.commandline_options import CommandLineOption
from sacred.observers.base import RunObserver
from sacred.observers.file_storage import FileStorageObserver
import sacred.optional as opt
from sacred.observers.tinydb_hashfs import TinyDbObserver, TinyDbReader
if opt.has_pymongo:
from sacred.observers.mongo import MongoObserver
else:
MongoObserver = opt.MissingDependencyMock('pymongo')
class MongoDbOption(CommandLineOption):
"""To use the MongoObserver you need to install pymongo first."""
arg = 'DB'
@classmethod
def apply(cls, args, run):
raise ImportError('cannot use -m/--mongo_db flag: '
'missing pymongo dependency')
if opt.has_sqlalchemy:
from sacred.observers.sql import SqlObserver
else:
SqlObserver = opt.MissingDependencyMock('sqlalchemy')
class SqlOption(CommandLineOption):
"""To use the SqlObserver you need to install sqlalchemy first."""
arg = 'DB_URL'
@classmethod
def apply(cls, args, run):
raise ImportError('cannot use -s/--sql flag: '
'missing sqlalchemy dependency')
__all__ = ('FileStorageObserver', 'RunObserver', 'MongoObserver',
'SqlObserver', 'TinyDbObserver', 'TinyDbReader')
|
...
import sacred.optional as opt
from sacred.observers.tinydb_hashfs import TinyDbObserver, TinyDbReader
...
__all__ = ('FileStorageObserver', 'RunObserver', 'MongoObserver',
'SqlObserver', 'TinyDbObserver', 'TinyDbReader')
...
|
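What the change guarantees is the import surface: both names now resolve from the package root. A quick smoke test, assuming sacred and its optional tinydb/hashfs dependencies are installed:
from sacred.observers import TinyDbObserver, TinyDbReader

# both symbols come from sacred.observers.tinydb_hashfs and are listed
# in __all__ after this commit
print(TinyDbObserver)
print(TinyDbReader)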
a633fd37a4d795e7b565254ef10aaa0f2ad77f31
|
vcontrol/rest/machines/shutdown.py
|
vcontrol/rest/machines/shutdown.py
|
from ..helpers import get_allowed
import subprocess
import web
class ShutdownMachineR:
"""
This endpoint is for shutting down a running machine.
"""
allow_origin, rest_url = get_allowed.get_allowed()
def GET(self, machine):
web.header('Access-Control-Allow-Origin', self.allow_origin)
try:
out = subprocess.check_output("/usr/local/bin/docker-machine stop "+machine, shell=True)
except:
out = "unable to stop machine"
return str(out)
|
from ..helpers import get_allowed
import subprocess
import web
class ShutdownMachineR:
"""
This endpoint is for shutting down a running machine.
"""
allow_origin, rest_url = get_allowed.get_allowed()
def GET(self, machine):
try:
web.header('Access-Control-Allow-Origin', self.allow_origin)
except Exception as e: # no pragma
print(e.message)
try:
out = subprocess.check_output("/usr/local/bin/docker-machine stop "+machine, shell=True)
except:
out = "unable to stop machine"
return str(out)
|
Put the web.header function in a try/except block
|
Put the web.header function in a try/except block
|
Python
|
apache-2.0
|
cglewis/vcontrol,CyberReboot/vcontrol,CyberReboot/vcontrol,cglewis/vcontrol,CyberReboot/vcontrol,cglewis/vcontrol
|
from ..helpers import get_allowed
import subprocess
import web
+
class ShutdownMachineR:
"""
This endpoint is for shutting down a running machine.
"""
allow_origin, rest_url = get_allowed.get_allowed()
+
def GET(self, machine):
+ try:
- web.header('Access-Control-Allow-Origin', self.allow_origin)
+ web.header('Access-Control-Allow-Origin', self.allow_origin)
+ except Exception as e: # no pragma
+ print(e.message)
try:
out = subprocess.check_output("/usr/local/bin/docker-machine stop "+machine, shell=True)
except:
out = "unable to stop machine"
return str(out)
|
Put the web.header function in a try/except block
|
## Code Before:
from ..helpers import get_allowed
import subprocess
import web
class ShutdownMachineR:
"""
This endpoint is for shutting down a running machine.
"""
allow_origin, rest_url = get_allowed.get_allowed()
def GET(self, machine):
web.header('Access-Control-Allow-Origin', self.allow_origin)
try:
out = subprocess.check_output("/usr/local/bin/docker-machine stop "+machine, shell=True)
except:
out = "unable to stop machine"
return str(out)
## Instruction:
Put the web.header function in a try/except block
## Code After:
from ..helpers import get_allowed
import subprocess
import web
class ShutdownMachineR:
"""
This endpoint is for shutting down a running machine.
"""
allow_origin, rest_url = get_allowed.get_allowed()
def GET(self, machine):
try:
web.header('Access-Control-Allow-Origin', self.allow_origin)
except Exception as e: # no pragma
print(e.message)
try:
out = subprocess.check_output("/usr/local/bin/docker-machine stop "+machine, shell=True)
except:
out = "unable to stop machine"
return str(out)
|
# ... existing code ...
import web
# ... modified code ...
allow_origin, rest_url = get_allowed.get_allowed()
def GET(self, machine):
try:
web.header('Access-Control-Allow-Origin', self.allow_origin)
except Exception as e: # no pragma
print(e.message)
try:
# ... rest of the code ...
|
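Two side notes on the new except block, neither part of the commit: the comment `# no pragma` looks like an inverted spelling of coverage.py's `# pragma: no cover` marker, and `Exception.message` no longer exists in Python 3, so a forward-compatible variant would log `str(e)` instead:
try:
    raise RuntimeError('header written outside a request context')
except Exception as e:  # deliberately broad, mirroring the change above
    print(str(e))       # portable on Python 2 and 3, unlike e.message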
b3850c475e449c0c6182629aa7521f335e86b1e1
|
scrapy_local.py
|
scrapy_local.py
|
import os
# use this for running scrapy directly
# PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
# Use this for deploying to scrapyd, as it would be in stage/production
FILES_STORE = '/var/lib/scrapyd/files'
|
import os
# use this for running scrapy directly
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
|
Fix issue with scrapy local settings
|
Fix issue with scrapy local settings
|
Python
|
mit
|
comsaint/legco-watch,comsaint/legco-watch,comsaint/legco-watch,legco-watch/legco-watch,legco-watch/legco-watch,legco-watch/legco-watch,legco-watch/legco-watch,comsaint/legco-watch
|
import os
# use this for running scrapy directly
- # PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
+ PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
- # FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
+ FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
- # Use this for deploying to scrapyd, as it would be in stage/production
- FILES_STORE = '/var/lib/scrapyd/files'
|
Fix issue with scrapy local settings
|
## Code Before:
import os
# use this for running scrapy directly
# PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
# Use this for deploying to scrapyd, as it would be in stage/production
FILES_STORE = '/var/lib/scrapyd/files'
## Instruction:
Fix issue with scrapy local settings
## Code After:
import os
# use this for running scrapy directly
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
|
// ... existing code ...
# use this for running scrapy directly
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
// ... rest of the code ...
|
2a242bb6984fae5e32f117fa5ae68118621f3c95
|
pycroft/model/alembic/versions/fb8d553a7268_add_account_pattern.py
|
pycroft/model/alembic/versions/fb8d553a7268_add_account_pattern.py
|
from alembic import op
import sqlalchemy as sa
import pycroft
# revision identifiers, used by Alembic.
revision = 'fb8d553a7268'
down_revision = '0b69e80a9388'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('account_pattern',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('pattern', sa.String(), nullable=False),
sa.Column('account_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['account_id'], ['account.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('account_pattern')
# ### end Alembic commands ###
|
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'fb8d553a7268'
down_revision = '0b69e80a9388'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('account_pattern',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('pattern', sa.String(), nullable=False),
sa.Column('account_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['account_id'], ['account.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('account_pattern')
# ### end Alembic commands ###
|
Remove unnecessary pycroft import in migration
|
Remove unnecessary pycroft import in migration
|
Python
|
apache-2.0
|
agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft
|
from alembic import op
import sqlalchemy as sa
- import pycroft
-
# revision identifiers, used by Alembic.
revision = 'fb8d553a7268'
down_revision = '0b69e80a9388'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('account_pattern',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('pattern', sa.String(), nullable=False),
sa.Column('account_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['account_id'], ['account.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('account_pattern')
# ### end Alembic commands ###
|
Remove unnecessary pycroft import in migration
|
## Code Before:
from alembic import op
import sqlalchemy as sa
import pycroft
# revision identifiers, used by Alembic.
revision = 'fb8d553a7268'
down_revision = '0b69e80a9388'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('account_pattern',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('pattern', sa.String(), nullable=False),
sa.Column('account_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['account_id'], ['account.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('account_pattern')
# ### end Alembic commands ###
## Instruction:
Remove unnecessary pycroft import in migration
## Code After:
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'fb8d553a7268'
down_revision = '0b69e80a9388'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('account_pattern',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('pattern', sa.String(), nullable=False),
sa.Column('account_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['account_id'], ['account.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('account_pattern')
# ### end Alembic commands ###
|
// ... existing code ...
import sqlalchemy as sa
// ... rest of the code ...
|
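The usual rationale for this kind of cleanup is that migration scripts should stay importable against any checkout of the code, so they avoid depending on the application package at all. A small illustrative checker (a hypothetical helper, not part of pycroft) that flags plain import statements with the standard `ast` module:
import ast

SOURCE = '''
from alembic import op
import sqlalchemy as sa
import pycroft
'''

tree = ast.parse(SOURCE)
imported = [alias.name
            for node in ast.walk(tree) if isinstance(node, ast.Import)
            for alias in node.names]
print('pycroft' in imported)  # True -> the migration still imports the app package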
2de7222ffd3d9f4cc7971ad142aa2542eb7ca117
|
yunity/stores/models.py
|
yunity/stores/models.py
|
from config import settings
from yunity.base.base_models import BaseModel, LocationModel
from django.db import models
class PickupDate(BaseModel):
date = models.DateTimeField()
collectors = models.ManyToManyField(settings.AUTH_USER_MODEL)
store = models.ForeignKey('stores.store', related_name='pickupdates', on_delete=models.CASCADE)
max_collectors = models.IntegerField(null=True)
class Store(BaseModel, LocationModel):
group = models.ForeignKey('groups.Group', on_delete=models.CASCADE)
name = models.TextField()
description = models.TextField(null=True)
|
from config import settings
from yunity.base.base_models import BaseModel, LocationModel
from django.db import models
class PickupDate(BaseModel):
date = models.DateTimeField()
collectors = models.ManyToManyField(settings.AUTH_USER_MODEL)
store = models.ForeignKey('stores.store', related_name='pickupdates', on_delete=models.CASCADE)
max_collectors = models.IntegerField(null=True)
class Store(BaseModel, LocationModel):
group = models.ForeignKey('groups.Group', on_delete=models.CASCADE, related_name='store')
name = models.TextField()
description = models.TextField(null=True)
|
Add related name for group of store
|
Add related name for group of store
|
Python
|
agpl-3.0
|
yunity/yunity-core,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend
|
from config import settings
from yunity.base.base_models import BaseModel, LocationModel
from django.db import models
class PickupDate(BaseModel):
date = models.DateTimeField()
collectors = models.ManyToManyField(settings.AUTH_USER_MODEL)
store = models.ForeignKey('stores.store', related_name='pickupdates', on_delete=models.CASCADE)
max_collectors = models.IntegerField(null=True)
class Store(BaseModel, LocationModel):
- group = models.ForeignKey('groups.Group', on_delete=models.CASCADE)
+ group = models.ForeignKey('groups.Group', on_delete=models.CASCADE, related_name='store')
name = models.TextField()
description = models.TextField(null=True)
|
Add related name for group of store
|
## Code Before:
from config import settings
from yunity.base.base_models import BaseModel, LocationModel
from django.db import models
class PickupDate(BaseModel):
date = models.DateTimeField()
collectors = models.ManyToManyField(settings.AUTH_USER_MODEL)
store = models.ForeignKey('stores.store', related_name='pickupdates', on_delete=models.CASCADE)
max_collectors = models.IntegerField(null=True)
class Store(BaseModel, LocationModel):
group = models.ForeignKey('groups.Group', on_delete=models.CASCADE)
name = models.TextField()
description = models.TextField(null=True)
## Instruction:
Add related name for group of store
## Code After:
from config import settings
from yunity.base.base_models import BaseModel, LocationModel
from django.db import models
class PickupDate(BaseModel):
date = models.DateTimeField()
collectors = models.ManyToManyField(settings.AUTH_USER_MODEL)
store = models.ForeignKey('stores.store', related_name='pickupdates', on_delete=models.CASCADE)
max_collectors = models.IntegerField(null=True)
class Store(BaseModel, LocationModel):
group = models.ForeignKey('groups.Group', on_delete=models.CASCADE, related_name='store')
name = models.TextField()
description = models.TextField(null=True)
|
...
class Store(BaseModel, LocationModel):
group = models.ForeignKey('groups.Group', on_delete=models.CASCADE, related_name='store')
name = models.TextField()
...
|
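What `related_name='store'` adds is the reverse accessor on Group. A usage sketch: it requires a configured Django project containing these models, the import path is assumed from the 'groups.Group' reference above, and despite the singular name the accessor is a manager, since many stores can point at one group.
from yunity.groups.models import Group  # path assumed, not shown in the commit

group = Group.objects.first()  # assumes at least one Group row exists
stores = group.store.all()     # every Store whose FK targets this group
count = group.store.count()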
33e40319b5d670c3fa1a1423bf7eed1865115d5c
|
sitetools/venv_hook/sitecustomize.py
|
sitetools/venv_hook/sitecustomize.py
|
import imp
import os
import sys
import warnings
try:
try:
import sitetools._startup
except ImportError:
# Pull in the sitetools that goes with this sitecustomize.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Let this ImportError raise.
import sitetools._startup
except Exception as e:
warnings.warn("Error while importing sitetools._startup: %s" % e)
# Be a good citizen and find the next sitecustomize module.
my_path = os.path.dirname(os.path.abspath(__file__))
clean_path = [x for x in sys.path if os.path.abspath(x) != my_path]
try:
args = imp.find_module('sitecustomize', clean_path)
except ImportError:
pass
else:
imp.load_module('sitecustomize', *args)
|
import imp
import os
import sys
import warnings
try:
try:
import sitetools._startup
except ImportError:
# Pull in the sitetools that goes with this sitecustomize.
sys.path.append(os.path.abspath(os.path.join(
__file__,
'..', '..', '..'
)))
# Let this ImportError raise.
import sitetools._startup
except Exception as e:
warnings.warn("Error while importing sitetools._startup: %s" % e)
# Be a good citizen and find the next sitecustomize module.
my_path = os.path.dirname(os.path.abspath(__file__))
clean_path = [x for x in sys.path if os.path.abspath(x) != my_path]
try:
args = imp.find_module('sitecustomize', clean_path)
except ImportError:
pass
else:
imp.load_module('sitecustomize', *args)
|
Adjust venv_hook to work in new location
|
Adjust venv_hook to work in new location
|
Python
|
bsd-3-clause
|
westernx/sitetools,westernx/sitetools,mikeboers/sitetools
|
import imp
import os
import sys
import warnings
try:
try:
import sitetools._startup
except ImportError:
# Pull in the sitetools that goes with this sitecustomize.
- sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+ sys.path.append(os.path.abspath(os.path.join(
+ __file__,
+ '..', '..', '..'
+ )))
# Let this ImportError raise.
import sitetools._startup
except Exception as e:
warnings.warn("Error while importing sitetools._startup: %s" % e)
# Be a good citizen and find the next sitecustomize module.
my_path = os.path.dirname(os.path.abspath(__file__))
clean_path = [x for x in sys.path if os.path.abspath(x) != my_path]
try:
args = imp.find_module('sitecustomize', clean_path)
except ImportError:
pass
else:
imp.load_module('sitecustomize', *args)
|
Adjust venv_hook to work in new location
|
## Code Before:
import imp
import os
import sys
import warnings
try:
try:
import sitetools._startup
except ImportError:
# Pull in the sitetools that goes with this sitecustomize.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Let this ImportError raise.
import sitetools._startup
except Exception as e:
warnings.warn("Error while importing sitetools._startup: %s" % e)
# Be a good citizen and find the next sitecustomize module.
my_path = os.path.dirname(os.path.abspath(__file__))
clean_path = [x for x in sys.path if os.path.abspath(x) != my_path]
try:
args = imp.find_module('sitecustomize', clean_path)
except ImportError:
pass
else:
imp.load_module('sitecustomize', *args)
## Instruction:
Adjust venv_hook to work in new location
## Code After:
import imp
import os
import sys
import warnings
try:
try:
import sitetools._startup
except ImportError:
# Pull in the sitetools that goes with this sitecustomize.
sys.path.append(os.path.abspath(os.path.join(
__file__,
'..', '..', '..'
)))
# Let this ImportError raise.
import sitetools._startup
except Exception as e:
warnings.warn("Error while importing sitetools._startup: %s" % e)
# Be a good citizen and find the next sitecustomize module.
my_path = os.path.dirname(os.path.abspath(__file__))
clean_path = [x for x in sys.path if os.path.abspath(x) != my_path]
try:
args = imp.find_module('sitecustomize', clean_path)
except ImportError:
pass
else:
imp.load_module('sitecustomize', *args)
|
# ... existing code ...
# Pull in the sitetools that goes with this sitecustomize.
sys.path.append(os.path.abspath(os.path.join(
__file__,
'..', '..', '..'
)))
# ... rest of the code ...
|
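The new path arithmetic reads oddly ('..' components joined onto a file name), but `os.path.abspath` normalises the result to exactly three levels above the file. A runnable check of the arithmetic (the example path is made up; POSIX paths assumed):
import os.path

f = '/opt/tools/sitetools/venv_hook/sitecustomize.py'
print(os.path.abspath(os.path.join(f, '..', '..', '..')))
# /opt/tools -- strips sitecustomize.py, venv_hook/ and sitetools/,
# one level higher than the old dirname(dirname(abspath(__file__))),
# matching the hook's new location on disk.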
c07bacb73eec4b963ec53c067f23385dad246fb6
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='[email protected]',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder']
)
|
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='[email protected]',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
Add classifiers to zencoder-py package
|
Add classifiers to zencoder-py package
|
Python
|
mit
|
zencoder/zencoder-py
|
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='[email protected]',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
- packages=['zencoder']
+ packages=['zencoder'],
+ platforms='any',
+ classifiers=[
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2.5',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Topic :: Software Development :: Libraries :: Python Modules'
+ ]
)
|
Add classifiers to zencoder-py package
|
## Code Before:
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='[email protected]',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder']
)
## Instruction:
Add classifiers to zencoder-py package
## Code After:
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='[email protected]',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
// ... existing code ...
install_requires=['httplib2'],
packages=['zencoder'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
// ... rest of the code ...
|
67d7ce2d9e8ffe26f5f5a97aca9cfb99c8914f3e
|
us_ignite/common/tests/utils.py
|
us_ignite/common/tests/utils.py
|
from django.core.urlresolvers import reverse
def get_login_url(url):
"""Returns an expected login URL."""
return ('%s?next=%s' % (reverse('auth_login'), url))
|
from django.core.urlresolvers import reverse
from django.contrib.messages.storage.base import BaseStorage, Message
def get_login_url(url):
"""Returns an expected login URL."""
return ('%s?next=%s' % (reverse('auth_login'), url))
class TestMessagesBackend(BaseStorage):
def __init__(self, request, *args, **kwargs):
self._loaded_data = []
super(TestMessagesBackend, self).__init__(request, *args, **kwargs)
def add(self, level, message, extra_tags=''):
self._loaded_data.append(
Message(level, message, extra_tags=extra_tags))
|
Add ``TestMessagesBackend`` for testing ``django.contrib.messages``.
|
Add ``TestMessagesBackend`` for testing ``django.contrib.messages``.
When unit testing a Django view, the ``messages`` middleware
will be missing. This backend will provision a simple
messaging middleware.
Usage::
from django.test import client
from us_ignite.common.tests import utils
factory = client.RequestFactory()
request = factory.get('/')
request._messages = utils.TestMessagesBackend(request)
The messages will be added to this simple backend.
|
Python
|
bsd-3-clause
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
from django.core.urlresolvers import reverse
+ from django.contrib.messages.storage.base import BaseStorage, Message
def get_login_url(url):
"""Returns an expected login URL."""
return ('%s?next=%s' % (reverse('auth_login'), url))
+
+ class TestMessagesBackend(BaseStorage):
+ def __init__(self, request, *args, **kwargs):
+ self._loaded_data = []
+ super(TestMessagesBackend, self).__init__(request, *args, **kwargs)
+
+ def add(self, level, message, extra_tags=''):
+ self._loaded_data.append(
+ Message(level, message, extra_tags=extra_tags))
+
|
Add ``TestMessagesBackend`` for testing ``django.contrib.messages``.
|
## Code Before:
from django.core.urlresolvers import reverse
def get_login_url(url):
"""Returns an expected login URL."""
return ('%s?next=%s' % (reverse('auth_login'), url))
## Instruction:
Add ``TestMessagesBackend`` for testing ``django.contrib.messages``.
## Code After:
from django.core.urlresolvers import reverse
from django.contrib.messages.storage.base import BaseStorage, Message
def get_login_url(url):
"""Returns an expected login URL."""
return ('%s?next=%s' % (reverse('auth_login'), url))
class TestMessagesBackend(BaseStorage):
def __init__(self, request, *args, **kwargs):
self._loaded_data = []
super(TestMessagesBackend, self).__init__(request, *args, **kwargs)
def add(self, level, message, extra_tags=''):
self._loaded_data.append(
Message(level, message, extra_tags=extra_tags))
|
// ... existing code ...
from django.core.urlresolvers import reverse
from django.contrib.messages.storage.base import BaseStorage, Message
// ... modified code ...
return ('%s?next=%s' % (reverse('auth_login'), url))
class TestMessagesBackend(BaseStorage):
def __init__(self, request, *args, **kwargs):
self._loaded_data = []
super(TestMessagesBackend, self).__init__(request, *args, **kwargs)
def add(self, level, message, extra_tags=''):
self._loaded_data.append(
Message(level, message, extra_tags=extra_tags))
// ... rest of the code ...
|
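Extending the usage from the commit message into a full round trip (illustrative only; assumes Django settings are configured for the test run):
from django.contrib import messages
from django.test import client

from us_ignite.common.tests import utils

factory = client.RequestFactory()
request = factory.get('/')
request._messages = utils.TestMessagesBackend(request)

messages.error(request, 'boom')
# the fake backend simply accumulates Message objects in memory
assert request._messages._loaded_data[0].message == 'boom'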
7f1a58f9faacb0bb0e95c2527a348195742eb866
|
tornado/test/autoreload_test.py
|
tornado/test/autoreload_test.py
|
from __future__ import absolute_import, division, print_function
import os
import subprocess
from subprocess import Popen
import sys
from tempfile import mkdtemp
from tornado.test.util import unittest
MAIN = """\
import os
import sys
from tornado import autoreload
# This import will fail if path is not set up correctly
import testapp
print('Starting')
if 'TESTAPP_STARTED' not in os.environ:
os.environ['TESTAPP_STARTED'] = '1'
sys.stdout.flush()
autoreload._reload()
"""
class AutoreloadTest(unittest.TestCase):
def test_reload_module(self):
# Create temporary test application
path = mkdtemp()
os.mkdir(os.path.join(path, 'testapp'))
open(os.path.join(path, 'testapp/__init__.py'), 'w').close()
with open(os.path.join(path, 'testapp/__main__.py'), 'w') as f:
f.write(MAIN)
# Make sure the tornado module under test is available to the test
# application
pythonpath = os.getcwd()
if 'PYTHONPATH' in os.environ:
pythonpath += os.pathsep + os.environ['PYTHONPATH']
p = Popen([sys.executable, '-m', 'testapp'], stdout=subprocess.PIPE,
cwd=path, env=dict(os.environ, PYTHONPATH=pythonpath))
out = p.communicate()[0].decode()
self.assertEqual(out, 'Starting\nStarting\n')
|
from __future__ import absolute_import, division, print_function
import os
import subprocess
from subprocess import Popen
import sys
from tempfile import mkdtemp
from tornado.test.util import unittest
MAIN = """\
import os
import sys
from tornado import autoreload
# This import will fail if path is not set up correctly
import testapp
print('Starting')
if 'TESTAPP_STARTED' not in os.environ:
os.environ['TESTAPP_STARTED'] = '1'
sys.stdout.flush()
autoreload._reload()
"""
class AutoreloadTest(unittest.TestCase):
def test_reload_module(self):
# Create temporary test application
path = mkdtemp()
os.mkdir(os.path.join(path, 'testapp'))
open(os.path.join(path, 'testapp/__init__.py'), 'w').close()
with open(os.path.join(path, 'testapp/__main__.py'), 'w') as f:
f.write(MAIN)
# Make sure the tornado module under test is available to the test
# application
pythonpath = os.getcwd()
if 'PYTHONPATH' in os.environ:
pythonpath += os.pathsep + os.environ['PYTHONPATH']
p = Popen(
[sys.executable, '-m', 'testapp'], stdout=subprocess.PIPE,
cwd=path, env=dict(os.environ, PYTHONPATH=pythonpath),
universal_newlines=True)
out = p.communicate()[0]
self.assertEqual(out, 'Starting\nStarting\n')
|
Fix newline handling in autoreload test
|
Fix newline handling in autoreload test
|
Python
|
apache-2.0
|
SuminAndrew/tornado,mivade/tornado,legnaleurc/tornado,tornadoweb/tornado,ifduyue/tornado,bdarnell/tornado,NoyaInRain/tornado,bdarnell/tornado,ajdavis/tornado,NoyaInRain/tornado,bdarnell/tornado,eklitzke/tornado,wujuguang/tornado,allenl203/tornado,SuminAndrew/tornado,Lancher/tornado,Lancher/tornado,NoyaInRain/tornado,NoyaInRain/tornado,lilydjwg/tornado,allenl203/tornado,ajdavis/tornado,lilydjwg/tornado,hhru/tornado,SuminAndrew/tornado,ajdavis/tornado,wujuguang/tornado,wujuguang/tornado,dongpinglai/my_tornado,SuminAndrew/tornado,NoyaInRain/tornado,lilydjwg/tornado,allenl203/tornado,Lancher/tornado,mivade/tornado,dongpinglai/my_tornado,eklitzke/tornado,legnaleurc/tornado,hhru/tornado,wujuguang/tornado,bdarnell/tornado,mivade/tornado,dongpinglai/my_tornado,ifduyue/tornado,NoyaInRain/tornado,allenl203/tornado,mivade/tornado,hhru/tornado,dongpinglai/my_tornado,hhru/tornado,tornadoweb/tornado,ifduyue/tornado,mivade/tornado,lilydjwg/tornado,Lancher/tornado,dongpinglai/my_tornado,bdarnell/tornado,ifduyue/tornado,eklitzke/tornado,Lancher/tornado,allenl203/tornado,tornadoweb/tornado,eklitzke/tornado,ajdavis/tornado,ajdavis/tornado,tornadoweb/tornado,legnaleurc/tornado,legnaleurc/tornado,SuminAndrew/tornado,ifduyue/tornado,legnaleurc/tornado,hhru/tornado,eklitzke/tornado,wujuguang/tornado,dongpinglai/my_tornado
|
from __future__ import absolute_import, division, print_function
import os
import subprocess
from subprocess import Popen
import sys
from tempfile import mkdtemp
from tornado.test.util import unittest
MAIN = """\
import os
import sys
from tornado import autoreload
# This import will fail if path is not set up correctly
import testapp
print('Starting')
if 'TESTAPP_STARTED' not in os.environ:
os.environ['TESTAPP_STARTED'] = '1'
sys.stdout.flush()
autoreload._reload()
"""
class AutoreloadTest(unittest.TestCase):
def test_reload_module(self):
# Create temporary test application
path = mkdtemp()
os.mkdir(os.path.join(path, 'testapp'))
open(os.path.join(path, 'testapp/__init__.py'), 'w').close()
with open(os.path.join(path, 'testapp/__main__.py'), 'w') as f:
f.write(MAIN)
# Make sure the tornado module under test is available to the test
# application
pythonpath = os.getcwd()
if 'PYTHONPATH' in os.environ:
pythonpath += os.pathsep + os.environ['PYTHONPATH']
+ p = Popen(
- p = Popen([sys.executable, '-m', 'testapp'], stdout=subprocess.PIPE,
+ [sys.executable, '-m', 'testapp'], stdout=subprocess.PIPE,
- cwd=path, env=dict(os.environ, PYTHONPATH=pythonpath))
+ cwd=path, env=dict(os.environ, PYTHONPATH=pythonpath),
+ universal_newlines=True)
- out = p.communicate()[0].decode()
+ out = p.communicate()[0]
self.assertEqual(out, 'Starting\nStarting\n')
|
Fix newline handling in autoreload test
|
## Code Before:
from __future__ import absolute_import, division, print_function
import os
import subprocess
from subprocess import Popen
import sys
from tempfile import mkdtemp
from tornado.test.util import unittest
MAIN = """\
import os
import sys
from tornado import autoreload
# This import will fail if path is not set up correctly
import testapp
print('Starting')
if 'TESTAPP_STARTED' not in os.environ:
os.environ['TESTAPP_STARTED'] = '1'
sys.stdout.flush()
autoreload._reload()
"""
class AutoreloadTest(unittest.TestCase):
def test_reload_module(self):
# Create temporary test application
path = mkdtemp()
os.mkdir(os.path.join(path, 'testapp'))
open(os.path.join(path, 'testapp/__init__.py'), 'w').close()
with open(os.path.join(path, 'testapp/__main__.py'), 'w') as f:
f.write(MAIN)
# Make sure the tornado module under test is available to the test
# application
pythonpath = os.getcwd()
if 'PYTHONPATH' in os.environ:
pythonpath += os.pathsep + os.environ['PYTHONPATH']
p = Popen([sys.executable, '-m', 'testapp'], stdout=subprocess.PIPE,
cwd=path, env=dict(os.environ, PYTHONPATH=pythonpath))
out = p.communicate()[0].decode()
self.assertEqual(out, 'Starting\nStarting\n')
## Instruction:
Fix newline handling in autoreload test
## Code After:
from __future__ import absolute_import, division, print_function
import os
import subprocess
from subprocess import Popen
import sys
from tempfile import mkdtemp
from tornado.test.util import unittest
MAIN = """\
import os
import sys
from tornado import autoreload
# This import will fail if path is not set up correctly
import testapp
print('Starting')
if 'TESTAPP_STARTED' not in os.environ:
os.environ['TESTAPP_STARTED'] = '1'
sys.stdout.flush()
autoreload._reload()
"""
class AutoreloadTest(unittest.TestCase):
def test_reload_module(self):
# Create temporary test application
path = mkdtemp()
os.mkdir(os.path.join(path, 'testapp'))
open(os.path.join(path, 'testapp/__init__.py'), 'w').close()
with open(os.path.join(path, 'testapp/__main__.py'), 'w') as f:
f.write(MAIN)
# Make sure the tornado module under test is available to the test
# application
pythonpath = os.getcwd()
if 'PYTHONPATH' in os.environ:
pythonpath += os.pathsep + os.environ['PYTHONPATH']
p = Popen(
[sys.executable, '-m', 'testapp'], stdout=subprocess.PIPE,
cwd=path, env=dict(os.environ, PYTHONPATH=pythonpath),
universal_newlines=True)
out = p.communicate()[0]
self.assertEqual(out, 'Starting\nStarting\n')
|
...
p = Popen(
[sys.executable, '-m', 'testapp'], stdout=subprocess.PIPE,
cwd=path, env=dict(os.environ, PYTHONPATH=pythonpath),
universal_newlines=True)
out = p.communicate()[0]
self.assertEqual(out, 'Starting\nStarting\n')
...
|
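Why `universal_newlines=True` fixes the newline handling: `communicate()` then returns text with platform line endings translated to '\n', whereas the old bytes-then-`.decode()` path keeps any '\r\n' emitted on Windows and breaks the equality check. A standalone demonstration:
import subprocess
import sys

p = subprocess.Popen(
    [sys.executable, '-c', "print('Starting'); print('Starting')"],
    stdout=subprocess.PIPE, universal_newlines=True)
out = p.communicate()[0]
assert out == 'Starting\nStarting\n'  # holds on Windows and POSIX alike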
56e764835e75035452a6a1ea06c386ec61dbe872
|
src/rinoh/stylesheets/__init__.py
|
src/rinoh/stylesheets/__init__.py
|
import inspect
import os
import sys
from .. import DATA_PATH
from ..style import StyleSheetFile
from .matcher import matcher
__all__ = ['matcher', 'sphinx', 'sphinx_base14']
STYLESHEETS_PATH = os.path.join(DATA_PATH, 'stylesheets')
def path(filename):
return os.path.join(STYLESHEETS_PATH, filename)
sphinx = StyleSheetFile(path('sphinx.rts'))
sphinx_article = StyleSheetFile(path('sphinx_article.rts'))
sphinx_base14 = StyleSheetFile(path('base14.rts'))
# generate docstrings for the StyleSheet instances
for name, stylesheet in inspect.getmembers(sys.modules[__name__]):
if not isinstance(stylesheet, StyleSheetFile):
continue
stylesheet.__doc__ = (':entry point name: ``{}``\n\n{}'
.format(stylesheet, stylesheet.description))
|
import inspect
import os
import sys
from .. import DATA_PATH
from ..style import StyleSheetFile
from .matcher import matcher
__all__ = ['matcher', 'sphinx', 'sphinx_base14']
STYLESHEETS_PATH = os.path.join(DATA_PATH, 'stylesheets')
def path(filename):
return os.path.join(STYLESHEETS_PATH, filename)
sphinx = StyleSheetFile(path('sphinx.rts'))
sphinx_article = StyleSheetFile(path('sphinx_article.rts'))
sphinx_base14 = StyleSheetFile(path('base14.rts'))
# generate docstrings for the StyleSheet instances
for name, stylesheet in inspect.getmembers(sys.modules[__name__]):
if not isinstance(stylesheet, StyleSheetFile):
continue
stylesheet.__doc__ = ('{}\n\nEntry point name: ``{}``'
.format(stylesheet.description, stylesheet))
|
Fix the auto-generated docstrings of style sheets
|
Fix the auto-generated docstrings of style sheets
|
Python
|
agpl-3.0
|
brechtm/rinohtype,brechtm/rinohtype,brechtm/rinohtype
|
import inspect
import os
import sys
from .. import DATA_PATH
from ..style import StyleSheetFile
from .matcher import matcher
__all__ = ['matcher', 'sphinx', 'sphinx_base14']
STYLESHEETS_PATH = os.path.join(DATA_PATH, 'stylesheets')
def path(filename):
return os.path.join(STYLESHEETS_PATH, filename)
sphinx = StyleSheetFile(path('sphinx.rts'))
sphinx_article = StyleSheetFile(path('sphinx_article.rts'))
sphinx_base14 = StyleSheetFile(path('base14.rts'))
# generate docstrings for the StyleSheet instances
for name, stylesheet in inspect.getmembers(sys.modules[__name__]):
if not isinstance(stylesheet, StyleSheetFile):
continue
- stylesheet.__doc__ = (':entry point name: ``{}``\n\n{}'
+ stylesheet.__doc__ = ('{}\n\nEntry point name: ``{}``'
- .format(stylesheet, stylesheet.description))
+ .format(stylesheet.description, stylesheet))
|
Fix the auto-generated docstrings of style sheets
|
## Code Before:
import inspect
import os
import sys
from .. import DATA_PATH
from ..style import StyleSheetFile
from .matcher import matcher
__all__ = ['matcher', 'sphinx', 'sphinx_base14']
STYLESHEETS_PATH = os.path.join(DATA_PATH, 'stylesheets')
def path(filename):
return os.path.join(STYLESHEETS_PATH, filename)
sphinx = StyleSheetFile(path('sphinx.rts'))
sphinx_article = StyleSheetFile(path('sphinx_article.rts'))
sphinx_base14 = StyleSheetFile(path('base14.rts'))
# generate docstrings for the StyleSheet instances
for name, stylesheet in inspect.getmembers(sys.modules[__name__]):
if not isinstance(stylesheet, StyleSheetFile):
continue
stylesheet.__doc__ = (':entry point name: ``{}``\n\n{}'
.format(stylesheet, stylesheet.description))
## Instruction:
Fix the auto-generated docstrings of style sheets
## Code After:
import inspect
import os
import sys
from .. import DATA_PATH
from ..style import StyleSheetFile
from .matcher import matcher
__all__ = ['matcher', 'sphinx', 'sphinx_base14']
STYLESHEETS_PATH = os.path.join(DATA_PATH, 'stylesheets')
def path(filename):
return os.path.join(STYLESHEETS_PATH, filename)
sphinx = StyleSheetFile(path('sphinx.rts'))
sphinx_article = StyleSheetFile(path('sphinx_article.rts'))
sphinx_base14 = StyleSheetFile(path('base14.rts'))
# generate docstrings for the StyleSheet instances
for name, stylesheet in inspect.getmembers(sys.modules[__name__]):
if not isinstance(stylesheet, StyleSheetFile):
continue
stylesheet.__doc__ = ('{}\n\nEntry point name: ``{}``'
.format(stylesheet.description, stylesheet))
|
# ... existing code ...
continue
stylesheet.__doc__ = ('{}\n\nEntry point name: ``{}``'
.format(stylesheet.description, stylesheet))
# ... rest of the code ...
|
e9f25dd0c9028613ef7317ad3a8287dc60b9a217
|
slave/skia_slave_scripts/chromeos_install.py
|
slave/skia_slave_scripts/chromeos_install.py
|
from build_step import BuildStep
from chromeos_build_step import ChromeOSBuildStep
from install import Install
from utils import ssh_utils
import os
import sys
class ChromeOSInstall(ChromeOSBuildStep, Install):
def _PutSCP(self, executable):
ssh_utils.PutSCP(local_path=os.path.join('out', 'config',
'chromeos-' + self._args['board'],
self._configuration, executable),
remote_path='/usr/local/bin/skia_%s' % executable,
username=self._ssh_username,
host=self._ssh_host,
port=self._ssh_port)
def _Run(self):
super(ChromeOSInstall, self)._Run()
self._PutSCP('tests')
self._PutSCP('gm')
self._PutSCP('render_pictures')
self._PutSCP('render_pdfs')
self._PutSCP('bench')
self._PutSCP('bench_pictures')
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(ChromeOSInstall))
|
from build_step import BuildStep
from chromeos_build_step import ChromeOSBuildStep
from install import Install
from utils import ssh_utils
import os
import sys
class ChromeOSInstall(ChromeOSBuildStep, Install):
def _PutSCP(self, executable):
# First, make sure that the program isn't running.
ssh_utils.RunSSH(self._ssh_username, self._ssh_host, self._ssh_port,
['killall', 'skia_%s' % executable])
ssh_utils.PutSCP(local_path=os.path.join('out', 'config',
'chromeos-' + self._args['board'],
self._configuration, executable),
remote_path='/usr/local/bin/skia_%s' % executable,
username=self._ssh_username,
host=self._ssh_host,
port=self._ssh_port)
def _Run(self):
super(ChromeOSInstall, self)._Run()
self._PutSCP('tests')
self._PutSCP('gm')
self._PutSCP('render_pictures')
self._PutSCP('render_pdfs')
self._PutSCP('bench')
self._PutSCP('bench_pictures')
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(ChromeOSInstall))
|
Kill running Skia processes in ChromeOS Install step
|
Kill running Skia processes in ChromeOS Install step
(RunBuilders:Test-ChromeOS-Alex-GMA3150-x86-Debug,Test-ChromeOS-Alex-GMA3150-x86-Release,Perf-ChromeOS-Alex-GMA3150-x86-Release)
[email protected]
Review URL: https://codereview.chromium.org/17599009
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9748 2bbb7eff-a529-9590-31e7-b0007b416f81
|
Python
|
bsd-3-clause
|
google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot
|
from build_step import BuildStep
from chromeos_build_step import ChromeOSBuildStep
from install import Install
from utils import ssh_utils
import os
import sys
class ChromeOSInstall(ChromeOSBuildStep, Install):
def _PutSCP(self, executable):
+ # First, make sure that the program isn't running.
+ ssh_utils.RunSSH(self._ssh_username, self._ssh_host, self._ssh_port,
+ ['killall', 'skia_%s' % executable])
ssh_utils.PutSCP(local_path=os.path.join('out', 'config',
'chromeos-' + self._args['board'],
self._configuration, executable),
remote_path='/usr/local/bin/skia_%s' % executable,
username=self._ssh_username,
host=self._ssh_host,
port=self._ssh_port)
def _Run(self):
super(ChromeOSInstall, self)._Run()
self._PutSCP('tests')
self._PutSCP('gm')
self._PutSCP('render_pictures')
self._PutSCP('render_pdfs')
self._PutSCP('bench')
self._PutSCP('bench_pictures')
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(ChromeOSInstall))
|
Kill running Skia processes in ChromeOS Install step
|
## Code Before:
from build_step import BuildStep
from chromeos_build_step import ChromeOSBuildStep
from install import Install
from utils import ssh_utils
import os
import sys
class ChromeOSInstall(ChromeOSBuildStep, Install):
def _PutSCP(self, executable):
ssh_utils.PutSCP(local_path=os.path.join('out', 'config',
'chromeos-' + self._args['board'],
self._configuration, executable),
remote_path='/usr/local/bin/skia_%s' % executable,
username=self._ssh_username,
host=self._ssh_host,
port=self._ssh_port)
def _Run(self):
super(ChromeOSInstall, self)._Run()
self._PutSCP('tests')
self._PutSCP('gm')
self._PutSCP('render_pictures')
self._PutSCP('render_pdfs')
self._PutSCP('bench')
self._PutSCP('bench_pictures')
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(ChromeOSInstall))
## Instruction:
Kill running Skia processes in ChromeOS Install step
## Code After:
from build_step import BuildStep
from chromeos_build_step import ChromeOSBuildStep
from install import Install
from utils import ssh_utils
import os
import sys
class ChromeOSInstall(ChromeOSBuildStep, Install):
def _PutSCP(self, executable):
# First, make sure that the program isn't running.
ssh_utils.RunSSH(self._ssh_username, self._ssh_host, self._ssh_port,
['killall', 'skia_%s' % executable])
ssh_utils.PutSCP(local_path=os.path.join('out', 'config',
'chromeos-' + self._args['board'],
self._configuration, executable),
remote_path='/usr/local/bin/skia_%s' % executable,
username=self._ssh_username,
host=self._ssh_host,
port=self._ssh_port)
def _Run(self):
super(ChromeOSInstall, self)._Run()
self._PutSCP('tests')
self._PutSCP('gm')
self._PutSCP('render_pictures')
self._PutSCP('render_pdfs')
self._PutSCP('bench')
self._PutSCP('bench_pictures')
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(ChromeOSInstall))
|
// ... existing code ...
def _PutSCP(self, executable):
# First, make sure that the program isn't running.
ssh_utils.RunSSH(self._ssh_username, self._ssh_host, self._ssh_port,
['killall', 'skia_%s' % executable])
ssh_utils.PutSCP(local_path=os.path.join('out', 'config',
// ... rest of the code ...
|
96d8431cd50a50a4ba25d63fbe1718a7c0ccba18
|
wsgi/dapi/templatetags/deplink.py
|
wsgi/dapi/templatetags/deplink.py
|
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from dapi import models
register = template.Library()
@register.filter(needs_autoescape=True)
@stringfilter
def deplink(value, autoescape=None):
'''Add links for required daps'''
usedmark = ''
for mark in '< > ='.split():
split = value.split(mark)
if len(split) > 1:
usedmark = mark
break
if usedmark:
dap = split[0]
else:
dap = value
dep = dep.strip()
try:
m = models.MetaDap.objects.get(package_name=dap)
link = '<a href="' + m.get_human_link() + '">' + dap + '</a>'
except models.MetaDap.DoesNotExist:
link = '<abbr title="This dap is not on Dapi">' + dap + '</abbr>'
if usedmark:
link = link + usedmark + usedmark.join(split[1:])
return mark_safe(link)
|
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from dapi import models
register = template.Library()
@register.filter(needs_autoescape=True)
@stringfilter
def deplink(value, autoescape=None):
'''Add links for required daps'''
usedmark = ''
for mark in '< > ='.split():
split = value.split(mark)
if len(split) > 1:
usedmark = mark
break
dep = ''
if usedmark:
dap = split[0]
else:
dap = value
dep = dep.strip()
try:
m = models.MetaDap.objects.get(package_name=dap)
link = '<a href="' + m.get_human_link() + '">' + dap + '</a>'
except models.MetaDap.DoesNotExist:
link = '<abbr title="This dap is not on Dapi">' + dap + '</abbr>'
if usedmark:
link = link + usedmark + usedmark.join(split[1:])
return mark_safe(link)
|
Fix "UnboundLocalError: local variable 'dep' referenced before assignment"
|
Fix "UnboundLocalError: local variable 'dep' referenced before assignment"
|
Python
|
agpl-3.0
|
devassistant/dapi,devassistant/dapi,devassistant/dapi
|
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from dapi import models
register = template.Library()
@register.filter(needs_autoescape=True)
@stringfilter
def deplink(value, autoescape=None):
'''Add links for required daps'''
usedmark = ''
for mark in '< > ='.split():
split = value.split(mark)
if len(split) > 1:
usedmark = mark
break
+ dep = ''
if usedmark:
dap = split[0]
else:
dap = value
dep = dep.strip()
try:
m = models.MetaDap.objects.get(package_name=dap)
link = '<a href="' + m.get_human_link() + '">' + dap + '</a>'
except models.MetaDap.DoesNotExist:
link = '<abbr title="This dap is not on Dapi">' + dap + '</abbr>'
if usedmark:
link = link + usedmark + usedmark.join(split[1:])
return mark_safe(link)
|
Fix "UnboundLocalError: local variable 'dep' referenced before assignment"
|
## Code Before:
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from dapi import models
register = template.Library()
@register.filter(needs_autoescape=True)
@stringfilter
def deplink(value, autoescape=None):
'''Add links for required daps'''
usedmark = ''
for mark in '< > ='.split():
split = value.split(mark)
if len(split) > 1:
usedmark = mark
break
if usedmark:
dap = split[0]
else:
dap = value
dep = dep.strip()
try:
m = models.MetaDap.objects.get(package_name=dap)
link = '<a href="' + m.get_human_link() + '">' + dap + '</a>'
except models.MetaDap.DoesNotExist:
link = '<abbr title="This dap is not on Dapi">' + dap + '</abbr>'
if usedmark:
link = link + usedmark + usedmark.join(split[1:])
return mark_safe(link)
## Instruction:
Fix "UnboundLocalError: local variable 'dep' referenced before assignment"
## Code After:
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from dapi import models
register = template.Library()
@register.filter(needs_autoescape=True)
@stringfilter
def deplink(value, autoescape=None):
'''Add links for required daps'''
usedmark = ''
for mark in '< > ='.split():
split = value.split(mark)
if len(split) > 1:
usedmark = mark
break
dep = ''
if usedmark:
dap = split[0]
else:
dap = value
dep = dep.strip()
try:
m = models.MetaDap.objects.get(package_name=dap)
link = '<a href="' + m.get_human_link() + '">' + dap + '</a>'
except models.MetaDap.DoesNotExist:
link = '<abbr title="This dap is not on Dapi">' + dap + '</abbr>'
if usedmark:
link = link + usedmark + usedmark.join(split[1:])
return mark_safe(link)
|
# ... existing code ...
break
dep = ''
if usedmark:
# ... rest of the code ...
|
a8b1b4ca3fd4964b2349ed085e8d2350072e67b9
|
d1_libclient_python/src/d1_client/__init__.py
|
d1_libclient_python/src/d1_client/__init__.py
|
__version__ = "2.0.dev8"
__all__ = [
'cnclient',
'cnclient_1_1',
'd1baseclient',
'd1baseclient_1_1',
'd1baseclient_2_0',
'd1client',
'data_package',
'logrecorditerator',
'mnclient',
'mnclient_1_1',
'object_format_info',
'objectlistiterator',
'solr_client',
'svnrevision',
'systemmetadata',
]
|
__version__ = "2.0.dev8"
# __all__ = [
# 'cnclient',
# 'cnclient_1_1',
# 'd1baseclient',
# 'd1baseclient_1_1',
# 'd1baseclient_2_0',
# 'd1client',
# 'data_package',
# 'logrecorditerator',
# 'mnclient',
# 'mnclient_1_1',
# 'object_format_info',
# 'objectlistiterator',
# 'solr_client',
# 'svnrevision',
# 'systemmetadata',
# ]
|
Remove implicit import of symbols
|
Remove implicit import of symbols
Currently, using libclient for Python requires selecting the target node type (MN or CN) and the target DataONE API version by specifying the appropriate client, so it is better to use the library by explicitly importing only the needed clients instead of all of them.
|
Python
|
apache-2.0
|
DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python
|
__version__ = "2.0.dev8"
- __all__ = [
+ # __all__ = [
- 'cnclient',
+ # 'cnclient',
- 'cnclient_1_1',
+ # 'cnclient_1_1',
- 'd1baseclient',
+ # 'd1baseclient',
- 'd1baseclient_1_1',
+ # 'd1baseclient_1_1',
- 'd1baseclient_2_0',
+ # 'd1baseclient_2_0',
- 'd1client',
+ # 'd1client',
- 'data_package',
+ # 'data_package',
- 'logrecorditerator',
+ # 'logrecorditerator',
- 'mnclient',
+ # 'mnclient',
- 'mnclient_1_1',
+ # 'mnclient_1_1',
- 'object_format_info',
+ # 'object_format_info',
- 'objectlistiterator',
+ # 'objectlistiterator',
- 'solr_client',
+ # 'solr_client',
- 'svnrevision',
+ # 'svnrevision',
- 'systemmetadata',
+ # 'systemmetadata',
- ]
+ # ]
|
Remove implicit import of symbols
|
## Code Before:
__version__ = "2.0.dev8"
__all__ = [
'cnclient',
'cnclient_1_1',
'd1baseclient',
'd1baseclient_1_1',
'd1baseclient_2_0',
'd1client',
'data_package',
'logrecorditerator',
'mnclient',
'mnclient_1_1',
'object_format_info',
'objectlistiterator',
'solr_client',
'svnrevision',
'systemmetadata',
]
## Instruction:
Remove implicit import of symbols
## Code After:
__version__ = "2.0.dev8"
# __all__ = [
# 'cnclient',
# 'cnclient_1_1',
# 'd1baseclient',
# 'd1baseclient_1_1',
# 'd1baseclient_2_0',
# 'd1client',
# 'data_package',
# 'logrecorditerator',
# 'mnclient',
# 'mnclient_1_1',
# 'object_format_info',
# 'objectlistiterator',
# 'solr_client',
# 'svnrevision',
# 'systemmetadata',
# ]
|
...
# __all__ = [
# 'cnclient',
# 'cnclient_1_1',
# 'd1baseclient',
# 'd1baseclient_1_1',
# 'd1baseclient_2_0',
# 'd1client',
# 'data_package',
# 'logrecorditerator',
# 'mnclient',
# 'mnclient_1_1',
# 'object_format_info',
# 'objectlistiterator',
# 'solr_client',
# 'svnrevision',
# 'systemmetadata',
# ]
...
|
2e941cf1f6208b9ac2f6039681c24502b324ab5f
|
planner/models.py
|
planner/models.py
|
import datetime
from django.conf import settings
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
class Milestone(models.Model):
date = models.DateTimeField()
@python_2_unicode_compatible
class School(models.Model):
name = models.TextField()
slug = models.SlugField(max_length=256, unique=True)
url = models.URLField(unique=True)
milestones_url = models.URLField()
def __str__(self):
return self.name
class Semester(models.Model):
active = models.BooleanField(default=True)
date = models.DateField()
# XXX: This is locked in for the default value of the initial migration.
# I'm not sure what needs to be done to let me safely delete this and
# have migrations continue to work.
def current_year():
today = datetime.date.today()
return today.year
@python_2_unicode_compatible
class Student(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='students')
first_name = models.TextField()
last_name = models.TextField()
# XXX: Remove null constraint after migration.
matriculation_semester = models.ForeignKey(
Semester, null=True, on_delete=models.PROTECT)
def __str__(self):
return '{} {}'.format(self.first_name, self.last_name)
|
import datetime
from django.conf import settings
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
class Milestone(models.Model):
date = models.DateTimeField()
@python_2_unicode_compatible
class School(models.Model):
name = models.TextField()
slug = models.SlugField(max_length=256, unique=True)
url = models.URLField(unique=True)
milestones_url = models.URLField()
def __str__(self):
return self.name
@python_2_unicode_compatible
class Semester(models.Model):
active = models.BooleanField(default=True)
date = models.DateField()
def __str__(self):
return str(self.date)
# XXX: This is locked in for the default value of the initial migration.
# I'm not sure what needs to be done to let me safely delete this and
# have migrations continue to work.
def current_year():
today = datetime.date.today()
return today.year
@python_2_unicode_compatible
class Student(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='students')
first_name = models.TextField()
last_name = models.TextField()
# XXX: Remove null constraint after migration.
matriculation_semester = models.ForeignKey(
Semester, null=True, on_delete=models.PROTECT)
def __str__(self):
return '{} {}'.format(self.first_name, self.last_name)
|
Add Semester str method for admin interface.
|
Add Semester str method for admin interface.
Fixes #149
|
Python
|
bsd-2-clause
|
mblayman/lcp,mblayman/lcp,mblayman/lcp
|
import datetime
from django.conf import settings
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
class Milestone(models.Model):
date = models.DateTimeField()
@python_2_unicode_compatible
class School(models.Model):
name = models.TextField()
slug = models.SlugField(max_length=256, unique=True)
url = models.URLField(unique=True)
milestones_url = models.URLField()
def __str__(self):
return self.name
+ @python_2_unicode_compatible
class Semester(models.Model):
active = models.BooleanField(default=True)
date = models.DateField()
+
+ def __str__(self):
+ return str(self.date)
# XXX: This is locked in for the default value of the initial migration.
# I'm not sure what needs to be done to let me safely delete this and
# have migrations continue to work.
def current_year():
today = datetime.date.today()
return today.year
@python_2_unicode_compatible
class Student(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='students')
first_name = models.TextField()
last_name = models.TextField()
# XXX: Remove null constraint after migration.
matriculation_semester = models.ForeignKey(
Semester, null=True, on_delete=models.PROTECT)
def __str__(self):
return '{} {}'.format(self.first_name, self.last_name)
|
Add Semester str method for admin interface.
|
## Code Before:
import datetime
from django.conf import settings
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
class Milestone(models.Model):
date = models.DateTimeField()
@python_2_unicode_compatible
class School(models.Model):
name = models.TextField()
slug = models.SlugField(max_length=256, unique=True)
url = models.URLField(unique=True)
milestones_url = models.URLField()
def __str__(self):
return self.name
class Semester(models.Model):
active = models.BooleanField(default=True)
date = models.DateField()
# XXX: This is locked in for the default value of the initial migration.
# I'm not sure what needs to be done to let me safely delete this and
# have migrations continue to work.
def current_year():
today = datetime.date.today()
return today.year
@python_2_unicode_compatible
class Student(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='students')
first_name = models.TextField()
last_name = models.TextField()
# XXX: Remove null constraint after migration.
matriculation_semester = models.ForeignKey(
Semester, null=True, on_delete=models.PROTECT)
def __str__(self):
return '{} {}'.format(self.first_name, self.last_name)
## Instruction:
Add Semester str method for admin interface.
## Code After:
import datetime
from django.conf import settings
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
class Milestone(models.Model):
date = models.DateTimeField()
@python_2_unicode_compatible
class School(models.Model):
name = models.TextField()
slug = models.SlugField(max_length=256, unique=True)
url = models.URLField(unique=True)
milestones_url = models.URLField()
def __str__(self):
return self.name
@python_2_unicode_compatible
class Semester(models.Model):
active = models.BooleanField(default=True)
date = models.DateField()
def __str__(self):
return str(self.date)
# XXX: This is locked in for the default value of the initial migration.
# I'm not sure what needs to be done to let me safely delete this and
# have migrations continue to work.
def current_year():
today = datetime.date.today()
return today.year
@python_2_unicode_compatible
class Student(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='students')
first_name = models.TextField()
last_name = models.TextField()
# XXX: Remove null constraint after migration.
matriculation_semester = models.ForeignKey(
Semester, null=True, on_delete=models.PROTECT)
def __str__(self):
return '{} {}'.format(self.first_name, self.last_name)
|
// ... existing code ...
@python_2_unicode_compatible
class Semester(models.Model):
// ... modified code ...
date = models.DateField()
def __str__(self):
return str(self.date)
// ... rest of the code ...
|
f5af9624359523ddf67b63327d8fe85382497c47
|
pycroft/helpers/user.py
|
pycroft/helpers/user.py
|
from passlib.apps import ldap_context
import passlib.utils
ldap_context = ldap_context.copy(default="ldap_sha512_crypt")
def generate_password(length):
charset = "abcdefghijklmnopqrstuvwxyz!$%&()=.," \
":;-_#+1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ"
return passlib.utils.generate_password(length, charset)
def hash_password(plaintext_passwd):
"""Use a ldap_context to generate a RFC 2307 from given plaintext.
The ldap_context is configured to generate the very secure ldap_sha512_crypt
hashes (a crypt extension available since glibc 2.7).
"""
return ldap_context.encrypt(plaintext_passwd)
def verify_password(plaintext_password, hash):
"""Verifies a plain password string against a given password hash.
It uses a ldap_context to verify RFC 2307 hashes.
"""
try:
return ldap_context.verify(plaintext_password, hash)
except ValueError:
return False
|
from passlib.apps import ldap_context
import passlib.utils
crypt_context = ldap_context.copy(
default="ldap_sha512_crypt",
deprecated=["ldap_plaintext", "ldap_md5", "ldap_sha1", "ldap_salted_md5",
"ldap_des_crypt", "ldap_bsdi_crypt", "ldap_md5_crypt"])
def generate_password(length):
charset = "abcdefghijklmnopqrstuvwxyz!$%&()=.," \
":;-_#+1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ"
return passlib.utils.generate_password(length, charset)
def hash_password(plaintext_passwd):
"""Generate a RFC 2307 complaint hash from given plaintext.
The passlib CryptContext is configured to generate the very secure
ldap_sha512_crypt hashes (a crypt extension available since glibc 2.7).
"""
return crypt_context.encrypt(plaintext_passwd)
def verify_password(plaintext_password, hash):
"""Verifies a plain password string against a given password hash.
It uses a crypt_context to verify RFC 2307 hashes.
"""
try:
return crypt_context.verify(plaintext_password, hash)
except ValueError:
return False
|
Set deprecated password hashing schemes
|
Set deprecated password hashing schemes
|
Python
|
apache-2.0
|
agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft
|
from passlib.apps import ldap_context
import passlib.utils
- ldap_context = ldap_context.copy(default="ldap_sha512_crypt")
+ crypt_context = ldap_context.copy(
+ default="ldap_sha512_crypt",
+ deprecated=["ldap_plaintext", "ldap_md5", "ldap_sha1", "ldap_salted_md5",
+ "ldap_des_crypt", "ldap_bsdi_crypt", "ldap_md5_crypt"])
def generate_password(length):
charset = "abcdefghijklmnopqrstuvwxyz!$%&()=.," \
":;-_#+1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ"
return passlib.utils.generate_password(length, charset)
def hash_password(plaintext_passwd):
- """Use a ldap_context to generate a RFC 2307 from given plaintext.
+ """Generate a RFC 2307 complaint hash from given plaintext.
- The ldap_context is configured to generate the very secure ldap_sha512_crypt
+ The passlib CryptContext is configured to generate the very secure
- hashes (a crypt extension available since glibc 2.7).
+ ldap_sha512_crypt hashes (a crypt extension available since glibc 2.7).
"""
- return ldap_context.encrypt(plaintext_passwd)
+ return crypt_context.encrypt(plaintext_passwd)
def verify_password(plaintext_password, hash):
"""Verifies a plain password string against a given password hash.
- It uses a ldap_context to verify RFC 2307 hashes.
+ It uses a crypt_context to verify RFC 2307 hashes.
"""
try:
- return ldap_context.verify(plaintext_password, hash)
+ return crypt_context.verify(plaintext_password, hash)
except ValueError:
return False
|
Set deprecated password hashing schemes
|
## Code Before:
from passlib.apps import ldap_context
import passlib.utils
ldap_context = ldap_context.copy(default="ldap_sha512_crypt")
def generate_password(length):
charset = "abcdefghijklmnopqrstuvwxyz!$%&()=.," \
":;-_#+1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ"
return passlib.utils.generate_password(length, charset)
def hash_password(plaintext_passwd):
"""Use a ldap_context to generate a RFC 2307 from given plaintext.
The ldap_context is configured to generate the very secure ldap_sha512_crypt
hashes (a crypt extension available since glibc 2.7).
"""
return ldap_context.encrypt(plaintext_passwd)
def verify_password(plaintext_password, hash):
"""Verifies a plain password string against a given password hash.
It uses a ldap_context to verify RFC 2307 hashes.
"""
try:
return ldap_context.verify(plaintext_password, hash)
except ValueError:
return False
## Instruction:
Set deprecated password hashing schemes
## Code After:
from passlib.apps import ldap_context
import passlib.utils
crypt_context = ldap_context.copy(
default="ldap_sha512_crypt",
deprecated=["ldap_plaintext", "ldap_md5", "ldap_sha1", "ldap_salted_md5",
"ldap_des_crypt", "ldap_bsdi_crypt", "ldap_md5_crypt"])
def generate_password(length):
charset = "abcdefghijklmnopqrstuvwxyz!$%&()=.," \
":;-_#+1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ"
return passlib.utils.generate_password(length, charset)
def hash_password(plaintext_passwd):
"""Generate a RFC 2307 complaint hash from given plaintext.
The passlib CryptContext is configured to generate the very secure
ldap_sha512_crypt hashes (a crypt extension available since glibc 2.7).
"""
return crypt_context.encrypt(plaintext_passwd)
def verify_password(plaintext_password, hash):
"""Verifies a plain password string against a given password hash.
It uses a crypt_context to verify RFC 2307 hashes.
"""
try:
return crypt_context.verify(plaintext_password, hash)
except ValueError:
return False
|
...
crypt_context = ldap_context.copy(
default="ldap_sha512_crypt",
deprecated=["ldap_plaintext", "ldap_md5", "ldap_sha1", "ldap_salted_md5",
"ldap_des_crypt", "ldap_bsdi_crypt", "ldap_md5_crypt"])
...
def hash_password(plaintext_passwd):
"""Generate a RFC 2307 complaint hash from given plaintext.
The passlib CryptContext is configured to generate the very secure
ldap_sha512_crypt hashes (a crypt extension available since glibc 2.7).
"""
return crypt_context.encrypt(plaintext_passwd)
...
It uses a crypt_context to verify RFC 2307 hashes.
"""
...
try:
return crypt_context.verify(plaintext_password, hash)
except ValueError:
...
|
a3ee55cf4d9182247dcc7a42b0336c467dce9e3e
|
linter.py
|
linter.py
|
from SublimeLinter.lint import Linter, util
class Cppcheck(Linter):
cmd = (
'cppcheck',
'--template={file}:{line}: {severity}: {message}',
'--inline-suppr',
'--quiet',
'${args}',
'${file}'
)
regex = (
r'^(?P<file>(:\\|[^:])+):(?P<line>\d+):((?P<col>\d+):)?\s+'
r'((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):\s+'
r'(?P<message>.+)'
)
error_stream = util.STREAM_BOTH # linting errors are on stderr, exceptions like "file not found" on stdout
on_stderr = None # handle stderr via split_match
tempfile_suffix = '-'
defaults = {
'selector': 'source.c, source.c++',
'--std=,+': [], # example ['c99', 'c89']
'--enable=,': 'style',
}
def split_match(self, match):
"""
Return the components of the match.
We override this because included header files can cause linter errors,
and we only want errors from the linted file.
"""
if match:
if match.group('file') != self.filename:
return None
return super().split_match(match)
|
from SublimeLinter.lint import Linter, util
class Cppcheck(Linter):
cmd = (
'cppcheck',
'--template={file}:{line}:{column}:{severity}:{id}:{message}',
'--inline-suppr',
'--quiet',
'${args}',
'${file}'
)
regex = (
r'^(?P<file>(:\\|[^:])+):(?P<line>\d+):((?P<col>\d+):)'
r'((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):'
r'(?P<code>\w+):(?P<message>.+)'
)
error_stream = util.STREAM_BOTH # linting errors are on stderr, exceptions like "file not found" on stdout
on_stderr = None # handle stderr via split_match
tempfile_suffix = '-'
defaults = {
'selector': 'source.c, source.c++',
'--std=,+': [], # example ['c99', 'c89']
'--enable=,': 'style',
}
def split_match(self, match):
"""
Return the components of the match.
We override this because included header files can cause linter errors,
and we only want errors from the linted file.
"""
if match:
if match.group('file') != self.filename:
return None
return super().split_match(match)
|
Add cppcheck issue id as code field
|
Add cppcheck issue id as code field
|
Python
|
mit
|
SublimeLinter/SublimeLinter-cppcheck
|
from SublimeLinter.lint import Linter, util
class Cppcheck(Linter):
cmd = (
'cppcheck',
- '--template={file}:{line}: {severity}: {message}',
+ '--template={file}:{line}:{column}:{severity}:{id}:{message}',
'--inline-suppr',
'--quiet',
'${args}',
'${file}'
)
regex = (
- r'^(?P<file>(:\\|[^:])+):(?P<line>\d+):((?P<col>\d+):)?\s+'
+ r'^(?P<file>(:\\|[^:])+):(?P<line>\d+):((?P<col>\d+):)'
- r'((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):\s+'
+ r'((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):'
- r'(?P<message>.+)'
+ r'(?P<code>\w+):(?P<message>.+)'
)
error_stream = util.STREAM_BOTH # linting errors are on stderr, exceptions like "file not found" on stdout
on_stderr = None # handle stderr via split_match
tempfile_suffix = '-'
defaults = {
'selector': 'source.c, source.c++',
'--std=,+': [], # example ['c99', 'c89']
'--enable=,': 'style',
}
def split_match(self, match):
"""
Return the components of the match.
We override this because included header files can cause linter errors,
and we only want errors from the linted file.
"""
if match:
if match.group('file') != self.filename:
return None
return super().split_match(match)
|
Add cppcheck issue id as code field
|
## Code Before:
from SublimeLinter.lint import Linter, util
class Cppcheck(Linter):
cmd = (
'cppcheck',
'--template={file}:{line}: {severity}: {message}',
'--inline-suppr',
'--quiet',
'${args}',
'${file}'
)
regex = (
r'^(?P<file>(:\\|[^:])+):(?P<line>\d+):((?P<col>\d+):)?\s+'
r'((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):\s+'
r'(?P<message>.+)'
)
error_stream = util.STREAM_BOTH # linting errors are on stderr, exceptions like "file not found" on stdout
on_stderr = None # handle stderr via split_match
tempfile_suffix = '-'
defaults = {
'selector': 'source.c, source.c++',
'--std=,+': [], # example ['c99', 'c89']
'--enable=,': 'style',
}
def split_match(self, match):
"""
Return the components of the match.
We override this because included header files can cause linter errors,
and we only want errors from the linted file.
"""
if match:
if match.group('file') != self.filename:
return None
return super().split_match(match)
## Instruction:
Add cppcheck issue id as code field
## Code After:
from SublimeLinter.lint import Linter, util
class Cppcheck(Linter):
cmd = (
'cppcheck',
'--template={file}:{line}:{column}:{severity}:{id}:{message}',
'--inline-suppr',
'--quiet',
'${args}',
'${file}'
)
regex = (
r'^(?P<file>(:\\|[^:])+):(?P<line>\d+):((?P<col>\d+):)'
r'((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):'
r'(?P<code>\w+):(?P<message>.+)'
)
error_stream = util.STREAM_BOTH # linting errors are on stderr, exceptions like "file not found" on stdout
on_stderr = None # handle stderr via split_match
tempfile_suffix = '-'
defaults = {
'selector': 'source.c, source.c++',
'--std=,+': [], # example ['c99', 'c89']
'--enable=,': 'style',
}
def split_match(self, match):
"""
Return the components of the match.
We override this because included header files can cause linter errors,
and we only want errors from the linted file.
"""
if match:
if match.group('file') != self.filename:
return None
return super().split_match(match)
|
...
'cppcheck',
'--template={file}:{line}:{column}:{severity}:{id}:{message}',
'--inline-suppr',
...
regex = (
r'^(?P<file>(:\\|[^:])+):(?P<line>\d+):((?P<col>\d+):)'
r'((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):'
r'(?P<code>\w+):(?P<message>.+)'
)
...
|
c458b78ccecc28971ef239de5a5366bd56d2562e
|
web/portal/views/home.py
|
web/portal/views/home.py
|
from flask import redirect, url_for
from portal import app
@app.route('/', methods=['GET'])
def index():
return redirect(url_for('practices_index', _external=True))
|
from flask import redirect, url_for
from portal import app
@app.route('/', methods=['GET'])
def index():
return redirect(url_for('practices_index', _external=True, _scheme="https"))
|
Fix incorrect protocol being used when behind reverse proxy
|
Fix incorrect protocol being used when behind reverse proxy
|
Python
|
mit
|
LCBRU/genvasc_portal,LCBRU/genvasc_portal,LCBRU/genvasc_portal,LCBRU/genvasc_portal
|
from flask import redirect, url_for
from portal import app
@app.route('/', methods=['GET'])
def index():
- return redirect(url_for('practices_index', _external=True))
+ return redirect(url_for('practices_index', _external=True, _scheme="https"))
|
Fix incorrect protocol being used when behind reverse proxy
|
## Code Before:
from flask import redirect, url_for
from portal import app
@app.route('/', methods=['GET'])
def index():
return redirect(url_for('practices_index', _external=True))
## Instruction:
Fix incorrect protocol being used when behind reverse proxy
## Code After:
from flask import redirect, url_for
from portal import app
@app.route('/', methods=['GET'])
def index():
return redirect(url_for('practices_index', _external=True, _scheme="https"))
|
# ... existing code ...
def index():
return redirect(url_for('practices_index', _external=True, _scheme="https"))
# ... rest of the code ...
|
b48984747d0f33f8ad9a8721bf7489d8ff97c157
|
matador/commands/deploy_ticket.py
|
matador/commands/deploy_ticket.py
|
from .command import Command
from matador import utils
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
def _execute(self):
project = utils.project()
utils.update_repository(project)
|
from .command import Command
from matador import utils
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project)
|
Add package argument to deploy-ticket
|
Add package argument to deploy-ticket
|
Python
|
mit
|
Empiria/matador
|
from .command import Command
from matador import utils
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
+ parser.add_argument(
+ '-', '--package',
+ type=bool,
+ default=False,
+ help='Agresso environment name')
+
def _execute(self):
project = utils.project()
+ if not self.args.package:
- utils.update_repository(project)
+ utils.update_repository(project)
|
Add package argument to deploy-ticket
|
## Code Before:
from .command import Command
from matador import utils
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
def _execute(self):
project = utils.project()
utils.update_repository(project)
## Instruction:
Add package argument to deploy-ticket
## Code After:
from .command import Command
from matador import utils
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project)
|
...
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _execute(self):
...
project = utils.project()
if not self.args.package:
utils.update_repository(project)
...
|
f60b940205a5e1011ce1c9c5672cb262c4649c0b
|
app/mod_auth/forms.py
|
app/mod_auth/forms.py
|
from flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired
class LoginForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class SignupForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
password2 = PasswordField('password2', validators=[DataRequired()])
|
from flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired, ValidationError, EqualTo
from .models import User
class LoginForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class SignupForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
password2 = PasswordField('password2', validators=[DataRequired(), EqualTo('password')])
def validate_username(self, username):
user = User.query.filter_by(username=username.data).first()
if user is not None:
raise ValidationError('Username already in use. Please use a different username.')
|
Validate username and password on signup
|
Validate username and password on signup
|
Python
|
mit
|
ziel980/website,ziel980/website
|
from flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, PasswordField
- from wtforms.validators import DataRequired
+ from wtforms.validators import DataRequired, ValidationError, EqualTo
+ from .models import User
class LoginForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
+
class SignupForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
- password2 = PasswordField('password2', validators=[DataRequired()])
+ password2 = PasswordField('password2', validators=[DataRequired(), EqualTo('password')])
+ def validate_username(self, username):
+ user = User.query.filter_by(username=username.data).first()
+ if user is not None:
+ raise ValidationError('Username already in use. Please use a different username.')
+
|
Validate username and password on signup
|
## Code Before:
from flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired
class LoginForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class SignupForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
password2 = PasswordField('password2', validators=[DataRequired()])
## Instruction:
Validate username and password on signup
## Code After:
from flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired, ValidationError, EqualTo
from .models import User
class LoginForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class SignupForm(FlaskForm):
username = StringField('username', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()])
password2 = PasswordField('password2', validators=[DataRequired(), EqualTo('password')])
def validate_username(self, username):
user = User.query.filter_by(username=username.data).first()
if user is not None:
raise ValidationError('Username already in use. Please use a different username.')
|
...
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired, ValidationError, EqualTo
from .models import User
...
class SignupForm(FlaskForm):
...
password = PasswordField('password', validators=[DataRequired()])
password2 = PasswordField('password2', validators=[DataRequired(), EqualTo('password')])
def validate_username(self, username):
user = User.query.filter_by(username=username.data).first()
if user is not None:
raise ValidationError('Username already in use. Please use a different username.')
...
|
ffc1b8c83e32f4c2b5454a0ae71b9c30cc8e7596
|
toolz/tests/test_serialization.py
|
toolz/tests/test_serialization.py
|
from toolz import *
import pickle
def test_compose():
f = compose(str, sum)
g = pickle.loads(pickle.dumps(f))
assert f((1, 2)) == g((1, 2))
def test_curry():
f = curry(map)(str)
g = pickle.loads(pickle.dumps(f))
assert list(f((1, 2, 3))) == list(g((1, 2, 3)))
def test_juxt():
f = juxt(str, int, bool)
g = pickle.loads(pickle.dumps(f))
assert f(1) == g(1)
assert f.funcs == g.funcs
|
from toolz import *
import pickle
def test_compose():
f = compose(str, sum)
g = pickle.loads(pickle.dumps(f))
assert f((1, 2)) == g((1, 2))
def test_curry():
f = curry(map)(str)
g = pickle.loads(pickle.dumps(f))
assert list(f((1, 2, 3))) == list(g((1, 2, 3)))
def test_juxt():
f = juxt(str, int, bool)
g = pickle.loads(pickle.dumps(f))
assert f(1) == g(1)
assert f.funcs == g.funcs
def test_complement():
f = complement(bool)
assert f(True) is False
assert f(False) is True
g = pickle.loads(pickle.dumps(f))
assert f(True) == g(True)
assert f(False) == g(False)
|
Add serialization test for `complement`
|
Add serialization test for `complement`
|
Python
|
bsd-3-clause
|
pombredanne/toolz,simudream/toolz,machinelearningdeveloper/toolz,quantopian/toolz,jdmcbr/toolz,bartvm/toolz,jcrist/toolz,cpcloud/toolz,pombredanne/toolz,quantopian/toolz,simudream/toolz,machinelearningdeveloper/toolz,bartvm/toolz,llllllllll/toolz,jdmcbr/toolz,llllllllll/toolz,cpcloud/toolz,jcrist/toolz
|
from toolz import *
import pickle
def test_compose():
f = compose(str, sum)
g = pickle.loads(pickle.dumps(f))
assert f((1, 2)) == g((1, 2))
def test_curry():
f = curry(map)(str)
g = pickle.loads(pickle.dumps(f))
assert list(f((1, 2, 3))) == list(g((1, 2, 3)))
def test_juxt():
f = juxt(str, int, bool)
g = pickle.loads(pickle.dumps(f))
assert f(1) == g(1)
assert f.funcs == g.funcs
+
+ def test_complement():
+ f = complement(bool)
+ assert f(True) is False
+ assert f(False) is True
+ g = pickle.loads(pickle.dumps(f))
+ assert f(True) == g(True)
+ assert f(False) == g(False)
+
|
Add serialization test for `complement`
|
## Code Before:
from toolz import *
import pickle
def test_compose():
f = compose(str, sum)
g = pickle.loads(pickle.dumps(f))
assert f((1, 2)) == g((1, 2))
def test_curry():
f = curry(map)(str)
g = pickle.loads(pickle.dumps(f))
assert list(f((1, 2, 3))) == list(g((1, 2, 3)))
def test_juxt():
f = juxt(str, int, bool)
g = pickle.loads(pickle.dumps(f))
assert f(1) == g(1)
assert f.funcs == g.funcs
## Instruction:
Add serialization test for `complement`
## Code After:
from toolz import *
import pickle
def test_compose():
f = compose(str, sum)
g = pickle.loads(pickle.dumps(f))
assert f((1, 2)) == g((1, 2))
def test_curry():
f = curry(map)(str)
g = pickle.loads(pickle.dumps(f))
assert list(f((1, 2, 3))) == list(g((1, 2, 3)))
def test_juxt():
f = juxt(str, int, bool)
g = pickle.loads(pickle.dumps(f))
assert f(1) == g(1)
assert f.funcs == g.funcs
def test_complement():
f = complement(bool)
assert f(True) is False
assert f(False) is True
g = pickle.loads(pickle.dumps(f))
assert f(True) == g(True)
assert f(False) == g(False)
|
// ... existing code ...
assert f.funcs == g.funcs
def test_complement():
f = complement(bool)
assert f(True) is False
assert f(False) is True
g = pickle.loads(pickle.dumps(f))
assert f(True) == g(True)
assert f(False) == g(False)
// ... rest of the code ...
|
48b8efabd11a44dfabcd91f6744858535ddfb498
|
djangosaml2/templatetags/idplist.py
|
djangosaml2/templatetags/idplist.py
|
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
self.conf = config_settings_loader()
def render(self, context):
context[self.variable_name] = self.conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
|
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
def render(self, context):
conf = config_settings_loader()
context[self.variable_name] = conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
|
Load the config as late as possible to avoid crashing when the configuration is not ready yet. Also, this code is more reentrant.
|
Load the config as late as possible to avoid crashing when the configuration is not ready yet. Also, this code is more reentrant.
|
Python
|
apache-2.0
|
GradConnection/djangosaml2,advisory/djangosaml2_tenant,WiserTogether/djangosaml2,kviktor/djangosaml2-py3,advisory/djangosaml2_tenant,Gagnavarslan/djangosaml2,shabda/djangosaml2,GradConnection/djangosaml2,WiserTogether/djangosaml2,shabda/djangosaml2,kviktor/djangosaml2-py3,City-of-Helsinki/djangosaml2,City-of-Helsinki/djangosaml2
|
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
- self.conf = config_settings_loader()
def render(self, context):
+ conf = config_settings_loader()
- context[self.variable_name] = self.conf.get_available_idps()
+ context[self.variable_name] = conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
|
Load the config as late as possible to avoid crashing when the configuration is not ready yet. Also, this code is more reentrant.
|
## Code Before:
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
self.conf = config_settings_loader()
def render(self, context):
context[self.variable_name] = self.conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
## Instruction:
Load the config as late as possible to avoid crashing when the configuration is not ready yet. Also, this code is more reentrant.
## Code After:
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
def render(self, context):
conf = config_settings_loader()
context[self.variable_name] = conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
|
# ... existing code ...
self.variable_name = variable_name
# ... modified code ...
def render(self, context):
conf = config_settings_loader()
context[self.variable_name] = conf.get_available_idps()
return ''
# ... rest of the code ...
|
f87a923678f5d7e9f6390ffcb42eae6b2a0f9cc2
|
services/views.py
|
services/views.py
|
import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
outgoing['api_key'] = settings.OPEN311['API_KEY']
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
|
import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
if outgoing.get('internal_feedback', False):
if 'internal_feedback' in outgoing:
del outgoing['internal_feedback']
api_key = settings.OPEN311['INTERNAL_FEEDBACK_API_KEY']
else:
api_key = settings.OPEN311['API_KEY']
outgoing['api_key'] = api_key
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
|
Use separate API key for feedback about app.
|
Use separate API key for feedback about app.
|
Python
|
agpl-3.0
|
City-of-Helsinki/smbackend,City-of-Helsinki/smbackend
|
import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
+ if outgoing.get('internal_feedback', False):
+ if 'internal_feedback' in outgoing:
+ del outgoing['internal_feedback']
+ api_key = settings.OPEN311['INTERNAL_FEEDBACK_API_KEY']
+ else:
- outgoing['api_key'] = settings.OPEN311['API_KEY']
+ api_key = settings.OPEN311['API_KEY']
+ outgoing['api_key'] = api_key
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
|
Use separate API key for feedback about app.
|
## Code Before:
import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
outgoing['api_key'] = settings.OPEN311['API_KEY']
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
## Instruction:
Use separate API key for feedback about app.
## Code After:
import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
if outgoing.get('internal_feedback', False):
if 'internal_feedback' in outgoing:
del outgoing['internal_feedback']
api_key = settings.OPEN311['INTERNAL_FEEDBACK_API_KEY']
else:
api_key = settings.OPEN311['API_KEY']
outgoing['api_key'] = api_key
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
|
// ... existing code ...
outgoing = payload.dict()
if outgoing.get('internal_feedback', False):
if 'internal_feedback' in outgoing:
del outgoing['internal_feedback']
api_key = settings.OPEN311['INTERNAL_FEEDBACK_API_KEY']
else:
api_key = settings.OPEN311['API_KEY']
outgoing['api_key'] = api_key
url = settings.OPEN311['URL_BASE']
// ... rest of the code ...
|
3d1cef9e56d7fac8a1b89861b7443e4ca660e4a8
|
nova/ipv6/api.py
|
nova/ipv6/api.py
|
from nova import flags
from nova import utils
FLAGS = flags.FLAGS
flags.DEFINE_string('ipv6_backend',
'rfc2462',
'Backend to use for IPv6 generation')
def reset_backend():
global IMPL
IMPL = utils.LazyPluggable(FLAGS['ipv6_backend'],
rfc2462='nova.ipv6.rfc2462',
account_identifier='nova.ipv6.account_identifier')
def to_global(prefix, mac, project_id):
return IMPL.to_global(prefix, mac, project_id)
def to_mac(ipv6_address):
return IMPL.to_mac(ipv6_address)
reset_backend()
|
from nova import flags
from nova import utils
FLAGS = flags.FLAGS
flags.DEFINE_string('ipv6_backend',
'rfc2462',
'Backend to use for IPv6 generation')
def reset_backend():
global IMPL
IMPL = utils.LazyPluggable(FLAGS['ipv6_backend'],
rfc2462='nova.ipv6.rfc2462',
account_identifier='nova.ipv6.account_identifier')
def to_global(prefix, mac, project_id):
return IMPL.to_global(prefix, mac, project_id)
def to_mac(ipv6_address):
return IMPL.to_mac(ipv6_address)
reset_backend()
|
Reduce indentation to avoid PEP8 failures
|
Reduce indentation to avoid PEP8 failures
|
Python
|
apache-2.0
|
vmturbo/nova,fnordahl/nova,cloudbau/nova,CEG-FYP-OpenStack/scheduler,vladikr/nova_drafts,yrobla/nova,bigswitch/nova,eharney/nova,KarimAllah/nova,Stavitsky/nova,TwinkleChawla/nova,zzicewind/nova,klmitch/nova,rickerc/nova_audit,belmiromoreira/nova,cloudbase/nova-virtualbox,luogangyi/bcec-nova,Yusuke1987/openstack_template,eneabio/nova,SUSE-Cloud/nova,nikesh-mahalka/nova,fajoy/nova,petrutlucian94/nova_dev,gooddata/openstack-nova,NewpTone/stacklab-nova,Juniper/nova,NoBodyCam/TftpPxeBootBareMetal,sebrandon1/nova,shail2810/nova,usc-isi/extra-specs,orbitfp7/nova,usc-isi/nova,yatinkumbhare/openstack-nova,spring-week-topos/nova-week,yrobla/nova,usc-isi/nova,joker946/nova,usc-isi/extra-specs,apporc/nova,tanglei528/nova,klmitch/nova,cernops/nova,varunarya10/nova_test_latest,salv-orlando/MyRepo,blueboxgroup/nova,BeyondTheClouds/nova,watonyweng/nova,akash1808/nova_test_latest,angdraug/nova,dims/nova,sebrandon1/nova,sridevikoushik31/nova,bgxavier/nova,superstack/nova,ntt-sic/nova,gspilio/nova,NewpTone/stacklab-nova,viggates/nova,adelina-t/nova,maheshp/novatest,varunarya10/nova_test_latest,CiscoSystems/nova,orbitfp7/nova,fajoy/nova,usc-isi/nova,silenceli/nova,psiwczak/openstack,akash1808/nova,mgagne/nova,fnordahl/nova,luogangyi/bcec-nova,NoBodyCam/TftpPxeBootBareMetal,redhat-openstack/nova,NeCTAR-RC/nova,cloudbase/nova,virtualopensystems/nova,mikalstill/nova,cloudbase/nova,CEG-FYP-OpenStack/scheduler,scripnichenko/nova,superstack/nova,DirectXMan12/nova-hacking,edulramirez/nova,houshengbo/nova_vmware_compute_driver,Yuriy-Leonov/nova,Stavitsky/nova,usc-isi/extra-specs,double12gzh/nova,eneabio/nova,plumgrid/plumgrid-nova,imsplitbit/nova,mmnelemane/nova,zhimin711/nova,eonpatapon/nova,ted-gould/nova,aristanetworks/arista-ovs-nova,akash1808/nova_test_latest,aristanetworks/arista-ovs-nova,sileht/deb-openstack-nova,yatinkumbhare/openstack-nova,TieWei/nova,raildo/nova,shootstar/novatest,KarimAllah/nova,Metaswitch/calico-nova,maheshp/novatest,jianghuaw/nova,josephsuh/extra-specs,adelina-t/nova,Juniper/nova,yosshy/nova,whitepages/nova,affo/nova,yrobla/nova,silenceli/nova,bigswitch/nova,whitepages/nova,tudorvio/nova,openstack/nova,phenoxim/nova,CloudServer/nova,joker946/nova,mikalstill/nova,maoy/zknova,bgxavier/nova,citrix-openstack-build/nova,aristanetworks/arista-ovs-nova,dims/nova,gooddata/openstack-nova,devendermishrajio/nova_test_latest,thomasem/nova,NewpTone/stacklab-nova,qwefi/nova,dstroppa/openstack-smartos-nova-grizzly,MountainWei/nova,eayunstack/nova,maoy/zknova,zzicewind/nova,rajalokan/nova,CCI-MOC/nova,mahak/nova,Yusuke1987/openstack_template,saleemjaveds/https-github.com-openstack-nova,kimjaejoong/nova,cloudbase/nova-virtualbox,zhimin711/nova,Juniper/nova,Tehsmash/nova,Francis-Liu/animated-broccoli,phenoxim/nova,JioCloud/nova_test_latest,rahulunair/nova,citrix-openstack-build/nova,badock/nova,petrutlucian94/nova,Triv90/Nova,JianyuWang/nova,sridevikoushik31/nova,apporc/nova,dawnpower/nova,psiwczak/openstack,JioCloud/nova,rajalokan/nova,hanlind/nova,DirectXMan12/nova-hacking,vladikr/nova_drafts,tangfeixiong/nova,eonpatapon/nova,mahak/nova,sileht/deb-openstack-nova,JioCloud/nova_test_latest,leilihh/nova,alexandrucoman/vbox-nova-driver,savi-dev/nova,barnsnake351/nova,jeffrey4l/nova,gspilio/nova,Triv90/Nova,viggates/nova,j-carpentier/nova,klmitch/nova,mikalstill/nova,thomasem/nova,cyx1231st/nova,scripnichenko/nova,josephsuh/extra-specs,sileht/deb-openstack-nova,MountainWei/nova,CCI-MOC/nova,badock/nova,imsplitbit/nova,Metaswitch/calico-nova,leilihh/novaha,russellb/nova,klmitch/nova,shootstar/novatest,JioCloud
/nova,rajalokan/nova,ted-gould/nova,BeyondTheClouds/nova,kimjaejoong/nova,rajalokan/nova,rrader/nova-docker-plugin,psiwczak/openstack,CiscoSystems/nova,bclau/nova,watonyweng/nova,maoy/zknova,alvarolopez/nova,devoid/nova,cernops/nova,qwefi/nova,josephsuh/extra-specs,rahulunair/nova,tanglei528/nova,ruslanloman/nova,eneabio/nova,nikesh-mahalka/nova,SUSE-Cloud/nova,vmturbo/nova,jeffrey4l/nova,takeshineshiro/nova,salv-orlando/MyRepo,paulmathews/nova,tianweizhang/nova,dawnpower/nova,devoid/nova,houshengbo/nova_vmware_compute_driver,NoBodyCam/TftpPxeBootBareMetal,jianghuaw/nova,tealover/nova,superstack/nova,rrader/nova-docker-plugin,KarimAllah/nova,felixma/nova,russellb/nova,NeCTAR-RC/nova,berrange/nova,maelnor/nova,mmnelemane/nova,j-carpentier/nova,eharney/nova,sacharya/nova,alvarolopez/nova,LoHChina/nova,spring-week-topos/nova-week,tealover/nova,virtualopensystems/nova,berrange/nova,edulramirez/nova,russellb/nova,plumgrid/plumgrid-nova,raildo/nova,petrutlucian94/nova_dev,projectcalico/calico-nova,ewindisch/nova,vmturbo/nova,DirectXMan12/nova-hacking,felixma/nova,gspilio/nova,double12gzh/nova,devendermishrajio/nova,jianghuaw/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,sebrandon1/nova,akash1808/nova,saleemjaveds/https-github.com-openstack-nova,cloudbau/nova,blueboxgroup/nova,maelnor/nova,rickerc/nova_audit,CloudServer/nova,tangfeixiong/nova,eayunstack/nova,mgagne/nova,Yuriy-Leonov/nova,shahar-stratoscale/nova,hanlind/nova,TieWei/nova,rahulunair/nova,dstroppa/openstack-smartos-nova-grizzly,alaski/nova,mandeepdhami/nova,Juniper/nova,angdraug/nova,jianghuaw/nova,LoHChina/nova,sridevikoushik31/nova,paulmathews/nova,sridevikoushik31/openstack,iuliat/nova,cloudbase/nova,paulmathews/nova,belmiromoreira/nova,JianyuWang/nova,OpenAcademy-OpenStack/nova-scheduler,Francis-Liu/animated-broccoli,bclau/nova,fajoy/nova,openstack/nova,ntt-sic/nova,ewindisch/nova,leilihh/nova,petrutlucian94/nova,sacharya/nova,sridevikoushik31/nova,openstack/nova,redhat-openstack/nova,OpenAcademy-OpenStack/nova-scheduler,devendermishrajio/nova,savi-dev/nova,TwinkleChawla/nova,sridevikoushik31/openstack,sridevikoushik31/openstack,barnsnake351/nova,alaski/nova,BeyondTheClouds/nova,cernops/nova,isyippee/nova,zaina/nova,noironetworks/nova,Triv90/Nova,zaina/nova,tianweizhang/nova,shail2810/nova,mandeepdhami/nova,yosshy/nova,noironetworks/nova,vmturbo/nova,leilihh/novaha,projectcalico/calico-nova,maheshp/novatest,houshengbo/nova_vmware_compute_driver,hanlind/nova,iuliat/nova,devendermishrajio/nova_test_latest,alexandrucoman/vbox-nova-driver,isyippee/nova,takeshineshiro/nova,dstroppa/openstack-smartos-nova-grizzly,tudorvio/nova,salv-orlando/MyRepo,mahak/nova,gooddata/openstack-nova,Tehsmash/nova,ruslanloman/nova,affo/nova,savi-dev/nova,shahar-stratoscale/nova,cyx1231st/nova,gooddata/openstack-nova,Brocade-OpenSource/OpenStack-DNRM-Nova
|
from nova import flags
from nova import utils
FLAGS = flags.FLAGS
flags.DEFINE_string('ipv6_backend',
'rfc2462',
'Backend to use for IPv6 generation')
def reset_backend():
global IMPL
IMPL = utils.LazyPluggable(FLAGS['ipv6_backend'],
- rfc2462='nova.ipv6.rfc2462',
+ rfc2462='nova.ipv6.rfc2462',
- account_identifier='nova.ipv6.account_identifier')
+ account_identifier='nova.ipv6.account_identifier')
def to_global(prefix, mac, project_id):
return IMPL.to_global(prefix, mac, project_id)
def to_mac(ipv6_address):
return IMPL.to_mac(ipv6_address)
reset_backend()
|
Reduce indentation to avoid PEP8 failures
|
## Code Before:
from nova import flags
from nova import utils
FLAGS = flags.FLAGS
flags.DEFINE_string('ipv6_backend',
'rfc2462',
'Backend to use for IPv6 generation')
def reset_backend():
global IMPL
IMPL = utils.LazyPluggable(FLAGS['ipv6_backend'],
rfc2462='nova.ipv6.rfc2462',
account_identifier='nova.ipv6.account_identifier')
def to_global(prefix, mac, project_id):
return IMPL.to_global(prefix, mac, project_id)
def to_mac(ipv6_address):
return IMPL.to_mac(ipv6_address)
reset_backend()
## Instruction:
Reduce indentation to avoid PEP8 failures
## Code After:
from nova import flags
from nova import utils
FLAGS = flags.FLAGS
flags.DEFINE_string('ipv6_backend',
'rfc2462',
'Backend to use for IPv6 generation')
def reset_backend():
global IMPL
IMPL = utils.LazyPluggable(FLAGS['ipv6_backend'],
rfc2462='nova.ipv6.rfc2462',
account_identifier='nova.ipv6.account_identifier')
def to_global(prefix, mac, project_id):
return IMPL.to_global(prefix, mac, project_id)
def to_mac(ipv6_address):
return IMPL.to_mac(ipv6_address)
reset_backend()
|
...
IMPL = utils.LazyPluggable(FLAGS['ipv6_backend'],
rfc2462='nova.ipv6.rfc2462',
account_identifier='nova.ipv6.account_identifier')
...
|
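The diff in this record changes only leading whitespace, which this plain-text rendering flattens, so the removed and added lines look identical. What PEP 8 distinguishes is illustrated below with a generic call (not the nova code; the exact original indentation is not recoverable here):

def some_function(a, b):
    return a + b

# Aligned with the opening delimiter -- continuation lines inherit a deep
# indent and can push past column 79 when the call starts far into the line:
result = some_function(1,
                       2)

# Hanging indent -- a fixed shallow indent keeps continuation lines short,
# which is the direction the commit moves in:
result = some_function(
    1,
    2)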
047483d9897e75f8284c39e8477a285763da7b37
|
heufybot/modules/util/commandhandler.py
|
heufybot/modules/util/commandhandler.py
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
implements(IPlugin, IBotModule)
name = "CommandHandler"
def actions(self):
return [ ("message-channel", 1, self.handleChannelMessage),
("message-user", 1, self.handlePrivateMessage) ]
def handleChannelMessage(self, server, channel, user, messageBody):
message = {
"server": server,
"source": channel.name,
"channel": channel,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def handlePrivateMessage(self, server, user, messageBody):
message = {
"server": server,
"source": user.nick,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def _handleCommand(self, message):
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
if not message["body"].startswith(commandPrefix):
return # We don't need to be handling things that aren't bot commands
params = message["body"].split()
message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
del params[0]
message["params"] = params
self.bot.moduleHandler.runProcessingAction("botmessage", message)
commandHandler = CommandHandler()
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
implements(IPlugin, IBotModule)
name = "CommandHandler"
def actions(self):
return [ ("message-channel", 1, self.handleChannelMessage),
("message-user", 1, self.handlePrivateMessage) ]
def handleChannelMessage(self, server, channel, user, messageBody):
message = {
"server": server,
"source": channel.name,
"channel": channel,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def handlePrivateMessage(self, server, user, messageBody):
message = {
"server": server,
"source": user.nick,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def _handleCommand(self, message):
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
botNick = self.bot.servers[message["server"]].nick.lower()
params = message["body"].split()
if message["body"].startswith(commandPrefix):
message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
del params[0]
elif message["body"].lower().startswith(botNick):
message["command"] = params[1]
del params[0:2]
else:
return # We don't need to be handling things that aren't bot commands
message["params"] = params
self.bot.moduleHandler.runProcessingAction("botmessage", message)
commandHandler = CommandHandler()
|
Make the bot respond to its name
|
Make the bot respond to its name
Implements GH-7
|
Python
|
mit
|
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
implements(IPlugin, IBotModule)
name = "CommandHandler"
def actions(self):
return [ ("message-channel", 1, self.handleChannelMessage),
("message-user", 1, self.handlePrivateMessage) ]
def handleChannelMessage(self, server, channel, user, messageBody):
message = {
"server": server,
"source": channel.name,
"channel": channel,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def handlePrivateMessage(self, server, user, messageBody):
message = {
"server": server,
"source": user.nick,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def _handleCommand(self, message):
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
+ botNick = self.bot.servers[message["server"]].nick.lower()
+ params = message["body"].split()
+
- if not message["body"].startswith(commandPrefix):
+ if message["body"].startswith(commandPrefix):
+ message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
+ del params[0]
+ elif message["body"].lower().startswith(botNick):
+ message["command"] = params[1]
+ del params[0:2]
+ else:
return # We don't need to be handling things that aren't bot commands
- params = message["body"].split()
- message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
- del params[0]
message["params"] = params
self.bot.moduleHandler.runProcessingAction("botmessage", message)
commandHandler = CommandHandler()
|
Make the bot respond to its name
|
## Code Before:
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
implements(IPlugin, IBotModule)
name = "CommandHandler"
def actions(self):
return [ ("message-channel", 1, self.handleChannelMessage),
("message-user", 1, self.handlePrivateMessage) ]
def handleChannelMessage(self, server, channel, user, messageBody):
message = {
"server": server,
"source": channel.name,
"channel": channel,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def handlePrivateMessage(self, server, user, messageBody):
message = {
"server": server,
"source": user.nick,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def _handleCommand(self, message):
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
if not message["body"].startswith(commandPrefix):
return # We don't need to be handling things that aren't bot commands
params = message["body"].split()
message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
del params[0]
message["params"] = params
self.bot.moduleHandler.runProcessingAction("botmessage", message)
commandHandler = CommandHandler()
## Instruction:
Make the bot respond to its name
## Code After:
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
implements(IPlugin, IBotModule)
name = "CommandHandler"
def actions(self):
return [ ("message-channel", 1, self.handleChannelMessage),
("message-user", 1, self.handlePrivateMessage) ]
def handleChannelMessage(self, server, channel, user, messageBody):
message = {
"server": server,
"source": channel.name,
"channel": channel,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def handlePrivateMessage(self, server, user, messageBody):
message = {
"server": server,
"source": user.nick,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def _handleCommand(self, message):
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
botNick = self.bot.servers[message["server"]].nick.lower()
params = message["body"].split()
if message["body"].startswith(commandPrefix):
message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
del params[0]
elif message["body"].lower().startswith(botNick):
message["command"] = params[1]
del params[0:2]
else:
return # We don't need to be handling things that aren't bot commands
message["params"] = params
self.bot.moduleHandler.runProcessingAction("botmessage", message)
commandHandler = CommandHandler()
|
...
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
botNick = self.bot.servers[message["server"]].nick.lower()
params = message["body"].split()
if message["body"].startswith(commandPrefix):
message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
del params[0]
elif message["body"].lower().startswith(botNick):
message["command"] = params[1]
del params[0:2]
else:
return # We don't need to be handling things that aren't bot commands
message["params"] = params
...
|
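To make the new dispatch logic easier to follow outside the module, a self-contained sketch of the same parsing rules (prefix command vs. nick-addressed command), using plain strings instead of the bot objects; the nick default is illustrative, and the length guard is a small hardening the original code does not have:

def parse_command(body, command_prefix='!', bot_nick='heufybot'):
    """Return (command, params) or None if the message is not a bot command."""
    params = body.split()
    if body.startswith(command_prefix):
        # "!echo hello" -> command "echo", params ["hello"]
        return params[0][len(command_prefix):], params[1:]
    if body.lower().startswith(bot_nick.lower()):
        # "heufybot: echo hello" -> command "echo", params ["hello"]
        if len(params) < 2:
            return None
        return params[1], params[2:]
    return None

assert parse_command('!echo hello') == ('echo', ['hello'])
assert parse_command('heufybot: echo hello') == ('echo', ['hello'])
assert parse_command('just chatting') is None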
49c4b3a35aa8c50740761be6e84e3439d8084458
|
main.py
|
main.py
|
import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
lastPingTime = 0
while True:
for message in slackClient.rtm_read():
if message["type"] == "team_join":
username = message["user"]["name"]
message = "Welcome to the New Ro-Bots Slack, @{}! Please make sure to download this on your phone so we can get your attention! The app is available on both iOS and Android.".format(username)
slackClient.api_call("chat.postMessage", channel="#general",
text=message, username="The New Ro-Bot", icon_emoji=":wave:",
link_names = True)
now = time.time()
if now - lastPingTime >= 3:
slackClient.server.ping()
lastPingTime = now
time.sleep(.1)
|
import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
lastPingTime = 0
while True:
for message in slackClient.rtm_read():
if message["type"] == "team_join":
username = message["user"]["name"]
message = {}.format(username)
slackClient.api_call("chat.postMessage", channel="#general",
text=message, username="The New Ro-Bot", icon_emoji=":wave:",
link_names = True)
now = time.time()
if now - lastPingTime >= 3:
slackClient.server.ping()
lastPingTime = now
time.sleep(.1)
|
Replace string with environment variable
|
Replace string with environment variable
|
Python
|
mit
|
ollien/Slack-Welcome-Bot
|
import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
lastPingTime = 0
while True:
for message in slackClient.rtm_read():
if message["type"] == "team_join":
username = message["user"]["name"]
- message = "Welcome to the New Ro-Bots Slack, @{}! Please make sure to download this on your phone so we can get your attention! The app is available on both iOS and Android.".format(username)
+ message = {}.format(username)
slackClient.api_call("chat.postMessage", channel="#general",
text=message, username="The New Ro-Bot", icon_emoji=":wave:",
link_names = True)
now = time.time()
if now - lastPingTime >= 3:
slackClient.server.ping()
lastPingTime = now
time.sleep(.1)
|
Replace string with environment variable
|
## Code Before:
import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
lastPingTime = 0
while True:
for message in slackClient.rtm_read():
if message["type"] == "team_join":
username = message["user"]["name"]
message = "Welcome to the New Ro-Bots Slack, @{}! Please make sure to download this on your phone so we can get your attention! The app is available on both iOS and Android.".format(username)
slackClient.api_call("chat.postMessage", channel="#general",
text=message, username="The New Ro-Bot", icon_emoji=":wave:",
link_names = True)
now = time.time()
if now - lastPingTime >= 3:
slackClient.server.ping()
lastPingTime = now
time.sleep(.1)
## Instruction:
Replace string with environment variable
## Code After:
import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
lastPingTime = 0
while True:
for message in slackClient.rtm_read():
if message["type"] == "team_join":
username = message["user"]["name"]
message = {}.format(username)
slackClient.api_call("chat.postMessage", channel="#general",
text=message, username="The New Ro-Bot", icon_emoji=":wave:",
link_names = True)
now = time.time()
if now - lastPingTime >= 3:
slackClient.server.ping()
lastPingTime = now
time.sleep(.1)
|
# ... existing code ...
username = message["user"]["name"]
message = {}.format(username)
slackClient.api_call("chat.postMessage", channel="#general",
# ... rest of the code ...
|
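The commit message says the welcome text should come from an environment variable, but the recorded diff only leaves a bare {} placeholder (which would fail at runtime, since a dict literal has no format method). A minimal sketch of the stated intent, assuming a variable named WELCOME_MESSAGE (the name is hypothetical):

import os

username = 'new_member'   # stand-in for message['user']['name']
# e.g. export WELCOME_MESSAGE='Welcome to the Slack, @{}!'
template = os.environ.get('WELCOME_MESSAGE', 'Welcome, @{}!')
message = template.format(username)
print(message)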
f45fc8854647754b24df5f9601920368cd2d3c49
|
tests/chainerx_tests/unit_tests/test_cuda.py
|
tests/chainerx_tests/unit_tests/test_cuda.py
|
import pytest
from chainerx import _cuda
try:
import cupy
except Exception:
cupy = None
class CupyTestMemoryHook(cupy.cuda.memory_hook.MemoryHook):
name = 'CupyTestMemoryHook'
def __init__(self):
self.used_bytes = 0
self.acquired_bytes = 0
def alloc_preprocess(self, **kwargs):
self.acquired_bytes += kwargs['mem_size']
def malloc_preprocess(self, **kwargs):
self.used_bytes += kwargs['mem_size']
@pytest.mark.cuda()
def test_cupy_share_allocator():
with CupyTestMemoryHook() as hook:
cp_allocated = cupy.arange(10)
used_bytes = hook.used_bytes
acquired_bytes = hook.acquired_bytes
# Create a new array after changing the allocator to the memory pool
# of ChainerX and make sure that no additional memory has been
# allocated by CuPy.
_cuda.cupy_share_allocator()
chx_allocated = cupy.arange(10)
cupy.testing.assert_array_equal(cp_allocated, chx_allocated)
assert used_bytes == hook.used_bytes
assert acquired_bytes == hook.acquired_bytes
|
import pytest
from chainerx import _cuda
try:
import cupy
except Exception:
cupy = None
class CupyTestMemoryHook(cupy.cuda.memory_hook.MemoryHook):
name = 'CupyTestMemoryHook'
def __init__(self):
self.used_bytes = 0
self.acquired_bytes = 0
def alloc_preprocess(self, **kwargs):
self.acquired_bytes += kwargs['mem_size']
def malloc_preprocess(self, **kwargs):
self.used_bytes += kwargs['mem_size']
@pytest.mark.cuda()
def test_cupy_share_allocator():
with CupyTestMemoryHook() as hook:
cp_allocated = cupy.arange(10)
used_bytes = hook.used_bytes
acquired_bytes = hook.acquired_bytes
assert used_bytes > 0
assert acquired_bytes > 0
# Create a new array after changing the allocator to the memory pool
# of ChainerX and make sure that no additional memory has been
# allocated by CuPy.
_cuda.cupy_share_allocator()
chx_allocated = cupy.arange(10)
cupy.testing.assert_array_equal(cp_allocated, chx_allocated)
assert used_bytes == hook.used_bytes
assert acquired_bytes == hook.acquired_bytes
|
Add safety checks in test
|
Add safety checks in test
|
Python
|
mit
|
wkentaro/chainer,hvy/chainer,niboshi/chainer,okuta/chainer,chainer/chainer,wkentaro/chainer,chainer/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,hvy/chainer,pfnet/chainer,hvy/chainer,chainer/chainer,keisuke-umezawa/chainer,okuta/chainer,chainer/chainer,tkerola/chainer,keisuke-umezawa/chainer,wkentaro/chainer,niboshi/chainer,hvy/chainer,okuta/chainer,niboshi/chainer,niboshi/chainer,okuta/chainer,wkentaro/chainer
|
import pytest
from chainerx import _cuda
try:
import cupy
except Exception:
cupy = None
class CupyTestMemoryHook(cupy.cuda.memory_hook.MemoryHook):
name = 'CupyTestMemoryHook'
def __init__(self):
self.used_bytes = 0
self.acquired_bytes = 0
def alloc_preprocess(self, **kwargs):
self.acquired_bytes += kwargs['mem_size']
def malloc_preprocess(self, **kwargs):
self.used_bytes += kwargs['mem_size']
@pytest.mark.cuda()
def test_cupy_share_allocator():
with CupyTestMemoryHook() as hook:
cp_allocated = cupy.arange(10)
used_bytes = hook.used_bytes
acquired_bytes = hook.acquired_bytes
+ assert used_bytes > 0
+ assert acquired_bytes > 0
# Create a new array after changing the allocator to the memory pool
# of ChainerX and make sure that no additional memory has been
# allocated by CuPy.
_cuda.cupy_share_allocator()
chx_allocated = cupy.arange(10)
cupy.testing.assert_array_equal(cp_allocated, chx_allocated)
assert used_bytes == hook.used_bytes
assert acquired_bytes == hook.acquired_bytes
|
Add safety checks in test
|
## Code Before:
import pytest
from chainerx import _cuda
try:
import cupy
except Exception:
cupy = None
class CupyTestMemoryHook(cupy.cuda.memory_hook.MemoryHook):
name = 'CupyTestMemoryHook'
def __init__(self):
self.used_bytes = 0
self.acquired_bytes = 0
def alloc_preprocess(self, **kwargs):
self.acquired_bytes += kwargs['mem_size']
def malloc_preprocess(self, **kwargs):
self.used_bytes += kwargs['mem_size']
@pytest.mark.cuda()
def test_cupy_share_allocator():
with CupyTestMemoryHook() as hook:
cp_allocated = cupy.arange(10)
used_bytes = hook.used_bytes
acquired_bytes = hook.acquired_bytes
# Create a new array after changing the allocator to the memory pool
# of ChainerX and make sure that no additional memory has been
# allocated by CuPy.
_cuda.cupy_share_allocator()
chx_allocated = cupy.arange(10)
cupy.testing.assert_array_equal(cp_allocated, chx_allocated)
assert used_bytes == hook.used_bytes
assert acquired_bytes == hook.acquired_bytes
## Instruction:
Add safety checks in test
## Code After:
import pytest
from chainerx import _cuda
try:
import cupy
except Exception:
cupy = None
class CupyTestMemoryHook(cupy.cuda.memory_hook.MemoryHook):
name = 'CupyTestMemoryHook'
def __init__(self):
self.used_bytes = 0
self.acquired_bytes = 0
def alloc_preprocess(self, **kwargs):
self.acquired_bytes += kwargs['mem_size']
def malloc_preprocess(self, **kwargs):
self.used_bytes += kwargs['mem_size']
@pytest.mark.cuda()
def test_cupy_share_allocator():
with CupyTestMemoryHook() as hook:
cp_allocated = cupy.arange(10)
used_bytes = hook.used_bytes
acquired_bytes = hook.acquired_bytes
assert used_bytes > 0
assert acquired_bytes > 0
# Create a new array after changing the allocator to the memory pool
# of ChainerX and make sure that no additional memory has been
# allocated by CuPy.
_cuda.cupy_share_allocator()
chx_allocated = cupy.arange(10)
cupy.testing.assert_array_equal(cp_allocated, chx_allocated)
assert used_bytes == hook.used_bytes
assert acquired_bytes == hook.acquired_bytes
|
// ... existing code ...
acquired_bytes = hook.acquired_bytes
assert used_bytes > 0
assert acquired_bytes > 0
// ... rest of the code ...
|
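The added assertions guard against the hook silently recording nothing. For reference, a minimal standalone use of the same hook pattern, relying only on the cupy API already exercised by the test (requires a CUDA-capable cupy install):

import cupy
from cupy.cuda import memory_hook

class CountingHook(memory_hook.MemoryHook):
    """Counts bytes requested from the device vs. bytes served to callers."""
    name = 'CountingHook'

    def __init__(self):
        self.acquired = 0   # raw allocations from the device
        self.used = 0       # allocations handed out (possibly from the pool)

    def alloc_preprocess(self, **kwargs):
        self.acquired += kwargs['mem_size']

    def malloc_preprocess(self, **kwargs):
        self.used += kwargs['mem_size']

with CountingHook() as hook:
    cupy.arange(10)
print(hook.acquired, hook.used)   # both should be > 0 on a fresh pool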
683765c26e0c852d06fd06a491e3906369ae14cd
|
votes/urls.py
|
votes/urls.py
|
from django.conf.urls import include, url
from django.views.generic import TemplateView
from votes.views import VoteView
urlpatterns = [
url(r'^(?P<vote_name>[\w-]+)$', VoteView.as_view()),
]
|
from django.conf.urls import include, url
from django.views.generic import TemplateView
from votes.views import VoteView
urlpatterns = [
url(r'^(?P<vote_name>[\w-]+)$', VoteView.as_view(), name="vote"),
]
|
Add name to vote view URL
|
Add name to vote view URL
|
Python
|
mit
|
kuboschek/jay,kuboschek/jay,OpenJUB/jay,kuboschek/jay,OpenJUB/jay,OpenJUB/jay
|
from django.conf.urls import include, url
from django.views.generic import TemplateView
from votes.views import VoteView
urlpatterns = [
- url(r'^(?P<vote_name>[\w-]+)$', VoteView.as_view()),
+ url(r'^(?P<vote_name>[\w-]+)$', VoteView.as_view(), name="vote"),
]
|
Add name to vote view URL
|
## Code Before:
from django.conf.urls import include, url
from django.views.generic import TemplateView
from votes.views import VoteView
urlpatterns = [
url(r'^(?P<vote_name>[\w-]+)$', VoteView.as_view()),
]
## Instruction:
Add name to vote view URL
## Code After:
from django.conf.urls import include, url
from django.views.generic import TemplateView
from votes.views import VoteView
urlpatterns = [
url(r'^(?P<vote_name>[\w-]+)$', VoteView.as_view(), name="vote"),
]
|
// ... existing code ...
urlpatterns = [
url(r'^(?P<vote_name>[\w-]+)$', VoteView.as_view(), name="vote"),
]
// ... rest of the code ...
|
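Giving the pattern a name is what makes it resolvable from code and templates. A short sketch of the payoff, assuming a configured Django project that includes votes.urls (on the older Django this codebase targets, reverse lives in django.core.urlresolvers; on modern Django it is django.urls.reverse):

from django.core.urlresolvers import reverse

url = reverse('vote', kwargs={'vote_name': 'board-election'})
# -> '/board-election' relative to wherever votes.urls is included

# and in a template:
# <a href="{% url 'vote' vote_name='board-election' %}">Vote</a>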
966c22d4bae270a14176ae1c7b9887eb55743612
|
tests/conftest.py
|
tests/conftest.py
|
import datetime
import odin.datetimeutil
ARE_YOU_EXPERIENCED = datetime.date(1967, 5, 12)
MWT = odin.datetimeutil.FixedTimezone(-6, 'Mountain War Time')
BOOM = datetime.datetime(1945, 7, 16, 5, 29, 45, 0, MWT)
|
import os
import sys
import datetime
import odin.datetimeutil
HERE = os.path.abspath(os.path.dirname(__file__))
SRC = os.path.normpath(os.path.join(HERE, "..", "src"))
sys.path.insert(0, SRC)
ARE_YOU_EXPERIENCED = datetime.date(1967, 5, 12)
MWT = odin.datetimeutil.FixedTimezone(-6, "Mountain War Time")
BOOM = datetime.datetime(1945, 7, 16, 5, 29, 45, 0, MWT)
|
Update tests to find source path
|
Update tests to find source path
|
Python
|
bsd-3-clause
|
python-odin/odin
|
+ import os
+ import sys
import datetime
import odin.datetimeutil
+ HERE = os.path.abspath(os.path.dirname(__file__))
+ SRC = os.path.normpath(os.path.join(HERE, "..", "src"))
+ sys.path.insert(0, SRC)
+
ARE_YOU_EXPERIENCED = datetime.date(1967, 5, 12)
- MWT = odin.datetimeutil.FixedTimezone(-6, 'Mountain War Time')
+ MWT = odin.datetimeutil.FixedTimezone(-6, "Mountain War Time")
BOOM = datetime.datetime(1945, 7, 16, 5, 29, 45, 0, MWT)
|
Update tests to find source path
|
## Code Before:
import datetime
import odin.datetimeutil
ARE_YOU_EXPERIENCED = datetime.date(1967, 5, 12)
MWT = odin.datetimeutil.FixedTimezone(-6, 'Mountain War Time')
BOOM = datetime.datetime(1945, 7, 16, 5, 29, 45, 0, MWT)
## Instruction:
Update tests to find source path
## Code After:
import os
import sys
import datetime
import odin.datetimeutil
HERE = os.path.abspath(os.path.dirname(__file__))
SRC = os.path.normpath(os.path.join(HERE, "..", "src"))
sys.path.insert(0, SRC)
ARE_YOU_EXPERIENCED = datetime.date(1967, 5, 12)
MWT = odin.datetimeutil.FixedTimezone(-6, "Mountain War Time")
BOOM = datetime.datetime(1945, 7, 16, 5, 29, 45, 0, MWT)
|
...
import os
import sys
import datetime
...
HERE = os.path.abspath(os.path.dirname(__file__))
SRC = os.path.normpath(os.path.join(HERE, "..", "src"))
sys.path.insert(0, SRC)
ARE_YOU_EXPERIENCED = datetime.date(1967, 5, 12)
MWT = odin.datetimeutil.FixedTimezone(-6, "Mountain War Time")
BOOM = datetime.datetime(1945, 7, 16, 5, 29, 45, 0, MWT)
...
|
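The two inserted lines implement the common "src layout": the tests directory sits next to, not inside, the source tree, so conftest.py must prepend src to sys.path before pytest imports the code under test (conftest.py runs before collection, which is why the top-level import odin.datetimeutil then resolves). An equivalent spelling with pathlib, shown only as a sketch -- the repository itself uses os.path:

import sys
from pathlib import Path

SRC = Path(__file__).resolve().parent.parent / 'src'
sys.path.insert(0, str(SRC))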
3f543f9e3a328441ae477ca3fb299fbc86ffc40f
|
oneflow/base/tasks.py
|
oneflow/base/tasks.py
|
import logging
#import datetime
from celery import task
from django.contrib.auth import get_user_model
LOGGER = logging.getLogger(__name__)
User = get_user_model()
#ftstamp = datetime.datetime.fromtimestamp
#now = datetime.datetime.now
@task
def celery_beat_test():
LOGGER.info(u'testing celery beat scheduler…')
@task
def refresh_access_tokens():
""" Refresh all access_tokens in turn to avoid hitting
http://dev.1flow.net/webapps/1flow/group/664/
"""
users = User.objects.all()
#count = users.count()
#sleep_time = 1500 / count
for user in users:
# See http://django-social-auth.readthedocs.org/en/latest/use_cases.html#token-refreshing # NOQA
#LOGGER.warning(u'Refreshing invalid access_token for user %s.',
# user.username)
social_accounts = user.social_auth.filter(provider='google-oauth2')
if social_accounts.count() == 0:
continue
for social in social_accounts:
try:
social.refresh_token()
except:
LOGGER.error(u'Access token could not be refreshed for user '
u'%s, forcing re-authentication at next login.')
# TODO: force re-auth of user at next visit.
|
import logging
#import datetime
from celery import task
from django.contrib.auth import get_user_model
LOGGER = logging.getLogger(__name__)
User = get_user_model()
#ftstamp = datetime.datetime.fromtimestamp
#now = datetime.datetime.now
@task
def celery_beat_test():
LOGGER.info(u'testing celery beat scheduler…')
@task
def refresh_access_tokens():
""" Refresh all access_tokens in turn to avoid hitting
http://dev.1flow.net/webapps/1flow/group/664/
"""
users = User.objects.all()
#count = users.count()
#sleep_time = 1500 / count
for user in users:
# See http://django-social-auth.readthedocs.org/en/latest/use_cases.html#token-refreshing # NOQA
#LOGGER.warning(u'Refreshing invalid access_token for user %s.',
# user.username)
social_accounts = user.social_auth.filter(provider='google-oauth2')
if social_accounts.count() == 0:
continue
for social in social_accounts:
try:
social.refresh_token()
except:
LOGGER.error(u'Access token could not be refreshed for user '
u'%s, forcing re-authentication at next login.',
user.username)
social.delete()
|
Fix for missing username and clear the social_auth to force re-authentication at next login.
|
Fix for missing username and clear the social_auth to force re-authentication at next login.
|
Python
|
agpl-3.0
|
WillianPaiva/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow
|
import logging
#import datetime
from celery import task
from django.contrib.auth import get_user_model
LOGGER = logging.getLogger(__name__)
User = get_user_model()
#ftstamp = datetime.datetime.fromtimestamp
#now = datetime.datetime.now
@task
def celery_beat_test():
LOGGER.info(u'testing celery beat scheduler…')
@task
def refresh_access_tokens():
""" Refresh all access_tokens in turn to avoid hitting
http://dev.1flow.net/webapps/1flow/group/664/
"""
users = User.objects.all()
#count = users.count()
#sleep_time = 1500 / count
for user in users:
# See http://django-social-auth.readthedocs.org/en/latest/use_cases.html#token-refreshing # NOQA
#LOGGER.warning(u'Refreshing invalid access_token for user %s.',
# user.username)
social_accounts = user.social_auth.filter(provider='google-oauth2')
if social_accounts.count() == 0:
continue
for social in social_accounts:
try:
social.refresh_token()
except:
LOGGER.error(u'Access token could not be refreshed for user '
- u'%s, forcing re-authentication at next login.')
+ u'%s, forcing re-authentication at next login.',
+ user.username)
- # TODO: force re-auth of user at next visit.
+ social.delete()
|
Fix for missing username and clear the social_auth to force re-authentication at next login.
|
## Code Before:
import logging
#import datetime
from celery import task
from django.contrib.auth import get_user_model
LOGGER = logging.getLogger(__name__)
User = get_user_model()
#ftstamp = datetime.datetime.fromtimestamp
#now = datetime.datetime.now
@task
def celery_beat_test():
LOGGER.info(u'testing celery beat scheduler…')
@task
def refresh_access_tokens():
""" Refresh all access_tokens in turn to avoid hitting
http://dev.1flow.net/webapps/1flow/group/664/
"""
users = User.objects.all()
#count = users.count()
#sleep_time = 1500 / count
for user in users:
# See http://django-social-auth.readthedocs.org/en/latest/use_cases.html#token-refreshing # NOQA
#LOGGER.warning(u'Refreshing invalid access_token for user %s.',
# user.username)
social_accounts = user.social_auth.filter(provider='google-oauth2')
if social_accounts.count() == 0:
continue
for social in social_accounts:
try:
social.refresh_token()
except:
LOGGER.error(u'Access token could not be refreshed for user '
u'%s, forcing re-authentication at next login.')
# TODO: force re-auth of user at next visit.
## Instruction:
Fix for missing username and clear the social_auth to force re-authentication at next login.
## Code After:
import logging
#import datetime
from celery import task
from django.contrib.auth import get_user_model
LOGGER = logging.getLogger(__name__)
User = get_user_model()
#ftstamp = datetime.datetime.fromtimestamp
#now = datetime.datetime.now
@task
def celery_beat_test():
LOGGER.info(u'testing celery beat scheduler…')
@task
def refresh_access_tokens():
""" Refresh all access_tokens in turn to avoid hitting
http://dev.1flow.net/webapps/1flow/group/664/
"""
users = User.objects.all()
#count = users.count()
#sleep_time = 1500 / count
for user in users:
# See http://django-social-auth.readthedocs.org/en/latest/use_cases.html#token-refreshing # NOQA
#LOGGER.warning(u'Refreshing invalid access_token for user %s.',
# user.username)
social_accounts = user.social_auth.filter(provider='google-oauth2')
if social_accounts.count() == 0:
continue
for social in social_accounts:
try:
social.refresh_token()
except:
LOGGER.error(u'Access token could not be refreshed for user '
u'%s, forcing re-authentication at next login.',
user.username)
social.delete()
|
# ... existing code ...
LOGGER.error(u'Access token could not be refreshed for user '
u'%s, forcing re-authentication at next login.',
user.username)
social.delete()
# ... rest of the code ...
|
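The bug fixed here is that the original LOGGER.error call contained a %s placeholder but passed no argument, so the log line would print the literal %s (the logging module does not raise in that case). The fix supplies the argument in logging's lazy style, which also skips the interpolation when the level is filtered out. A standard-library illustration:

import logging

logging.basicConfig(level=logging.ERROR)
log = logging.getLogger(__name__)
username = 'alice'

# Broken: the placeholder is never filled in.
log.error(u'Access token could not be refreshed for user %s.')
# Fixed: logging interpolates username only if the record is actually emitted.
log.error(u'Access token could not be refreshed for user %s.', username)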
90b2c2b546aa6c4707273be29fe83c2ea36e0ad5
|
panoptes/state_machine/states/parked.py
|
panoptes/state_machine/states/parked.py
|
from . import PanState
""" Parked State
The Parked state occurs in the following conditions:
* Daytime
* Bad Weather
* System error
As such, the state needs to check for a number of conditions.
"""
class State(PanState):
def main(self):
return 'exit'
|
from . import PanState
class State(PanState):
def main(self):
next_state = 'shutdown'
# mount = self.panoptes.observatory.mount
self.logger.info("I'm parked now.")
return next_state
|
Change Parked state to something silly
|
Change Parked state to something silly
|
Python
|
mit
|
AstroHuntsman/POCS,AstroHuntsman/POCS,AstroHuntsman/POCS,panoptes/POCS,panoptes/POCS,panoptes/POCS,AstroHuntsman/POCS,panoptes/POCS,joshwalawender/POCS,joshwalawender/POCS,joshwalawender/POCS
|
from . import PanState
- """ Parked State
-
- The Parked state occurs in the following conditions:
- * Daytime
- * Bad Weather
- * System error
-
- As such, the state needs to check for a number of conditions.
- """
class State(PanState):
+
def main(self):
-
- return 'exit'
+ next_state = 'shutdown'
+ # mount = self.panoptes.observatory.mount
+
+ self.logger.info("I'm parked now.")
+
+ return next_state
+
|
Change Parked state to something silly
|
## Code Before:
from . import PanState
""" Parked State
The Parked state occurs in the following conditions:
* Daytime
* Bad Weather
* System error
As such, the state needs to check for a number of conditions.
"""
class State(PanState):
def main(self):
return 'exit'
## Instruction:
Change Parked state to something silly
## Code After:
from . import PanState
class State(PanState):
def main(self):
next_state = 'shutdown'
# mount = self.panoptes.observatory.mount
self.logger.info("I'm parked now.")
return next_state
|
// ... existing code ...
// ... modified code ...
class State(PanState):
def main(self):
next_state = 'shutdown'
# mount = self.panoptes.observatory.mount
self.logger.info("I'm parked now.")
return next_state
// ... rest of the code ...
|
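main() now returns the name of the next state instead of a fixed 'exit', which is the contract a name-keyed state machine runs on. A toy driver loop in the same spirit (the real project dispatches through its own state machine, which this record does not show):

def parked():
    print("I'm parked now.")
    return 'shutdown'

def shutdown():
    print('Shutting down.')
    return None            # no next state ends the machine

STATES = {'parked': parked, 'shutdown': shutdown}

state = 'parked'
while state is not None:
    state = STATES[state]()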
cc44afdca3ebcdaeed3555f161d3e0a1992c19eb
|
planet/api/__init__.py
|
planet/api/__init__.py
|
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
|
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
from .__version__ import __version__ # NOQA
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
|
Put api.__version__ back in after version shuffle
|
Put api.__version__ back in after version shuffle
|
Python
|
apache-2.0
|
planetlabs/planet-client-python,planetlabs/planet-client-python
|
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
+ from .__version__ import __version__ # NOQA
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
|
Put api.__version__ back in after version shuffle
|
## Code Before:
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
## Instruction:
Put api.__version__ back in after version shuffle
## Code After:
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
from .__version__ import __version__ # NOQA
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
|
...
from . import filters
from .__version__ import __version__ # NOQA
...
|
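Re-exporting the version from the package root restores the conventional lookup, so after this change both import styles work (sketch assumes the package is installed):

import planet.api
print(planet.api.__version__)

from planet.api import __version__

The # NOQA marker keeps flake8 from flagging the import as unused; it exists purely for re-export, which is also why it is not added to __all__.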
56661432ea78f193346fe8bcf33bd19a2e1787bc
|
tests/test_manager.py
|
tests/test_manager.py
|
def test_ensure_authority(manager_transaction):
authority = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority.name == 'Test Authority'
assert authority.rank == 0
assert authority.cardinality == 1234
|
def test_ensure_authority(manager_transaction):
authority1 = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority1.name == 'Test Authority'
assert authority1.rank == 0
assert authority1.cardinality == 1234
authority2 = manager_transaction.ensure_authority(
name='Test Authority',
rank=1,
cardinality=2345
)
assert authority1 is authority2
assert authority2.name == 'Test Authority'
assert authority2.rank == 1
assert authority2.cardinality == 2345
|
Test ensure_authority for both nonexistent and already existing Authority records.
|
Test ensure_authority for both nonexistent and already existing Authority records.
|
Python
|
mit
|
scolby33/OCSPdash,scolby33/OCSPdash,scolby33/OCSPdash
|
def test_ensure_authority(manager_transaction):
- authority = manager_transaction.ensure_authority(
+ authority1 = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
+ assert authority1.name == 'Test Authority'
+ assert authority1.rank == 0
+ assert authority1.cardinality == 1234
+ authority2 = manager_transaction.ensure_authority(
+ name='Test Authority',
+ rank=1,
+ cardinality=2345
+ )
+ assert authority1 is authority2
- assert authority.name == 'Test Authority'
+ assert authority2.name == 'Test Authority'
- assert authority.rank == 0
+ assert authority2.rank == 1
- assert authority.cardinality == 1234
+ assert authority2.cardinality == 2345
|
Test ensure_authority for both nonexistent and already existing Authority records.
|
## Code Before:
def test_ensure_authority(manager_transaction):
authority = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority.name == 'Test Authority'
assert authority.rank == 0
assert authority.cardinality == 1234
## Instruction:
Test ensure_authority for both nonexistent and already existing Authority records.
## Code After:
def test_ensure_authority(manager_transaction):
authority1 = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority1.name == 'Test Authority'
assert authority1.rank == 0
assert authority1.cardinality == 1234
authority2 = manager_transaction.ensure_authority(
name='Test Authority',
rank=1,
cardinality=2345
)
assert authority1 is authority2
assert authority2.name == 'Test Authority'
assert authority2.rank == 1
assert authority2.cardinality == 2345
|
...
def test_ensure_authority(manager_transaction):
authority1 = manager_transaction.ensure_authority(
name='Test Authority',
...
)
assert authority1.name == 'Test Authority'
assert authority1.rank == 0
assert authority1.cardinality == 1234
authority2 = manager_transaction.ensure_authority(
name='Test Authority',
rank=1,
cardinality=2345
)
assert authority1 is authority2
assert authority2.name == 'Test Authority'
assert authority2.rank == 1
assert authority2.cardinality == 2345
...
|
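The second call asserting that authority1 is authority2 pins down ensure_authority's upsert contract: match on name, update the mutable fields, return the same record. A generic sketch of that contract with an in-memory dict, independent of the project's ORM:

class Authority:
    def __init__(self, name, rank, cardinality):
        self.name, self.rank, self.cardinality = name, rank, cardinality

_registry = {}

def ensure_authority(name, rank, cardinality):
    """Return the existing Authority for `name`, updating it, or create one."""
    auth = _registry.get(name)
    if auth is None:
        auth = _registry[name] = Authority(name, rank, cardinality)
    else:
        auth.rank, auth.cardinality = rank, cardinality
    return auth

a1 = ensure_authority('Test Authority', 0, 1234)
a2 = ensure_authority('Test Authority', 1, 2345)
assert a1 is a2 and a2.rank == 1 and a2.cardinality == 2345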
5531f188c7bf3030cb9fc3b46d92a1db60817b7c
|
confirmation/views.py
|
confirmation/views.py
|
__revision__ = '$Id: views.py 21 2008-12-05 09:21:03Z jarek.zgoda $'
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.conf import settings
from confirmation.models import Confirmation
def confirm(request, confirmation_key):
confirmation_key = confirmation_key.lower()
obj = Confirmation.objects.confirm(confirmation_key)
confirmed = True
if not obj:
# confirmation failed
confirmed = False
try:
# try to get the object we were supposed to confirm
obj = Confirmation.objects.get(confirmation_key=confirmation_key)
except Confirmation.DoesNotExist:
pass
ctx = {
'object': obj,
'confirmed': confirmed,
'days': getattr(settings, 'EMAIL_CONFIRMATION_DAYS', 10),
}
templates = [
'confirmation/confirm.html',
]
if obj:
# if we have an object, we can use specific template
templates.insert(0, 'confirmation/confirm_%s.html' % obj._meta.module_name)
return render_to_response(templates, ctx,
context_instance=RequestContext(request))
|
__revision__ = '$Id: views.py 21 2008-12-05 09:21:03Z jarek.zgoda $'
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.conf import settings
from confirmation.models import Confirmation
def confirm(request, confirmation_key):
confirmation_key = confirmation_key.lower()
obj = Confirmation.objects.confirm(confirmation_key)
confirmed = True
if not obj:
# confirmation failed
confirmed = False
try:
# try to get the object we were supposed to confirm
obj = Confirmation.objects.get(confirmation_key=confirmation_key)
except Confirmation.DoesNotExist:
pass
ctx = {
'object': obj,
'confirmed': confirmed,
'days': getattr(settings, 'EMAIL_CONFIRMATION_DAYS', 10),
'key': confirmation_key,
}
templates = [
'confirmation/confirm.html',
]
if obj:
# if we have an object, we can use specific template
templates.insert(0, 'confirmation/confirm_%s.html' % obj._meta.module_name)
return render_to_response(templates, ctx,
context_instance=RequestContext(request))
|
Include confirmation key in context object.
|
Include confirmation key in context object.
This way our templates can reference the confirmation key later.
(imported from commit 4d57e1309386f2236829b6fdf4e4ad43c5b125c8)
|
Python
|
apache-2.0
|
schatt/zulip,deer-hope/zulip,PaulPetring/zulip,adnanh/zulip,jeffcao/zulip,hafeez3000/zulip,fw1121/zulip,KJin99/zulip,ufosky-server/zulip,mahim97/zulip,dhcrzf/zulip,dawran6/zulip,firstblade/zulip,akuseru/zulip,eastlhu/zulip,showell/zulip,dawran6/zulip,vikas-parashar/zulip,esander91/zulip,eastlhu/zulip,so0k/zulip,johnny9/zulip,amallia/zulip,sharmaeklavya2/zulip,mansilladev/zulip,krtkmj/zulip,dawran6/zulip,proliming/zulip,rht/zulip,jackrzhang/zulip,noroot/zulip,eastlhu/zulip,deer-hope/zulip,firstblade/zulip,wweiradio/zulip,jackrzhang/zulip,punchagan/zulip,peiwei/zulip,showell/zulip,Cheppers/zulip,guiquanz/zulip,seapasulli/zulip,hackerkid/zulip,jrowan/zulip,ipernet/zulip,dotcool/zulip,esander91/zulip,kou/zulip,Galexrt/zulip,nicholasbs/zulip,PhilSk/zulip,zwily/zulip,mdavid/zulip,amanharitsh123/zulip,Drooids/zulip,kokoar/zulip,karamcnair/zulip,stamhe/zulip,easyfmxu/zulip,PaulPetring/zulip,sharmaeklavya2/zulip,ahmadassaf/zulip,saitodisse/zulip,levixie/zulip,arpith/zulip,Drooids/zulip,zwily/zulip,gigawhitlocks/zulip,Drooids/zulip,dxq-git/zulip,swinghu/zulip,jonesgithub/zulip,thomasboyt/zulip,itnihao/zulip,dwrpayne/zulip,ashwinirudrappa/zulip,armooo/zulip,jimmy54/zulip,jessedhillon/zulip,LeeRisk/zulip,samatdav/zulip,zorojean/zulip,proliming/zulip,jerryge/zulip,fw1121/zulip,Suninus/zulip,ryanbackman/zulip,andersk/zulip,luyifan/zulip,pradiptad/zulip,joshisa/zulip,swinghu/zulip,armooo/zulip,grave-w-grave/zulip,Suninus/zulip,udxxabp/zulip,firstblade/zulip,akuseru/zulip,shubhamdhama/zulip,atomic-labs/zulip,bluesea/zulip,AZtheAsian/zulip,rht/zulip,hayderimran7/zulip,akuseru/zulip,peiwei/zulip,codeKonami/zulip,jerryge/zulip,fw1121/zulip,kou/zulip,shaunstanislaus/zulip,johnnygaddarr/zulip,kokoar/zulip,Frouk/zulip,samatdav/zulip,hackerkid/zulip,Frouk/zulip,saitodisse/zulip,hackerkid/zulip,lfranchi/zulip,ApsOps/zulip,sonali0901/zulip,voidException/zulip,atomic-labs/zulip,zachallaun/zulip,wdaher/zulip,noroot/zulip,zulip/zulip,xuxiao/zulip,gigawhitlocks/zulip,hackerkid/zulip,Vallher/zulip,susansls/zulip,reyha/zulip,paxapy/zulip,ryansnowboarder/zulip,xuxiao/zulip,ahmadassaf/zulip,dattatreya303/zulip,rht/zulip,shubhamdhama/zulip,jainayush975/zulip,noroot/zulip,TigorC/zulip,vabs22/zulip,luyifan/zulip,synicalsyntax/zulip,lfranchi/zulip,dwrpayne/zulip,kaiyuanheshang/zulip,dnmfarrell/zulip,kokoar/zulip,moria/zulip,jainayush975/zulip,thomasboyt/zulip,j831/zulip,wangdeshui/zulip,timabbott/zulip,levixie/zulip,vakila/zulip,akuseru/zulip,m1ssou/zulip,luyifan/zulip,bssrdf/zulip,johnnygaddarr/zulip,punchagan/zulip,vabs22/zulip,sharmaeklavya2/zulip,amallia/zulip,schatt/zulip,qq1012803704/zulip,wweiradio/zulip,showell/zulip,deer-hope/zulip,zachallaun/zulip,christi3k/zulip,Qgap/zulip,christi3k/zulip,zwily/zulip,schatt/zulip,MariaFaBella85/zulip,peguin40/zulip,ryanbackman/zulip,babbage/zulip,natanovia/zulip,wdaher/zulip,jeffcao/zulip,noroot/zulip,he15his/zulip,TigorC/zulip,babbage/zulip,tbutter/zulip,wavelets/zulip,developerfm/zulip,hj3938/zulip,sup95/zulip,wdaher/zulip,samatdav/zulip,mohsenSy/zulip,jerryge/zulip,huangkebo/zulip,adnanh/zulip,mansilladev/zulip,shaunstanislaus/zulip,Frouk/zulip,Juanvulcano/zulip,hj3938/zulip,Drooids/zulip,levixie/zulip,qq1012803704/zulip,ryanbackman/zulip,TigorC/zulip,Gabriel0402/zulip,johnnygaddarr/zulip,stamhe/zulip,m1ssou/zulip,mahim97/zulip,mdavid/zulip,blaze225/zulip,glovebx/zulip,bluesea/zulip,jphilipsen05/zulip,nicholasbs/zulip,brockwhittaker/zulip,showell/zulip,sonali0901/zulip,sonali0901/zulip,bastianh/zulip,johnnygaddarr/zulip,udxxabp/zulip,Cheppers/zulip,amanharitsh123/zulip,deer-hope/
zulip,alliejones/zulip,JPJPJPOPOP/zulip,lfranchi/zulip,Suninus/zulip,ryansnowboarder/zulip,babbage/zulip,KingxBanana/zulip,bitemyapp/zulip,shrikrishnaholla/zulip,sup95/zulip,so0k/zulip,dnmfarrell/zulip,Suninus/zulip,ikasumiwt/zulip,bowlofstew/zulip,tommyip/zulip,ApsOps/zulip,suxinde2009/zulip,suxinde2009/zulip,vaidap/zulip,akuseru/zulip,seapasulli/zulip,jeffcao/zulip,themass/zulip,Batterfii/zulip,zulip/zulip,ericzhou2008/zulip,vakila/zulip,Vallher/zulip,arpith/zulip,EasonYi/zulip,ryansnowboarder/zulip,willingc/zulip,eeshangarg/zulip,deer-hope/zulip,showell/zulip,itnihao/zulip,hayderimran7/zulip,shrikrishnaholla/zulip,technicalpickles/zulip,PhilSk/zulip,Qgap/zulip,kokoar/zulip,Qgap/zulip,sup95/zulip,tiansiyuan/zulip,aps-sids/zulip,karamcnair/zulip,Batterfii/zulip,bastianh/zulip,pradiptad/zulip,codeKonami/zulip,punchagan/zulip,dxq-git/zulip,ashwinirudrappa/zulip,samatdav/zulip,hackerkid/zulip,levixie/zulip,alliejones/zulip,stamhe/zulip,bluesea/zulip,DazWorrall/zulip,zachallaun/zulip,AZtheAsian/zulip,aliceriot/zulip,yuvipanda/zulip,Juanvulcano/zulip,shrikrishnaholla/zulip,arpith/zulip,ApsOps/zulip,zofuthan/zulip,zulip/zulip,zulip/zulip,seapasulli/zulip,arpitpanwar/zulip,ericzhou2008/zulip,thomasboyt/zulip,amyliu345/zulip,hayderimran7/zulip,willingc/zulip,hafeez3000/zulip,ashwinirudrappa/zulip,aps-sids/zulip,MayB/zulip,yocome/zulip,mahim97/zulip,ikasumiwt/zulip,tbutter/zulip,samatdav/zulip,blaze225/zulip,kou/zulip,cosmicAsymmetry/zulip,ipernet/zulip,ericzhou2008/zulip,MariaFaBella85/zulip,KJin99/zulip,joshisa/zulip,ufosky-server/zulip,seapasulli/zulip,andersk/zulip,niftynei/zulip,JanzTam/zulip,Suninus/zulip,suxinde2009/zulip,eeshangarg/zulip,vabs22/zulip,rishig/zulip,tommyip/zulip,dwrpayne/zulip,seapasulli/zulip,tommyip/zulip,bowlofstew/zulip,souravbadami/zulip,synicalsyntax/zulip,saitodisse/zulip,ikasumiwt/zulip,aakash-cr7/zulip,verma-varsha/zulip,timabbott/zulip,eeshangarg/zulip,seapasulli/zulip,pradiptad/zulip,timabbott/zulip,DazWorrall/zulip,vaidap/zulip,zacps/zulip,deer-hope/zulip,Cheppers/zulip,Galexrt/zulip,eastlhu/zulip,timabbott/zulip,glovebx/zulip,adnanh/zulip,vikas-parashar/zulip,ryansnowboarder/zulip,lfranchi/zulip,willingc/zulip,AZtheAsian/zulip,swinghu/zulip,fw1121/zulip,KingxBanana/zulip,wangdeshui/zulip,bitemyapp/zulip,verma-varsha/zulip,shrikrishnaholla/zulip,TigorC/zulip,Qgap/zulip,peguin40/zulip,nicholasbs/zulip,moria/zulip,firstblade/zulip,krtkmj/zulip,Juanvulcano/zulip,zachallaun/zulip,yuvipanda/zulip,natanovia/zulip,Juanvulcano/zulip,adnanh/zulip,suxinde2009/zulip,punchagan/zulip,j831/zulip,huangkebo/zulip,jeffcao/zulip,zhaoweigg/zulip,shrikrishnaholla/zulip,jeffcao/zulip,adnanh/zulip,eastlhu/zulip,firstblade/zulip,tommyip/zulip,huangkebo/zulip,bastianh/zulip,susansls/zulip,kaiyuanheshang/zulip,aps-sids/zulip,brainwane/zulip,zorojean/zulip,suxinde2009/zulip,niftynei/zulip,LeeRisk/zulip,MariaFaBella85/zulip,jerryge/zulip,developerfm/zulip,karamcnair/zulip,themass/zulip,MayB/zulip,hafeez3000/zulip,jackrzhang/zulip,so0k/zulip,gkotian/zulip,hengqujushi/zulip,itnihao/zulip,johnnygaddarr/zulip,LAndreas/zulip,rishig/zulip,amallia/zulip,avastu/zulip,yocome/zulip,proliming/zulip,Galexrt/zulip,stamhe/zulip,firstblade/zulip,dxq-git/zulip,bluesea/zulip,dxq-git/zulip,praveenaki/zulip,bluesea/zulip,xuanhan863/zulip,Frouk/zulip,ipernet/zulip,voidException/zulip,aps-sids/zulip,natanovia/zulip,karamcnair/zulip,peguin40/zulip,wavelets/zulip,bastianh/zulip,joshisa/zulip,jrowan/zulip,eeshangarg/zulip,huangkebo/zulip,Qgap/zulip,ufosky-server/zulip,DazWorrall/zulip,dawran6/zulip,proliming/zulip
,avastu/zulip,PaulPetring/zulip,umkay/zulip,hafeez3000/zulip,zulip/zulip,susansls/zulip,jphilipsen05/zulip,vaidap/zulip,Frouk/zulip,proliming/zulip,souravbadami/zulip,amyliu345/zulip,Cheppers/zulip,verma-varsha/zulip,arpith/zulip,EasonYi/zulip,ashwinirudrappa/zulip,zachallaun/zulip,dwrpayne/zulip,zacps/zulip,johnny9/zulip,jimmy54/zulip,wdaher/zulip,cosmicAsymmetry/zulip,fw1121/zulip,ryansnowboarder/zulip,jonesgithub/zulip,mdavid/zulip,yuvipanda/zulip,johnny9/zulip,Cheppers/zulip,zorojean/zulip,tbutter/zulip,sonali0901/zulip,Juanvulcano/zulip,hustlzp/zulip,reyha/zulip,m1ssou/zulip,wavelets/zulip,wangdeshui/zulip,eeshangarg/zulip,guiquanz/zulip,schatt/zulip,saitodisse/zulip,JanzTam/zulip,verma-varsha/zulip,Jianchun1/zulip,esander91/zulip,johnny9/zulip,hengqujushi/zulip,yocome/zulip,vaidap/zulip,KingxBanana/zulip,natanovia/zulip,ikasumiwt/zulip,PaulPetring/zulip,AZtheAsian/zulip,aakash-cr7/zulip,j831/zulip,yuvipanda/zulip,armooo/zulip,hafeez3000/zulip,rishig/zulip,mohsenSy/zulip,mahim97/zulip,christi3k/zulip,Diptanshu8/zulip,littledogboy/zulip,he15his/zulip,dxq-git/zulip,guiquanz/zulip,peguin40/zulip,noroot/zulip,SmartPeople/zulip,technicalpickles/zulip,KJin99/zulip,umkay/zulip,wweiradio/zulip,hengqujushi/zulip,LAndreas/zulip,zulip/zulip,tiansiyuan/zulip,babbage/zulip,themass/zulip,Drooids/zulip,LeeRisk/zulip,jainayush975/zulip,TigorC/zulip,ikasumiwt/zulip,LAndreas/zulip,Cheppers/zulip,Gabriel0402/zulip,cosmicAsymmetry/zulip,joyhchen/zulip,mahim97/zulip,paxapy/zulip,kou/zulip,SmartPeople/zulip,zofuthan/zulip,AZtheAsian/zulip,wweiradio/zulip,dnmfarrell/zulip,vaidap/zulip,rishig/zulip,krtkmj/zulip,technicalpickles/zulip,brainwane/zulip,schatt/zulip,tdr130/zulip,andersk/zulip,amyliu345/zulip,karamcnair/zulip,jonesgithub/zulip,Vallher/zulip,ryanbackman/zulip,arpitpanwar/zulip,PaulPetring/zulip,synicalsyntax/zulip,akuseru/zulip,Suninus/zulip,EasonYi/zulip,umkay/zulip,joyhchen/zulip,alliejones/zulip,pradiptad/zulip,so0k/zulip,timabbott/zulip,arpith/zulip,hustlzp/zulip,blaze225/zulip,Vallher/zulip,johnny9/zulip,umkay/zulip,armooo/zulip,hayderimran7/zulip,aakash-cr7/zulip,littledogboy/zulip,punchagan/zulip,avastu/zulip,codeKonami/zulip,verma-varsha/zulip,PhilSk/zulip,dotcool/zulip,dattatreya303/zulip,tommyip/zulip,luyifan/zulip,rht/zulip,zhaoweigg/zulip,zacps/zulip,rht/zulip,joshisa/zulip,he15his/zulip,voidException/zulip,willingc/zulip,EasonYi/zulip,voidException/zulip,zwily/zulip,lfranchi/zulip,sonali0901/zulip,esander91/zulip,joshisa/zulip,souravbadami/zulip,esander91/zulip,RobotCaleb/zulip,praveenaki/zulip,Gabriel0402/zulip,amyliu345/zulip,zachallaun/zulip,swinghu/zulip,wangdeshui/zulip,synicalsyntax/zulip,brainwane/zulip,peiwei/zulip,arpitpanwar/zulip,punchagan/zulip,souravbadami/zulip,zorojean/zulip,AZtheAsian/zulip,zofuthan/zulip,xuanhan863/zulip,KingxBanana/zulip,saitodisse/zulip,Vallher/zulip,sup95/zulip,armooo/zulip,blaze225/zulip,showell/zulip,kaiyuanheshang/zulip,jessedhillon/zulip,ryanbackman/zulip,huangkebo/zulip,dhcrzf/zulip,arpitpanwar/zulip,ipernet/zulip,joyhchen/zulip,guiquanz/zulip,atomic-labs/zulip,amyliu345/zulip,KingxBanana/zulip,reyha/zulip,krtkmj/zulip,KJin99/zulip,johnny9/zulip,Cheppers/zulip,MayB/zulip,easyfmxu/zulip,esander91/zulip,themass/zulip,Juanvulcano/zulip,dotcool/zulip,so0k/zulip,amallia/zulip,souravbadami/zulip,voidException/zulip,DazWorrall/zulip,saitodisse/zulip,so0k/zulip,ApsOps/zulip,zhaoweigg/zulip,rishig/zulip,kou/zulip,tbutter/zulip,schatt/zulip,brockwhittaker/zulip,joshisa/zulip,hj3938/zulip,developerfm/zulip,aakash-cr7/zulip,ufosky-server/zulip,dxq-git/zuli
p,zacps/zulip,Qgap/zulip,grave-w-grave/zulip,fw1121/zulip,PhilSk/zulip,j831/zulip,christi3k/zulip,gigawhitlocks/zulip,hayderimran7/zulip,udxxabp/zulip,synicalsyntax/zulip,JPJPJPOPOP/zulip,paxapy/zulip,hustlzp/zulip,xuanhan863/zulip,ipernet/zulip,paxapy/zulip,glovebx/zulip,cosmicAsymmetry/zulip,grave-w-grave/zulip,amallia/zulip,mansilladev/zulip,mohsenSy/zulip,RobotCaleb/zulip,aakash-cr7/zulip,shaunstanislaus/zulip,Galexrt/zulip,jonesgithub/zulip,rht/zulip,easyfmxu/zulip,wangdeshui/zulip,eeshangarg/zulip,praveenaki/zulip,rishig/zulip,dnmfarrell/zulip,noroot/zulip,ApsOps/zulip,dwrpayne/zulip,zofuthan/zulip,xuxiao/zulip,zofuthan/zulip,yocome/zulip,guiquanz/zulip,mansilladev/zulip,easyfmxu/zulip,vikas-parashar/zulip,seapasulli/zulip,johnny9/zulip,ApsOps/zulip,ipernet/zulip,glovebx/zulip,thomasboyt/zulip,LeeRisk/zulip,sup95/zulip,reyha/zulip,eastlhu/zulip,he15his/zulip,amallia/zulip,zorojean/zulip,wweiradio/zulip,MariaFaBella85/zulip,mdavid/zulip,ufosky-server/zulip,zwily/zulip,JanzTam/zulip,jackrzhang/zulip,shaunstanislaus/zulip,JPJPJPOPOP/zulip,jonesgithub/zulip,tdr130/zulip,paxapy/zulip,jainayush975/zulip,kou/zulip,tdr130/zulip,hengqujushi/zulip,hj3938/zulip,dnmfarrell/zulip,shubhamdhama/zulip,ashwinirudrappa/zulip,synicalsyntax/zulip,bitemyapp/zulip,Gabriel0402/zulip,jimmy54/zulip,lfranchi/zulip,bitemyapp/zulip,Frouk/zulip,MayB/zulip,bastianh/zulip,technicalpickles/zulip,dotcool/zulip,voidException/zulip,ufosky-server/zulip,technicalpickles/zulip,Batterfii/zulip,willingc/zulip,reyha/zulip,ahmadassaf/zulip,gkotian/zulip,LAndreas/zulip,jessedhillon/zulip,EasonYi/zulip,ipernet/zulip,jrowan/zulip,vakila/zulip,gkotian/zulip,isht3/zulip,moria/zulip,dnmfarrell/zulip,gigawhitlocks/zulip,developerfm/zulip,bitemyapp/zulip,peiwei/zulip,ryanbackman/zulip,gigawhitlocks/zulip,niftynei/zulip,praveenaki/zulip,wweiradio/zulip,tbutter/zulip,nicholasbs/zulip,bssrdf/zulip,qq1012803704/zulip,ericzhou2008/zulip,wavelets/zulip,brainwane/zulip,ahmadassaf/zulip,RobotCaleb/zulip,huangkebo/zulip,isht3/zulip,calvinleenyc/zulip,stamhe/zulip,dhcrzf/zulip,udxxabp/zulip,sharmaeklavya2/zulip,calvinleenyc/zulip,thomasboyt/zulip,aliceriot/zulip,vikas-parashar/zulip,verma-varsha/zulip,hackerkid/zulip,andersk/zulip,itnihao/zulip,krtkmj/zulip,natanovia/zulip,qq1012803704/zulip,dotcool/zulip,jerryge/zulip,pradiptad/zulip,levixie/zulip,RobotCaleb/zulip,easyfmxu/zulip,Vallher/zulip,he15his/zulip,so0k/zulip,avastu/zulip,bowlofstew/zulip,EasonYi/zulip,Batterfii/zulip,brockwhittaker/zulip,dnmfarrell/zulip,ashwinirudrappa/zulip,stamhe/zulip,peiwei/zulip,shubhamdhama/zulip,xuanhan863/zulip,brainwane/zulip,ericzhou2008/zulip,vikas-parashar/zulip,jphilipsen05/zulip,amallia/zulip,aliceriot/zulip,wangdeshui/zulip,DazWorrall/zulip,bluesea/zulip,karamcnair/zulip,zachallaun/zulip,andersk/zulip,ikasumiwt/zulip,thomasboyt/zulip,grave-w-grave/zulip,DazWorrall/zulip,littledogboy/zulip,he15his/zulip,peiwei/zulip,aakash-cr7/zulip,udxxabp/zulip,hustlzp/zulip,praveenaki/zulip,umkay/zulip,MayB/zulip,jrowan/zulip,niftynei/zulip,mansilladev/zulip,jimmy54/zulip,andersk/zulip,reyha/zulip,dwrpayne/zulip,m1ssou/zulip,kaiyuanheshang/zulip,krtkmj/zulip,mansilladev/zulip,Gabriel0402/zulip,gigawhitlocks/zulip,SmartPeople/zulip,LeeRisk/zulip,joshisa/zulip,themass/zulip,aps-sids/zulip,aliceriot/zulip,jeffcao/zulip,alliejones/zulip,hj3938/zulip,showell/zulip,Diptanshu8/zulip,bssrdf/zulip,stamhe/zulip,codeKonami/zulip,esander91/zulip,littledogboy/zulip,schatt/zulip,gkotian/zulip,KJin99/zulip,zwily/zulip,jackrzhang/zulip,armooo/zulip,mohsenSy/zulip,ApsOps/zulip,luyif
an/zulip,vakila/zulip,JPJPJPOPOP/zulip,RobotCaleb/zulip,zhaoweigg/zulip,tiansiyuan/zulip,mdavid/zulip,calvinleenyc/zulip,joyhchen/zulip,developerfm/zulip,shaunstanislaus/zulip,hafeez3000/zulip,hayderimran7/zulip,isht3/zulip,bssrdf/zulip,jackrzhang/zulip,umkay/zulip,xuanhan863/zulip,themass/zulip,kokoar/zulip,cosmicAsymmetry/zulip,vakila/zulip,zacps/zulip,noroot/zulip,SmartPeople/zulip,MariaFaBella85/zulip,KingxBanana/zulip,dotcool/zulip,LAndreas/zulip,johnnygaddarr/zulip,hustlzp/zulip,proliming/zulip,jessedhillon/zulip,Diptanshu8/zulip,hayderimran7/zulip,developerfm/zulip,arpith/zulip,ahmadassaf/zulip,Vallher/zulip,RobotCaleb/zulip,bowlofstew/zulip,shubhamdhama/zulip,jonesgithub/zulip,Jianchun1/zulip,bastianh/zulip,tdr130/zulip,aps-sids/zulip,qq1012803704/zulip,praveenaki/zulip,xuanhan863/zulip,Frouk/zulip,Drooids/zulip,xuxiao/zulip,willingc/zulip,MariaFaBella85/zulip,moria/zulip,krtkmj/zulip,moria/zulip,dattatreya303/zulip,Drooids/zulip,PhilSk/zulip,jimmy54/zulip,shaunstanislaus/zulip,Gabriel0402/zulip,hustlzp/zulip,shaunstanislaus/zulip,amyliu345/zulip,Diptanshu8/zulip,pradiptad/zulip,dhcrzf/zulip,jainayush975/zulip,timabbott/zulip,dhcrzf/zulip,babbage/zulip,PaulPetring/zulip,ahmadassaf/zulip,zwily/zulip,brainwane/zulip,samatdav/zulip,he15his/zulip,shrikrishnaholla/zulip,qq1012803704/zulip,qq1012803704/zulip,tdr130/zulip,kaiyuanheshang/zulip,peguin40/zulip,MariaFaBella85/zulip,Gabriel0402/zulip,PaulPetring/zulip,hj3938/zulip,zorojean/zulip,kaiyuanheshang/zulip,dhcrzf/zulip,amanharitsh123/zulip,dotcool/zulip,adnanh/zulip,aliceriot/zulip,technicalpickles/zulip,sonali0901/zulip,brockwhittaker/zulip,JanzTam/zulip,KJin99/zulip,hengqujushi/zulip,udxxabp/zulip,codeKonami/zulip,timabbott/zulip,SmartPeople/zulip,susansls/zulip,vakila/zulip,xuxiao/zulip,blaze225/zulip,hengqujushi/zulip,natanovia/zulip,jrowan/zulip,peiwei/zulip,aliceriot/zulip,nicholasbs/zulip,rht/zulip,amanharitsh123/zulip,vabs22/zulip,adnanh/zulip,m1ssou/zulip,Batterfii/zulip,yocome/zulip,tommyip/zulip,mdavid/zulip,yuvipanda/zulip,LAndreas/zulip,atomic-labs/zulip,ryansnowboarder/zulip,Jianchun1/zulip,bowlofstew/zulip,shubhamdhama/zulip,isht3/zulip,Diptanshu8/zulip,jphilipsen05/zulip,luyifan/zulip,yocome/zulip,vabs22/zulip,tiansiyuan/zulip,dxq-git/zulip,vakila/zulip,atomic-labs/zulip,hafeez3000/zulip,Diptanshu8/zulip,eeshangarg/zulip,MayB/zulip,mdavid/zulip,tiansiyuan/zulip,SmartPeople/zulip,ikasumiwt/zulip,udxxabp/zulip,brockwhittaker/zulip,blaze225/zulip,bitemyapp/zulip,suxinde2009/zulip,tiansiyuan/zulip,wweiradio/zulip,j831/zulip,mansilladev/zulip,susansls/zulip,ashwinirudrappa/zulip,luyifan/zulip,PhilSk/zulip,voidException/zulip,jonesgithub/zulip,aliceriot/zulip,calvinleenyc/zulip,avastu/zulip,zulip/zulip,synicalsyntax/zulip,bowlofstew/zulip,m1ssou/zulip,Jianchun1/zulip,codeKonami/zulip,dwrpayne/zulip,lfranchi/zulip,jphilipsen05/zulip,suxinde2009/zulip,babbage/zulip,vikas-parashar/zulip,kaiyuanheshang/zulip,JanzTam/zulip,jackrzhang/zulip,mohsenSy/zulip,niftynei/zulip,firstblade/zulip,yocome/zulip,jessedhillon/zulip,grave-w-grave/zulip,LeeRisk/zulip,jerryge/zulip,Batterfii/zulip,punchagan/zulip,avastu/zulip,sharmaeklavya2/zulip,bowlofstew/zulip,ericzhou2008/zulip,amanharitsh123/zulip,hustlzp/zulip,jessedhillon/zulip,JanzTam/zulip,saitodisse/zulip,bssrdf/zulip,shrikrishnaholla/zulip,sup95/zulip,gigawhitlocks/zulip,amanharitsh123/zulip,kokoar/zulip,tommyip/zulip,wdaher/zulip,brainwane/zulip,swinghu/zulip,xuanhan863/zulip,huangkebo/zulip,ufosky-server/zulip,brockwhittaker/zulip,atomic-labs/zulip,jimmy54/zulip,Suninus/zulip,zofuthan
/zulip,jainayush975/zulip,littledogboy/zulip,wavelets/zulip,wavelets/zulip,yuvipanda/zulip,moria/zulip,alliejones/zulip,Batterfii/zulip,johnnygaddarr/zulip,TigorC/zulip,christi3k/zulip,glovebx/zulip,tiansiyuan/zulip,willingc/zulip,umkay/zulip,swinghu/zulip,jerryge/zulip,Galexrt/zulip,joyhchen/zulip,thomasboyt/zulip,Jianchun1/zulip,natanovia/zulip,DazWorrall/zulip,zorojean/zulip,nicholasbs/zulip,cosmicAsymmetry/zulip,gkotian/zulip,arpitpanwar/zulip,bssrdf/zulip,zofuthan/zulip,dawran6/zulip,itnihao/zulip,tdr130/zulip,mahim97/zulip,jeffcao/zulip,isht3/zulip,JPJPJPOPOP/zulip,bastianh/zulip,alliejones/zulip,m1ssou/zulip,vaidap/zulip,andersk/zulip,arpitpanwar/zulip,zhaoweigg/zulip,dattatreya303/zulip,littledogboy/zulip,calvinleenyc/zulip,xuxiao/zulip,akuseru/zulip,tdr130/zulip,isht3/zulip,dattatreya303/zulip,pradiptad/zulip,alliejones/zulip,themass/zulip,susansls/zulip,codeKonami/zulip,littledogboy/zulip,technicalpickles/zulip,dawran6/zulip,hengqujushi/zulip,developerfm/zulip,dhcrzf/zulip,hackerkid/zulip,kokoar/zulip,babbage/zulip,kou/zulip,itnihao/zulip,hj3938/zulip,vabs22/zulip,atomic-labs/zulip,fw1121/zulip,praveenaki/zulip,xuxiao/zulip,JPJPJPOPOP/zulip,rishig/zulip,moria/zulip,jphilipsen05/zulip,calvinleenyc/zulip,EasonYi/zulip,JanzTam/zulip,easyfmxu/zulip,zhaoweigg/zulip,grave-w-grave/zulip,swinghu/zulip,aps-sids/zulip,Jianchun1/zulip,joyhchen/zulip,Qgap/zulip,levixie/zulip,christi3k/zulip,wdaher/zulip,levixie/zulip,sharmaeklavya2/zulip,proliming/zulip,zacps/zulip,jessedhillon/zulip,tbutter/zulip,mohsenSy/zulip,ahmadassaf/zulip,bssrdf/zulip,arpitpanwar/zulip,wavelets/zulip,paxapy/zulip,bitemyapp/zulip,souravbadami/zulip,guiquanz/zulip,jimmy54/zulip,nicholasbs/zulip,bluesea/zulip,j831/zulip,jrowan/zulip,KJin99/zulip,Galexrt/zulip,gkotian/zulip,shubhamdhama/zulip,LAndreas/zulip,glovebx/zulip,easyfmxu/zulip,peguin40/zulip,dattatreya303/zulip,ericzhou2008/zulip,glovebx/zulip,deer-hope/zulip,wdaher/zulip,RobotCaleb/zulip,avastu/zulip,eastlhu/zulip,armooo/zulip,guiquanz/zulip,wangdeshui/zulip,MayB/zulip,gkotian/zulip,yuvipanda/zulip,zhaoweigg/zulip,LeeRisk/zulip,karamcnair/zulip,itnihao/zulip,tbutter/zulip,Galexrt/zulip,ryansnowboarder/zulip,niftynei/zulip
|
__revision__ = '$Id: views.py 21 2008-12-05 09:21:03Z jarek.zgoda $'
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.conf import settings
from confirmation.models import Confirmation
def confirm(request, confirmation_key):
confirmation_key = confirmation_key.lower()
obj = Confirmation.objects.confirm(confirmation_key)
confirmed = True
if not obj:
# confirmation failed
confirmed = False
try:
# try to get the object we were supposed to confirm
obj = Confirmation.objects.get(confirmation_key=confirmation_key)
except Confirmation.DoesNotExist:
pass
ctx = {
'object': obj,
'confirmed': confirmed,
'days': getattr(settings, 'EMAIL_CONFIRMATION_DAYS', 10),
+ 'key': confirmation_key,
}
templates = [
'confirmation/confirm.html',
]
if obj:
# if we have an object, we can use specific template
templates.insert(0, 'confirmation/confirm_%s.html' % obj._meta.module_name)
return render_to_response(templates, ctx,
context_instance=RequestContext(request))
|
Include confirmation key in context object.
|
## Code Before:
__revision__ = '$Id: views.py 21 2008-12-05 09:21:03Z jarek.zgoda $'
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.conf import settings
from confirmation.models import Confirmation
def confirm(request, confirmation_key):
confirmation_key = confirmation_key.lower()
obj = Confirmation.objects.confirm(confirmation_key)
confirmed = True
if not obj:
# confirmation failed
confirmed = False
try:
# try to get the object we were supposed to confirm
obj = Confirmation.objects.get(confirmation_key=confirmation_key)
except Confirmation.DoesNotExist:
pass
ctx = {
'object': obj,
'confirmed': confirmed,
'days': getattr(settings, 'EMAIL_CONFIRMATION_DAYS', 10),
}
templates = [
'confirmation/confirm.html',
]
if obj:
# if we have an object, we can use specific template
templates.insert(0, 'confirmation/confirm_%s.html' % obj._meta.module_name)
return render_to_response(templates, ctx,
context_instance=RequestContext(request))
## Instruction:
Include confirmation key in context object.
## Code After:
__revision__ = '$Id: views.py 21 2008-12-05 09:21:03Z jarek.zgoda $'
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.conf import settings
from confirmation.models import Confirmation
def confirm(request, confirmation_key):
confirmation_key = confirmation_key.lower()
obj = Confirmation.objects.confirm(confirmation_key)
confirmed = True
if not obj:
# confirmation failed
confirmed = False
try:
# try to get the object we were supposed to confirm
obj = Confirmation.objects.get(confirmation_key=confirmation_key)
except Confirmation.DoesNotExist:
pass
ctx = {
'object': obj,
'confirmed': confirmed,
'days': getattr(settings, 'EMAIL_CONFIRMATION_DAYS', 10),
'key': confirmation_key,
}
templates = [
'confirmation/confirm.html',
]
if obj:
# if we have an object, we can use specific template
templates.insert(0, 'confirmation/confirm_%s.html' % obj._meta.module_name)
return render_to_response(templates, ctx,
context_instance=RequestContext(request))
|
...
'days': getattr(settings, 'EMAIL_CONFIRMATION_DAYS', 10),
'key': confirmation_key,
}
...
|
c7578896036bc07bb1edc2d79f699968c25ca89e
|
bika/lims/upgrade/to1117.py
|
bika/lims/upgrade/to1117.py
|
from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets')
|
from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets',
run_dependencies=False)
|
Upgrade 1117 - add run_dependencies=False
|
Upgrade 1117 - add run_dependencies=False
Somehow re-importing the 'portlets' step causes
a beforeDelete handler to fail a HoldingReference
check.
|
Python
|
agpl-3.0
|
labsanmartin/Bika-LIMS,veroc/Bika-LIMS,anneline/Bika-LIMS,veroc/Bika-LIMS,anneline/Bika-LIMS,rockfruit/bika.lims,rockfruit/bika.lims,labsanmartin/Bika-LIMS,anneline/Bika-LIMS,veroc/Bika-LIMS,DeBortoliWines/Bika-LIMS,labsanmartin/Bika-LIMS,DeBortoliWines/Bika-LIMS,DeBortoliWines/Bika-LIMS
|
from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
- setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets')
+ setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets',
+ run_dependencies=False)
|
Upgrade 1117 - add run_dependencies=False
|
## Code Before:
from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets')
## Instruction:
Upgrade 1117 - add run_dependencies=False
## Code After:
from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets',
run_dependencies=False)
|
# ... existing code ...
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets',
run_dependencies=False)
# ... rest of the code ...
|
d369b2ba967643d16c58fbad0be5b3a24785f602
|
neurodsp/tests/test_spectral_utils.py
|
neurodsp/tests/test_spectral_utils.py
|
"""Test the utility function from spectral."""
import numpy as np
from numpy.testing import assert_equal
from neurodsp.spectral.utils import *
###################################################################################################
###################################################################################################
def test_trim_spectrum():
freqs = np.array([5, 6, 7, 8, 9])
pows = np.array([1, 2, 3, 4, 5])
freqs_new, pows_new = trim_spectrum(freqs, pows, [6, 8])
assert_equal(freqs_new, np.array([6, 7, 8]))
assert_equal(pows_new, np.array([2, 3, 4]))
def test_rotate_powerlaw():
freqs = np.array([5, 6, 7, 8, 9])
pows = np.array([1, 2, 3, 4, 5])
d_exp = 1
pows_new = rotate_powerlaw(freqs, pows, d_exp)
assert pows.shape == pows_new.shape
|
"""Test the utility function from spectral."""
import numpy as np
from numpy.testing import assert_equal
from neurodsp.spectral.utils import *
###################################################################################################
###################################################################################################
def test_trim_spectrum():
freqs = np.array([5, 6, 7, 8, 9])
pows = np.array([1, 2, 3, 4, 5])
freqs_new, pows_new = trim_spectrum(freqs, pows, [6, 8])
assert_equal(freqs_new, np.array([6, 7, 8]))
assert_equal(pows_new, np.array([2, 3, 4]))
def test_trim_spectrogram():
freqs = np.array([5, 6, 7, 8])
times = np.array([0, 1, 2,])
pows = np.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9],
[10, 11, 12]])
freqs_new, t_new, pows_new = trim_spectrogram(freqs, times, pows, f_range=[6, 8], t_range=[0,1])
assert_equal(freqs_new, np.array([6, 7, 8]))
assert_equal(t_new, np.array([0, 1]))
assert_equal(pows_new, np.array([[4, 5], [7, 8], [10, 11]]))
def test_rotate_powerlaw():
freqs = np.array([5, 6, 7, 8, 9])
pows = np.array([1, 2, 3, 4, 5])
d_exp = 1
pows_new = rotate_powerlaw(freqs, pows, d_exp)
assert pows.shape == pows_new.shape
|
Add smoke test for trim_spectrogram
|
Add smoke test for trim_spectrogram
|
Python
|
apache-2.0
|
voytekresearch/neurodsp
|
"""Test the utility function from spectral."""
import numpy as np
from numpy.testing import assert_equal
from neurodsp.spectral.utils import *
###################################################################################################
###################################################################################################
def test_trim_spectrum():
freqs = np.array([5, 6, 7, 8, 9])
pows = np.array([1, 2, 3, 4, 5])
freqs_new, pows_new = trim_spectrum(freqs, pows, [6, 8])
assert_equal(freqs_new, np.array([6, 7, 8]))
assert_equal(pows_new, np.array([2, 3, 4]))
+ def test_trim_spectrogram():
+
+ freqs = np.array([5, 6, 7, 8])
+ times = np.array([0, 1, 2,])
+ pows = np.array([[1, 2, 3],
+ [4, 5, 6],
+ [7, 8, 9],
+ [10, 11, 12]])
+
+ freqs_new, t_new, pows_new = trim_spectrogram(freqs, times, pows, f_range=[6, 8], t_range=[0,1])
+ assert_equal(freqs_new, np.array([6, 7, 8]))
+ assert_equal(t_new, np.array([0, 1]))
+ assert_equal(pows_new, np.array([[4, 5], [7, 8], [10, 11]]))
+
def test_rotate_powerlaw():
freqs = np.array([5, 6, 7, 8, 9])
pows = np.array([1, 2, 3, 4, 5])
d_exp = 1
pows_new = rotate_powerlaw(freqs, pows, d_exp)
assert pows.shape == pows_new.shape
|
Add smoke test for trim_spectrogram
|
## Code Before:
"""Test the utility function from spectral."""
import numpy as np
from numpy.testing import assert_equal
from neurodsp.spectral.utils import *
###################################################################################################
###################################################################################################
def test_trim_spectrum():
freqs = np.array([5, 6, 7, 8, 9])
pows = np.array([1, 2, 3, 4, 5])
freqs_new, pows_new = trim_spectrum(freqs, pows, [6, 8])
assert_equal(freqs_new, np.array([6, 7, 8]))
assert_equal(pows_new, np.array([2, 3, 4]))
def test_rotate_powerlaw():
freqs = np.array([5, 6, 7, 8, 9])
pows = np.array([1, 2, 3, 4, 5])
d_exp = 1
pows_new = rotate_powerlaw(freqs, pows, d_exp)
assert pows.shape == pows_new.shape
## Instruction:
Add smoke test for trim_spectrogram
## Code After:
"""Test the utility function from spectral."""
import numpy as np
from numpy.testing import assert_equal
from neurodsp.spectral.utils import *
###################################################################################################
###################################################################################################
def test_trim_spectrum():
freqs = np.array([5, 6, 7, 8, 9])
pows = np.array([1, 2, 3, 4, 5])
freqs_new, pows_new = trim_spectrum(freqs, pows, [6, 8])
assert_equal(freqs_new, np.array([6, 7, 8]))
assert_equal(pows_new, np.array([2, 3, 4]))
def test_trim_spectrogram():
freqs = np.array([5, 6, 7, 8])
times = np.array([0, 1, 2,])
pows = np.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9],
[10, 11, 12]])
freqs_new, t_new, pows_new = trim_spectrogram(freqs, times, pows, f_range=[6, 8], t_range=[0,1])
assert_equal(freqs_new, np.array([6, 7, 8]))
assert_equal(t_new, np.array([0, 1]))
assert_equal(pows_new, np.array([[4, 5], [7, 8], [10, 11]]))
def test_rotate_powerlaw():
freqs = np.array([5, 6, 7, 8, 9])
pows = np.array([1, 2, 3, 4, 5])
d_exp = 1
pows_new = rotate_powerlaw(freqs, pows, d_exp)
assert pows.shape == pows_new.shape
|
// ... existing code ...
def test_trim_spectrogram():
freqs = np.array([5, 6, 7, 8])
times = np.array([0, 1, 2,])
pows = np.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9],
[10, 11, 12]])
freqs_new, t_new, pows_new = trim_spectrogram(freqs, times, pows, f_range=[6, 8], t_range=[0,1])
assert_equal(freqs_new, np.array([6, 7, 8]))
assert_equal(t_new, np.array([0, 1]))
assert_equal(pows_new, np.array([[4, 5], [7, 8], [10, 11]]))
def test_rotate_powerlaw():
// ... rest of the code ...
|
ea3e327bb602689e136479ce41f568aa2ee47cf4
|
databot/utils/html.py
|
databot/utils/html.py
|
import bs4
import cgi
def get_content(data, errors='strict'):
headers = {k.lower(): v for k, v in data.get('headers', {}).items()}
content_type_header = headers.get('content-type', '')
content_type, params = cgi.parse_header(content_type_header)
if content_type.lower() in ('text/html', 'text/xml'):
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
return data['content'].decode(soup.original_encoding, errors)
elif content_type.startswith('text/'):
return data['content'].decode(data['encoding'], errors)
else:
return data['content']
|
import bs4
import cgi
def get_page_encoding(soup, default_encoding=None):
for meta in soup.select('head > meta[http-equiv="Content-Type"]'):
content_type, params = cgi.parse_header(meta['content'])
if 'charset' in params:
return params['charset']
return default_encoding
def get_content(data, errors='strict'):
headers = {k.lower(): v for k, v in data.get('headers', {}).items()}
content_type_header = headers.get('content-type', '')
content_type, params = cgi.parse_header(content_type_header)
if content_type.lower() in ('text/html', 'text/xml'):
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
encoding = get_page_encoding(soup, soup.original_encoding)
return data['content'].decode(encoding, errors)
elif content_type.startswith('text/'):
return data['content'].decode(data['encoding'], errors)
else:
return data['content']
|
Improve detection of page encoding
|
Improve detection of page encoding
|
Python
|
agpl-3.0
|
sirex/databot,sirex/databot
|
import bs4
import cgi
+
+
+ def get_page_encoding(soup, default_encoding=None):
+ for meta in soup.select('head > meta[http-equiv="Content-Type"]'):
+ content_type, params = cgi.parse_header(meta['content'])
+ if 'charset' in params:
+ return params['charset']
+ return default_encoding
def get_content(data, errors='strict'):
headers = {k.lower(): v for k, v in data.get('headers', {}).items()}
content_type_header = headers.get('content-type', '')
content_type, params = cgi.parse_header(content_type_header)
if content_type.lower() in ('text/html', 'text/xml'):
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
+ encoding = get_page_encoding(soup, soup.original_encoding)
- return data['content'].decode(soup.original_encoding, errors)
+ return data['content'].decode(encoding, errors)
elif content_type.startswith('text/'):
return data['content'].decode(data['encoding'], errors)
else:
return data['content']
|
Improve detection of page encoding
|
## Code Before:
import bs4
import cgi
def get_content(data, errors='strict'):
headers = {k.lower(): v for k, v in data.get('headers', {}).items()}
content_type_header = headers.get('content-type', '')
content_type, params = cgi.parse_header(content_type_header)
if content_type.lower() in ('text/html', 'text/xml'):
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
return data['content'].decode(soup.original_encoding, errors)
elif content_type.startswith('text/'):
return data['content'].decode(data['encoding'], errors)
else:
return data['content']
## Instruction:
Improve detection of page encoding
## Code After:
import bs4
import cgi
def get_page_encoding(soup, default_encoding=None):
for meta in soup.select('head > meta[http-equiv="Content-Type"]'):
content_type, params = cgi.parse_header(meta['content'])
if 'charset' in params:
return params['charset']
return default_encoding
def get_content(data, errors='strict'):
headers = {k.lower(): v for k, v in data.get('headers', {}).items()}
content_type_header = headers.get('content-type', '')
content_type, params = cgi.parse_header(content_type_header)
if content_type.lower() in ('text/html', 'text/xml'):
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
encoding = get_page_encoding(soup, soup.original_encoding)
return data['content'].decode(encoding, errors)
elif content_type.startswith('text/'):
return data['content'].decode(data['encoding'], errors)
else:
return data['content']
|
...
import cgi
def get_page_encoding(soup, default_encoding=None):
for meta in soup.select('head > meta[http-equiv="Content-Type"]'):
content_type, params = cgi.parse_header(meta['content'])
if 'charset' in params:
return params['charset']
return default_encoding
...
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
encoding = get_page_encoding(soup, soup.original_encoding)
return data['content'].decode(encoding, errors)
elif content_type.startswith('text/'):
...
|
207d4c71fbc40dd30c0099769d6f12fcb63f826e
|
tests/test_utils.py
|
tests/test_utils.py
|
import subprocess
from pytest import mark
from pytest_benchmark.utils import clonefunc, get_commit_info
f1 = lambda a: a
def f2(a):
return a
@mark.parametrize('f', [f1, f2])
def test_clonefunc(f):
assert clonefunc(f)(1) == f(1)
assert clonefunc(f)(1) == f(1)
def test_clonefunc_not_function():
assert clonefunc(1) == 1
@mark.parametrize('scm', ['git', 'hg'])
def test_get_commit_info(scm, testdir):
subprocess.check_call([scm, 'init', '.'])
if scm == 'git':
subprocess.check_call('git config user.email [email protected]'.split())
subprocess.check_call('git config user.name you'.split())
testdir.makepyfile('asdf')
subprocess.check_call([scm, 'add', 'test_get_commit_info.py'])
subprocess.check_call([scm, 'commit', '-m', 'asdf'])
out = get_commit_info()
assert out.get('dirty') == False
assert 'id' in out
testdir.makepyfile('sadf')
out = get_commit_info()
assert out.get('dirty') == True
assert 'id' in out
|
import subprocess
from pytest import mark
from pytest_benchmark.utils import clonefunc, get_commit_info
pytest_plugins = 'pytester',
f1 = lambda a: a
def f2(a):
return a
@mark.parametrize('f', [f1, f2])
def test_clonefunc(f):
assert clonefunc(f)(1) == f(1)
assert clonefunc(f)(1) == f(1)
def test_clonefunc_not_function():
assert clonefunc(1) == 1
@mark.parametrize('scm', ['git', 'hg'])
def test_get_commit_info(scm, testdir):
subprocess.check_call([scm, 'init', '.'])
if scm == 'git':
subprocess.check_call('git config user.email [email protected]'.split())
subprocess.check_call('git config user.name you'.split())
else:
testdir.tmpdir.join('.hg', 'hgrc').write("""
[ui]
username = you <[email protected]>
""")
testdir.makepyfile('asdf')
subprocess.check_call([scm, 'add', 'test_get_commit_info.py'])
subprocess.check_call([scm, 'commit', '-m', 'asdf'])
out = get_commit_info()
assert out.get('dirty') == False
assert 'id' in out
testdir.makepyfile('sadf')
out = get_commit_info()
assert out.get('dirty') == True
assert 'id' in out
|
Add missing username conf for mercurial.
|
Add missing username conf for mercurial.
|
Python
|
bsd-2-clause
|
thedrow/pytest-benchmark,SectorLabs/pytest-benchmark,ionelmc/pytest-benchmark,aldanor/pytest-benchmark
|
import subprocess
from pytest import mark
from pytest_benchmark.utils import clonefunc, get_commit_info
+
+ pytest_plugins = 'pytester',
f1 = lambda a: a
def f2(a):
return a
@mark.parametrize('f', [f1, f2])
def test_clonefunc(f):
assert clonefunc(f)(1) == f(1)
assert clonefunc(f)(1) == f(1)
def test_clonefunc_not_function():
assert clonefunc(1) == 1
@mark.parametrize('scm', ['git', 'hg'])
def test_get_commit_info(scm, testdir):
subprocess.check_call([scm, 'init', '.'])
if scm == 'git':
subprocess.check_call('git config user.email [email protected]'.split())
subprocess.check_call('git config user.name you'.split())
+ else:
+ testdir.tmpdir.join('.hg', 'hgrc').write("""
+ [ui]
+ username = you <[email protected]>
+ """)
+
testdir.makepyfile('asdf')
subprocess.check_call([scm, 'add', 'test_get_commit_info.py'])
subprocess.check_call([scm, 'commit', '-m', 'asdf'])
out = get_commit_info()
assert out.get('dirty') == False
assert 'id' in out
testdir.makepyfile('sadf')
out = get_commit_info()
assert out.get('dirty') == True
assert 'id' in out
|
Add missing username conf for mercurial.
|
## Code Before:
import subprocess
from pytest import mark
from pytest_benchmark.utils import clonefunc, get_commit_info
f1 = lambda a: a
def f2(a):
return a
@mark.parametrize('f', [f1, f2])
def test_clonefunc(f):
assert clonefunc(f)(1) == f(1)
assert clonefunc(f)(1) == f(1)
def test_clonefunc_not_function():
assert clonefunc(1) == 1
@mark.parametrize('scm', ['git', 'hg'])
def test_get_commit_info(scm, testdir):
subprocess.check_call([scm, 'init', '.'])
if scm == 'git':
subprocess.check_call('git config user.email [email protected]'.split())
subprocess.check_call('git config user.name you'.split())
testdir.makepyfile('asdf')
subprocess.check_call([scm, 'add', 'test_get_commit_info.py'])
subprocess.check_call([scm, 'commit', '-m', 'asdf'])
out = get_commit_info()
assert out.get('dirty') == False
assert 'id' in out
testdir.makepyfile('sadf')
out = get_commit_info()
assert out.get('dirty') == True
assert 'id' in out
## Instruction:
Add missing username conf for mercurial.
## Code After:
import subprocess
from pytest import mark
from pytest_benchmark.utils import clonefunc, get_commit_info
pytest_plugins = 'pytester',
f1 = lambda a: a
def f2(a):
return a
@mark.parametrize('f', [f1, f2])
def test_clonefunc(f):
assert clonefunc(f)(1) == f(1)
assert clonefunc(f)(1) == f(1)
def test_clonefunc_not_function():
assert clonefunc(1) == 1
@mark.parametrize('scm', ['git', 'hg'])
def test_get_commit_info(scm, testdir):
subprocess.check_call([scm, 'init', '.'])
if scm == 'git':
subprocess.check_call('git config user.email [email protected]'.split())
subprocess.check_call('git config user.name you'.split())
else:
testdir.tmpdir.join('.hg', 'hgrc').write("""
[ui]
username = you <[email protected]>
""")
testdir.makepyfile('asdf')
subprocess.check_call([scm, 'add', 'test_get_commit_info.py'])
subprocess.check_call([scm, 'commit', '-m', 'asdf'])
out = get_commit_info()
assert out.get('dirty') == False
assert 'id' in out
testdir.makepyfile('sadf')
out = get_commit_info()
assert out.get('dirty') == True
assert 'id' in out
|
...
from pytest_benchmark.utils import clonefunc, get_commit_info
pytest_plugins = 'pytester',
...
subprocess.check_call('git config user.name you'.split())
else:
testdir.tmpdir.join('.hg', 'hgrc').write("""
[ui]
username = you <[email protected]>
""")
testdir.makepyfile('asdf')
...
|
dabc1f4a869f8da5106248dcf860c75d1fe9f538
|
geotrek/common/management/commands/update_permissions.py
|
geotrek/common/management/commands/update_permissions.py
|
import logging
from django.conf import settings
from django.utils.importlib import import_module
from django.db.models import get_apps
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from mapentity import registry
from mapentity.registry import create_mapentity_model_permissions
from geotrek.common.mixins import BasePublishableMixin
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Create models permissions"
def execute(self, *args, **options):
logger.info("Synchronize django permissions")
for app in get_apps():
create_permissions(app, [], int(options.get('verbosity', 1)))
logger.info("Done.")
logger.info("Synchronize mapentity permissions")
# Make sure apps are registered at this point
import_module(settings.ROOT_URLCONF)
# For all models registered, add missing bits
for model in registry.registry.keys():
create_mapentity_model_permissions(model)
logger.info("Done.")
logger.info("Synchronize geotrek permissions")
for content_type in ContentType.objects.all():
if issubclass(content_type.model_class(), BasePublishableMixin):
Permission.objects.get_or_create(
codename='publish_%s' % content_type.model,
name='Can publish %s' % content_type.name,
content_type=content_type)
logger.info("Done.")
|
import logging
from django.conf import settings
from django.utils.importlib import import_module
from django.db.models import get_apps
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from mapentity import registry
from mapentity.registry import create_mapentity_model_permissions
from geotrek.common.mixins import BasePublishableMixin
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Create models permissions"
def execute(self, *args, **options):
logger.info("Synchronize django permissions")
for app in get_apps():
create_permissions(app, [], int(options.get('verbosity', 1)))
logger.info("Done.")
logger.info("Synchronize mapentity permissions")
# Make sure apps are registered at this point
import_module(settings.ROOT_URLCONF)
# For all models registered, add missing bits
for model in registry.registry.keys():
create_mapentity_model_permissions(model)
logger.info("Done.")
logger.info("Synchronize geotrek permissions")
for content_type in ContentType.objects.all():
model = content_type.model_class()
if model and issubclass(model, BasePublishableMixin):
Permission.objects.get_or_create(
codename='publish_%s' % content_type.model,
name='Can publish %s' % content_type.name,
content_type=content_type)
logger.info("Done.")
|
Fix update_permission command for legacy content types
|
Fix update_permission command for legacy content types
|
Python
|
bsd-2-clause
|
johan--/Geotrek,GeotrekCE/Geotrek-admin,mabhub/Geotrek,Anaethelion/Geotrek,makinacorpus/Geotrek,Anaethelion/Geotrek,makinacorpus/Geotrek,Anaethelion/Geotrek,mabhub/Geotrek,GeotrekCE/Geotrek-admin,johan--/Geotrek,GeotrekCE/Geotrek-admin,johan--/Geotrek,johan--/Geotrek,makinacorpus/Geotrek,mabhub/Geotrek,Anaethelion/Geotrek,mabhub/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin
|
import logging
from django.conf import settings
from django.utils.importlib import import_module
from django.db.models import get_apps
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from mapentity import registry
from mapentity.registry import create_mapentity_model_permissions
from geotrek.common.mixins import BasePublishableMixin
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Create models permissions"
def execute(self, *args, **options):
logger.info("Synchronize django permissions")
for app in get_apps():
create_permissions(app, [], int(options.get('verbosity', 1)))
logger.info("Done.")
logger.info("Synchronize mapentity permissions")
# Make sure apps are registered at this point
import_module(settings.ROOT_URLCONF)
# For all models registered, add missing bits
for model in registry.registry.keys():
create_mapentity_model_permissions(model)
logger.info("Done.")
logger.info("Synchronize geotrek permissions")
for content_type in ContentType.objects.all():
+ model = content_type.model_class()
- if issubclass(content_type.model_class(), BasePublishableMixin):
+ if model and issubclass(model, BasePublishableMixin):
Permission.objects.get_or_create(
codename='publish_%s' % content_type.model,
name='Can publish %s' % content_type.name,
content_type=content_type)
logger.info("Done.")
|
Fix update_permission command for legacy content types
|
## Code Before:
import logging
from django.conf import settings
from django.utils.importlib import import_module
from django.db.models import get_apps
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from mapentity import registry
from mapentity.registry import create_mapentity_model_permissions
from geotrek.common.mixins import BasePublishableMixin
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Create models permissions"
def execute(self, *args, **options):
logger.info("Synchronize django permissions")
for app in get_apps():
create_permissions(app, [], int(options.get('verbosity', 1)))
logger.info("Done.")
logger.info("Synchronize mapentity permissions")
# Make sure apps are registered at this point
import_module(settings.ROOT_URLCONF)
# For all models registered, add missing bits
for model in registry.registry.keys():
create_mapentity_model_permissions(model)
logger.info("Done.")
logger.info("Synchronize geotrek permissions")
for content_type in ContentType.objects.all():
if issubclass(content_type.model_class(), BasePublishableMixin):
Permission.objects.get_or_create(
codename='publish_%s' % content_type.model,
name='Can publish %s' % content_type.name,
content_type=content_type)
logger.info("Done.")
## Instruction:
Fix update_permission command for legacy content types
## Code After:
import logging
from django.conf import settings
from django.utils.importlib import import_module
from django.db.models import get_apps
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from mapentity import registry
from mapentity.registry import create_mapentity_model_permissions
from geotrek.common.mixins import BasePublishableMixin
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Create models permissions"
def execute(self, *args, **options):
logger.info("Synchronize django permissions")
for app in get_apps():
create_permissions(app, [], int(options.get('verbosity', 1)))
logger.info("Done.")
logger.info("Synchronize mapentity permissions")
# Make sure apps are registered at this point
import_module(settings.ROOT_URLCONF)
# For all models registered, add missing bits
for model in registry.registry.keys():
create_mapentity_model_permissions(model)
logger.info("Done.")
logger.info("Synchronize geotrek permissions")
for content_type in ContentType.objects.all():
model = content_type.model_class()
if model and issubclass(model, BasePublishableMixin):
Permission.objects.get_or_create(
codename='publish_%s' % content_type.model,
name='Can publish %s' % content_type.name,
content_type=content_type)
logger.info("Done.")
|
...
for content_type in ContentType.objects.all():
model = content_type.model_class()
if model and issubclass(model, BasePublishableMixin):
Permission.objects.get_or_create(
...
|
db0aa94de30d73217f9091635c92f59b8af98ef7
|
alg_sum_list.py
|
alg_sum_list.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_for(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = [0, 1, 2, 3, 4, 5]
start_time = time.time()
print('By for loop: {}'.format(sum_list_for(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_iter(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = range(100)
start_time = time.time()
print('By iteration: {}'.format(sum_list_iter(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
Rename to sum_list_iter() and revise main()'s num_ls
|
Rename to sum_list_iter() and revise main()'s num_ls
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
- def sum_list_for(num_ls):
+ def sum_list_iter(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
- num_ls = [0, 1, 2, 3, 4, 5]
+ num_ls = range(100)
start_time = time.time()
- print('By for loop: {}'.format(sum_list_for(num_ls)))
+ print('By iteration: {}'.format(sum_list_iter(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
Rename to sum_list_iter() and revise main()'s num_ls
|
## Code Before:
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_for(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = [0, 1, 2, 3, 4, 5]
start_time = time.time()
print('By for loop: {}'.format(sum_list_for(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
## Instruction:
Rename to sum_list_iter() and revise main()'s num_ls
## Code After:
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_iter(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = range(100)
start_time = time.time()
print('By iteration: {}'.format(sum_list_iter(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
// ... existing code ...
def sum_list_iter(num_ls):
"""Sum number list by for loop."""
// ... modified code ...
num_ls = range(100)
...
start_time = time.time()
print('By iteration: {}'.format(sum_list_iter(num_ls)))
print('Time: {}'.format(time.time() - start_time))
// ... rest of the code ...
|
db334f19f66a4d842f206696a40ac2d351c774ac
|
Testing/test_Misc.py
|
Testing/test_Misc.py
|
import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
|
import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
# This used to raise an exception.
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
def test_ChangingFunctionDefs(self):
"""
Test whether changing function definitions are handled correctly.
"""
net = Network('test')
net.add_parameter('x', 0.0)
net.add_rate_rule('x', 'f(1)')
net.add_func_def('f', ['x'], 'x+2')
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 30)
# It's not clear to me why this version wasn't causing failures
# before...
#net.remove_component('f')
#net.add_func_def('f', ['x'], 'x+4')
net.functionDefinitions.get('f').math = 'x+4'
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 50)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
|
Add test for bug involving function definitions that Jordan found.
|
Add test for bug involving function definitions that Jordan found.
|
Python
|
bsd-3-clause
|
GutenkunstLab/SloppyCell,GutenkunstLab/SloppyCell
|
import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
+ # This used to raise an exception.
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
+ def test_ChangingFunctionDefs(self):
+ """
+ Test whether changing function definitions are handled correctly.
+ """
+ net = Network('test')
+ net.add_parameter('x', 0.0)
+ net.add_rate_rule('x', 'f(1)')
+ net.add_func_def('f', ['x'], 'x+2')
+ traj = Dynamics.integrate(net, [0, 10])
+ self.assertAlmostEqual(traj.get_var_val('x', 10), 30)
+ # It's not clear to me why this version wasn't causing failures
+ # before...
+ #net.remove_component('f')
+ #net.add_func_def('f', ['x'], 'x+4')
+ net.functionDefinitions.get('f').math = 'x+4'
+ traj = Dynamics.integrate(net, [0, 10])
+ self.assertAlmostEqual(traj.get_var_val('x', 10), 50)
+
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
|
Add test for bug involving function definitions that Jordan found.
|
## Code Before:
import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
## Instruction:
Add test for bug involving function definitions that Jordan found.
## Code After:
import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
# This used to raise an exception.
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
def test_ChangingFunctionDefs(self):
"""
Test whether changing function definitions are handled correctly.
"""
net = Network('test')
net.add_parameter('x', 0.0)
net.add_rate_rule('x', 'f(1)')
net.add_func_def('f', ['x'], 'x+2')
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 30)
# It's not clear to me why this version wasn't causing failures
# before...
#net.remove_component('f')
#net.add_func_def('f', ['x'], 'x+4')
net.functionDefinitions.get('f').math = 'x+4'
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 50)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
|
# ... existing code ...
'None'"""
# This used to raise an exception.
net = base_net.copy('test')
# ... modified code ...
def test_ChangingFunctionDefs(self):
"""
Test whether changing function definitions are handled correctly.
"""
net = Network('test')
net.add_parameter('x', 0.0)
net.add_rate_rule('x', 'f(1)')
net.add_func_def('f', ['x'], 'x+2')
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 30)
# It's not clear to me why this version wasn't causing failures
# before...
#net.remove_component('f')
#net.add_func_def('f', ['x'], 'x+4')
net.functionDefinitions.get('f').math = 'x+4'
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 50)
suite = unittest.makeSuite(test_Misc)
# ... rest of the code ...
|
b5e368437a600d78e22a53abe53c0103b20daa24
|
_python/main/migrations/0003_auto_20191029_2015.py
|
_python/main/migrations/0003_auto_20191029_2015.py
|
from django.db import migrations, models
import main.models
class Migration(migrations.Migration):
dependencies = [
('main', '0002_auto_20191007_1639'),
]
operations = [
migrations.AlterField(
model_name='contentnode',
name='headnote',
field=main.models.SanitizingTextField(blank=True, null=True),
),
migrations.AlterField(
model_name='default',
name='url',
field=models.URLField(max_length=1024),
),
migrations.AlterField(
model_name='textblock',
name='content',
field=main.models.SanitizingCharField(max_length=5242880),
),
]
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0002_auto_20191007_1639'),
]
operations = [
migrations.AlterField(
model_name='default',
name='url',
field=models.URLField(max_length=1024),
),
]
|
Repair migration, which was a no-op in SQL and was 'faked' anyway.
|
Repair migration, which was a no-op in SQL and was 'faked' anyway.
|
Python
|
agpl-3.0
|
harvard-lil/h2o,harvard-lil/h2o,harvard-lil/h2o,harvard-lil/h2o
|
from django.db import migrations, models
- import main.models
class Migration(migrations.Migration):
dependencies = [
('main', '0002_auto_20191007_1639'),
]
operations = [
migrations.AlterField(
- model_name='contentnode',
- name='headnote',
- field=main.models.SanitizingTextField(blank=True, null=True),
- ),
- migrations.AlterField(
model_name='default',
name='url',
field=models.URLField(max_length=1024),
),
- migrations.AlterField(
- model_name='textblock',
- name='content',
- field=main.models.SanitizingCharField(max_length=5242880),
- ),
]
|
Repair migration, which was a no-op in SQL and was 'faked' anyway.
|
## Code Before:
from django.db import migrations, models
import main.models
class Migration(migrations.Migration):
dependencies = [
('main', '0002_auto_20191007_1639'),
]
operations = [
migrations.AlterField(
model_name='contentnode',
name='headnote',
field=main.models.SanitizingTextField(blank=True, null=True),
),
migrations.AlterField(
model_name='default',
name='url',
field=models.URLField(max_length=1024),
),
migrations.AlterField(
model_name='textblock',
name='content',
field=main.models.SanitizingCharField(max_length=5242880),
),
]
## Instruction:
Repair migration, which was a no-op in SQL and was 'faked' anyway.
## Code After:
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0002_auto_20191007_1639'),
]
operations = [
migrations.AlterField(
model_name='default',
name='url',
field=models.URLField(max_length=1024),
),
]
|
// ... existing code ...
from django.db import migrations, models
// ... modified code ...
migrations.AlterField(
model_name='default',
...
),
]
// ... rest of the code ...
|
23d8942ffeeee72e21330bd8ecc5bfb5e91bbc3b
|
certidude/push.py
|
certidude/push.py
|
import click
import json
import logging
import requests
from datetime import datetime
from certidude import config
def publish(event_type, event_data):
"""
Publish event on push server
"""
if not isinstance(event_data, basestring):
from certidude.decorators import MyEncoder
event_data = json.dumps(event_data, cls=MyEncoder)
url = config.PUSH_PUBLISH % config.PUSH_TOKEN
click.echo("Publishing %s event '%s' on %s" % (event_type, event_data, url))
try:
notification = requests.post(
url,
data=event_data,
headers={"X-EventSource-Event": event_type, "User-Agent": "Certidude API"})
if notification.status_code == requests.codes.created:
pass # Sent to client
elif notification.status_code == requests.codes.accepted:
pass # Buffered in nchan
else:
click.echo("Failed to submit event to push server, server responded %d" % (
notification.status_code))
except requests.exceptions.ConnectionError:
click.echo("Failed to submit event to push server, connection error")
class PushLogHandler(logging.Handler):
"""
To be used with Python log handling framework for publishing log entries
"""
def emit(self, record):
from certidude.push import publish
publish("log-entry", dict(
created = datetime.utcfromtimestamp(record.created),
message = record.msg % record.args,
severity = record.levelname.lower()))
|
import click
import json
import logging
import requests
from datetime import datetime
from certidude import config
def publish(event_type, event_data):
"""
Publish event on push server
"""
if not config.PUSH_PUBLISH:
# Push server disabled
return
if not isinstance(event_data, basestring):
from certidude.decorators import MyEncoder
event_data = json.dumps(event_data, cls=MyEncoder)
url = config.PUSH_PUBLISH % config.PUSH_TOKEN
click.echo("Publishing %s event '%s' on %s" % (event_type, event_data, url))
try:
notification = requests.post(
url,
data=event_data,
headers={"X-EventSource-Event": event_type, "User-Agent": "Certidude API"})
if notification.status_code == requests.codes.created:
pass # Sent to client
elif notification.status_code == requests.codes.accepted:
pass # Buffered in nchan
else:
click.echo("Failed to submit event to push server, server responded %d" % (
notification.status_code))
except requests.exceptions.ConnectionError:
click.echo("Failed to submit event to push server, connection error")
class PushLogHandler(logging.Handler):
"""
To be used with Python log handling framework for publishing log entries
"""
def emit(self, record):
from certidude.push import publish
publish("log-entry", dict(
created = datetime.utcfromtimestamp(record.created),
message = record.msg % record.args,
severity = record.levelname.lower()))
|
Add fallbacks for e-mail handling if outbox is not defined
|
Add fallbacks for e-mail handling if outbox is not defined
|
Python
|
mit
|
laurivosandi/certidude,laurivosandi/certidude,plaes/certidude,laurivosandi/certidude,plaes/certidude,plaes/certidude,laurivosandi/certidude,plaes/certidude
|
import click
import json
import logging
import requests
from datetime import datetime
from certidude import config
def publish(event_type, event_data):
"""
Publish event on push server
"""
+ if not config.PUSH_PUBLISH:
+ # Push server disabled
+ return
+
if not isinstance(event_data, basestring):
from certidude.decorators import MyEncoder
event_data = json.dumps(event_data, cls=MyEncoder)
url = config.PUSH_PUBLISH % config.PUSH_TOKEN
click.echo("Publishing %s event '%s' on %s" % (event_type, event_data, url))
try:
notification = requests.post(
url,
data=event_data,
headers={"X-EventSource-Event": event_type, "User-Agent": "Certidude API"})
if notification.status_code == requests.codes.created:
pass # Sent to client
elif notification.status_code == requests.codes.accepted:
pass # Buffered in nchan
else:
click.echo("Failed to submit event to push server, server responded %d" % (
notification.status_code))
except requests.exceptions.ConnectionError:
click.echo("Failed to submit event to push server, connection error")
class PushLogHandler(logging.Handler):
"""
To be used with Python log handling framework for publishing log entries
"""
def emit(self, record):
from certidude.push import publish
publish("log-entry", dict(
created = datetime.utcfromtimestamp(record.created),
message = record.msg % record.args,
severity = record.levelname.lower()))
|
Add fallbacks for e-mail handling if outbox is not defined
|
## Code Before:
import click
import json
import logging
import requests
from datetime import datetime
from certidude import config
def publish(event_type, event_data):
"""
Publish event on push server
"""
if not isinstance(event_data, basestring):
from certidude.decorators import MyEncoder
event_data = json.dumps(event_data, cls=MyEncoder)
url = config.PUSH_PUBLISH % config.PUSH_TOKEN
click.echo("Publishing %s event '%s' on %s" % (event_type, event_data, url))
try:
notification = requests.post(
url,
data=event_data,
headers={"X-EventSource-Event": event_type, "User-Agent": "Certidude API"})
if notification.status_code == requests.codes.created:
pass # Sent to client
elif notification.status_code == requests.codes.accepted:
pass # Buffered in nchan
else:
click.echo("Failed to submit event to push server, server responded %d" % (
notification.status_code))
except requests.exceptions.ConnectionError:
click.echo("Failed to submit event to push server, connection error")
class PushLogHandler(logging.Handler):
"""
To be used with Python log handling framework for publishing log entries
"""
def emit(self, record):
from certidude.push import publish
publish("log-entry", dict(
created = datetime.utcfromtimestamp(record.created),
message = record.msg % record.args,
severity = record.levelname.lower()))
## Instruction:
Add fallbacks for e-mail handling if outbox is not defined
## Code After:
import click
import json
import logging
import requests
from datetime import datetime
from certidude import config
def publish(event_type, event_data):
"""
Publish event on push server
"""
if not config.PUSH_PUBLISH:
# Push server disabled
return
if not isinstance(event_data, basestring):
from certidude.decorators import MyEncoder
event_data = json.dumps(event_data, cls=MyEncoder)
url = config.PUSH_PUBLISH % config.PUSH_TOKEN
click.echo("Publishing %s event '%s' on %s" % (event_type, event_data, url))
try:
notification = requests.post(
url,
data=event_data,
headers={"X-EventSource-Event": event_type, "User-Agent": "Certidude API"})
if notification.status_code == requests.codes.created:
pass # Sent to client
elif notification.status_code == requests.codes.accepted:
pass # Buffered in nchan
else:
click.echo("Failed to submit event to push server, server responded %d" % (
notification.status_code))
except requests.exceptions.ConnectionError:
click.echo("Failed to submit event to push server, connection error")
class PushLogHandler(logging.Handler):
"""
To be used with Python log handling framework for publishing log entries
"""
def emit(self, record):
from certidude.push import publish
publish("log-entry", dict(
created = datetime.utcfromtimestamp(record.created),
message = record.msg % record.args,
severity = record.levelname.lower()))
|
...
"""
if not config.PUSH_PUBLISH:
# Push server disabled
return
if not isinstance(event_data, basestring):
...
|
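A minimal, runnable sketch of the early-return guard this record introduces: with no push URL configured, serialization and network I/O are skipped entirely. FakeConfig and the print are stand-ins for the real certidude config and requests call, and str replaces the Python 2 basestring check — an illustration under those assumptions, not certidude code.

import json

class FakeConfig:
    PUSH_PUBLISH = None  # push server disabled in this sketch
    PUSH_TOKEN = 'token'

config = FakeConfig()

def publish(event_type, event_data):
    # Guard first: with no PUSH_PUBLISH URL, do no work at all.
    if not config.PUSH_PUBLISH:
        return
    if not isinstance(event_data, str):
        event_data = json.dumps(event_data)
    url = config.PUSH_PUBLISH % config.PUSH_TOKEN
    print('would POST %s event %r to %s' % (event_type, event_data, url))

publish('log-entry', {'message': 'hello'})  # prints nothing: the guard short-circuits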
922c6350fda965068927611348bdd9127ee405d9
|
scaffolder/commands/vcs.py
|
scaffolder/commands/vcs.py
|
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.vcs import VCS
class VcsCommand(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"-u",
"--url",
dest="url",
help='Clone URL, it can be SSH or HTTPS. Git only for now.',
metavar="REPO_URL"
),
make_option(
"-t",
"--target",
dest="target",
default='.',
help="Target directory where the repo will be cloned.",
metavar="TARGET"
),
)
def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
help = 'Command to clone github repos'
aliases = ('git','hg',)
#TODO: Move to BaseCommand, create methods and have each subcommand override
parser = OptionParser(
version=self.get_version(),
option_list=self.get_option_list(),
usage='\n %prog {0} [OPTIONS] FILE...'.format(name),
description='',
epilog=''
)
BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)
# self.update_parser()
def update_parser(self):
self.parser.set_usage('%prog [OPTIONS] FILE...')
# self.parser.prog = '%s %s' % (self.parser.get_prog_name(), self.name)
self.parser.version = self.get_version()
self.parser.option_list = sorted(self.get_option_list())
def run(self, *args, **options):
url = options.get('url')
tgt = options.get('target')
boot = VCS(url)
boot.clone(target_dir=tgt)
|
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.vcs import VCS
class VcsCommand(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"-u",
"--url",
dest="url",
help='Clone URL, it can be SSH or HTTPS. Git only for now.',
metavar="REPO_URL"
),
make_option(
"-t",
"--target",
dest="target",
default='.',
help="Target directory where the repo will be cloned.",
metavar="TARGET"
),
)
help = 'Command to clone github repos'
def run(self, *args, **options):
url = options.get('url')
tgt = options.get('target')
boot = VCS(url)
boot.clone(target_dir=tgt)
|
Remove __init__ method, not needed.
|
VcsCommand: Remove __init__ method, not needed.
|
Python
|
mit
|
goliatone/minions
|
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.vcs import VCS
class VcsCommand(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"-u",
"--url",
dest="url",
help='Clone URL, it can be SSH or HTTPS. Git only for now.',
metavar="REPO_URL"
),
make_option(
"-t",
"--target",
dest="target",
default='.',
help="Target directory where the repo will be cloned.",
metavar="TARGET"
),
)
- def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
- help = 'Command to clone github repos'
+ help = 'Command to clone github repos'
- aliases = ('git','hg',)
- #TODO: Move to BaseCommand, create methods and have each subcommand override
- parser = OptionParser(
- version=self.get_version(),
- option_list=self.get_option_list(),
- usage='\n %prog {0} [OPTIONS] FILE...'.format(name),
- description='',
- epilog=''
-
- )
- BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)
- # self.update_parser()
-
- def update_parser(self):
- self.parser.set_usage('%prog [OPTIONS] FILE...')
- # self.parser.prog = '%s %s' % (self.parser.get_prog_name(), self.name)
- self.parser.version = self.get_version()
- self.parser.option_list = sorted(self.get_option_list())
def run(self, *args, **options):
url = options.get('url')
tgt = options.get('target')
boot = VCS(url)
boot.clone(target_dir=tgt)
|
Remove __init__ method, not needed.
|
## Code Before:
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.vcs import VCS
class VcsCommand(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"-u",
"--url",
dest="url",
help='Clone URL, it can be SSH or HTTPS. Git only for now.',
metavar="REPO_URL"
),
make_option(
"-t",
"--target",
dest="target",
default='.',
help="Target directory where the repo will be cloned.",
metavar="TARGET"
),
)
def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
help = 'Command to clone github repos'
aliases = ('git','hg',)
#TODO: Move to BaseCommand, create methods and have each subcommand override
parser = OptionParser(
version=self.get_version(),
option_list=self.get_option_list(),
usage='\n %prog {0} [OPTIONS] FILE...'.format(name),
description='',
epilog=''
)
BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)
# self.update_parser()
def update_parser(self):
self.parser.set_usage('%prog [OPTIONS] FILE...')
# self.parser.prog = '%s %s' % (self.parser.get_prog_name(), self.name)
self.parser.version = self.get_version()
self.parser.option_list = sorted(self.get_option_list())
def run(self, *args, **options):
url = options.get('url')
tgt = options.get('target')
boot = VCS(url)
boot.clone(target_dir=tgt)
## Instruction:
Remove __init__ method, not needed.
## Code After:
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.vcs import VCS
class VcsCommand(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"-u",
"--url",
dest="url",
help='Clone URL, it can be SSH or HTTPS. Git only for now.',
metavar="REPO_URL"
),
make_option(
"-t",
"--target",
dest="target",
default='.',
help="Target directory where the repo will be cloned.",
metavar="TARGET"
),
)
help = 'Command to clone github repos'
def run(self, *args, **options):
url = options.get('url')
tgt = options.get('target')
boot = VCS(url)
boot.clone(target_dir=tgt)
|
// ... existing code ...
help = 'Command to clone github repos'
// ... rest of the code ...
|
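A minimal sketch of the pattern this record moves to: subcommands declare help (and options) as plain class attributes and inherit construction from the base class instead of overriding __init__. The classes here are toy stand-ins for scaffolder's BaseCommand:

class BaseCommand:
    help = ''

    def __init__(self, name):
        self.name = name

    def describe(self):
        return '%s: %s' % (self.name, self.help)

class VcsCommand(BaseCommand):
    help = 'Command to clone github repos'  # declarative; no __init__ override

print(VcsCommand('vcs').describe())  # -> vcs: Command to clone github repos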
812f1fec796e4c7d86731d5e3e91293fb1b0296b
|
scripts/europeana-meta.py
|
scripts/europeana-meta.py
|
from __future__ import print_function
import sys, os
from re import sub
import zipfile, json
# from pyspark import SparkContext
# from pyspark.sql import SQLContext
# from pyspark.sql import Row
# from pyspark.sql.types import StringType
def getSeries(fname):
with zipfile.ZipFile(fname, 'r') as zf:
names = zf.namelist()
mfile = [f for f in names if f.endswith('.metadata.json')]
series = fname
if len(mfile) > 0:
m = json.loads(zf.read(mfile[0]))
series = m['identifier'][0]
return m
if __name__ == "__main__":
if len(sys.argv) < 2:
print("Usage: europeana.py <input> <output>", file=sys.stderr)
exit(-1)
# sc = SparkContext(appName="Europeana Import")
# sqlContext = SQLContext(sc)
x = [os.path.join(d[0], f) for d in os.walk(sys.argv[1]) for f in d[2] if f.endswith('zip')]
for f in x:
print(json.dumps(getSeries(f)))
# sc.parallelize(x, 200).flatMap(getSeries).toDF().write.save(sys.argv[2])
# sc.stop()
|
from __future__ import print_function
import sys, os
from re import sub
import zipfile, json
# from pyspark import SparkContext
# from pyspark.sql import SQLContext
# from pyspark.sql import Row
# from pyspark.sql.types import StringType
def getSeries(fname):
with zipfile.ZipFile(fname, 'r') as zf:
names = zf.namelist()
mfile = [f for f in names if f.endswith('.metadata.json')]
series = 'europeana/' + sub('^.*newspapers-by-country/', '',
sub('[\x80-\xff]', '', fname).replace('.zip', ''))
if len(mfile) > 0:
m = json.loads(zf.read(mfile[0]))
return {'series': series, 'title': m['title'][0], 'lang': m['language']}
if __name__ == "__main__":
if len(sys.argv) < 2:
print("Usage: europeana.py <input> <output>", file=sys.stderr)
exit(-1)
# sc = SparkContext(appName="Europeana Import")
# sqlContext = SQLContext(sc)
x = [os.path.join(d[0], f) for d in os.walk(sys.argv[1]) for f in d[2] if f.endswith('zip')]
for f in x:
print(json.dumps(getSeries(f)))
# sc.parallelize(x, 200).flatMap(getSeries).toDF().write.save(sys.argv[2])
# sc.stop()
|
Use file path as Europeana series name.
|
Use file path as Europeana series name.
|
Python
|
apache-2.0
|
ViralTexts/vt-passim,ViralTexts/vt-passim,ViralTexts/vt-passim
|
from __future__ import print_function
import sys, os
from re import sub
import zipfile, json
# from pyspark import SparkContext
# from pyspark.sql import SQLContext
# from pyspark.sql import Row
# from pyspark.sql.types import StringType
def getSeries(fname):
with zipfile.ZipFile(fname, 'r') as zf:
names = zf.namelist()
mfile = [f for f in names if f.endswith('.metadata.json')]
- series = fname
+ series = 'europeana/' + sub('^.*newspapers-by-country/', '',
+ sub('[\x80-\xff]', '', fname).replace('.zip', ''))
if len(mfile) > 0:
m = json.loads(zf.read(mfile[0]))
+ return {'series': series, 'title': m['title'][0], 'lang': m['language']}
- series = m['identifier'][0]
- return m
if __name__ == "__main__":
if len(sys.argv) < 2:
print("Usage: europeana.py <input> <output>", file=sys.stderr)
exit(-1)
# sc = SparkContext(appName="Europeana Import")
# sqlContext = SQLContext(sc)
x = [os.path.join(d[0], f) for d in os.walk(sys.argv[1]) for f in d[2] if f.endswith('zip')]
for f in x:
print(json.dumps(getSeries(f)))
# sc.parallelize(x, 200).flatMap(getSeries).toDF().write.save(sys.argv[2])
# sc.stop()
|
Use file path as Europeana series name.
|
## Code Before:
from __future__ import print_function
import sys, os
from re import sub
import zipfile, json
# from pyspark import SparkContext
# from pyspark.sql import SQLContext
# from pyspark.sql import Row
# from pyspark.sql.types import StringType
def getSeries(fname):
with zipfile.ZipFile(fname, 'r') as zf:
names = zf.namelist()
mfile = [f for f in names if f.endswith('.metadata.json')]
series = fname
if len(mfile) > 0:
m = json.loads(zf.read(mfile[0]))
series = m['identifier'][0]
return m
if __name__ == "__main__":
if len(sys.argv) < 2:
print("Usage: europeana.py <input> <output>", file=sys.stderr)
exit(-1)
# sc = SparkContext(appName="Europeana Import")
# sqlContext = SQLContext(sc)
x = [os.path.join(d[0], f) for d in os.walk(sys.argv[1]) for f in d[2] if f.endswith('zip')]
for f in x:
print(json.dumps(getSeries(f)))
# sc.parallelize(x, 200).flatMap(getSeries).toDF().write.save(sys.argv[2])
# sc.stop()
## Instruction:
Use file path as Europeana series name.
## Code After:
from __future__ import print_function
import sys, os
from re import sub
import zipfile, json
# from pyspark import SparkContext
# from pyspark.sql import SQLContext
# from pyspark.sql import Row
# from pyspark.sql.types import StringType
def getSeries(fname):
with zipfile.ZipFile(fname, 'r') as zf:
names = zf.namelist()
mfile = [f for f in names if f.endswith('.metadata.json')]
series = 'europeana/' + sub('^.*newspapers-by-country/', '',
sub('[\x80-\xff]', '', fname).replace('.zip', ''))
if len(mfile) > 0:
m = json.loads(zf.read(mfile[0]))
return {'series': series, 'title': m['title'][0], 'lang': m['language']}
if __name__ == "__main__":
if len(sys.argv) < 2:
print("Usage: europeana.py <input> <output>", file=sys.stderr)
exit(-1)
# sc = SparkContext(appName="Europeana Import")
# sqlContext = SQLContext(sc)
x = [os.path.join(d[0], f) for d in os.walk(sys.argv[1]) for f in d[2] if f.endswith('zip')]
for f in x:
print(json.dumps(getSeries(f)))
# sc.parallelize(x, 200).flatMap(getSeries).toDF().write.save(sys.argv[2])
# sc.stop()
|
# ... existing code ...
mfile = [f for f in names if f.endswith('.metadata.json')]
series = 'europeana/' + sub('^.*newspapers-by-country/', '',
sub('[\x80-\xff]', '', fname).replace('.zip', ''))
if len(mfile) > 0:
# ... modified code ...
m = json.loads(zf.read(mfile[0]))
return {'series': series, 'title': m['title'][0], 'lang': m['language']}
# ... rest of the code ...
|
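A self-contained check of the path-to-series derivation above, using a made-up file path: it strips non-ASCII bytes, drops the .zip suffix, and keeps everything after newspapers-by-country/:

from re import sub

fname = '/data/newspapers-by-country/fr/le-temps\xe9.zip'  # hypothetical path
series = 'europeana/' + sub('^.*newspapers-by-country/', '',
                            sub('[\x80-\xff]', '', fname).replace('.zip', ''))
print(series)  # -> europeana/fr/le-temps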
b7bafa86cf6e2f568e99335fa6aeb6d8f3509170
|
dont_tread_on_memes/__init__.py
|
dont_tread_on_memes/__init__.py
|
import os
from PIL import Image, ImageDraw, ImageFont
localdir = os.path.dirname(__file__)
BLANK_FLAG = Image.open(os.path.join(localdir, "dont-tread-on-blank.png"))
LORA_FONT = ImageFont.truetype(
os.path.join(localdir, "../fonts/Lora-Regular.ttf"), 120
)
def tread_on(caption, color="black"):
"""Caption the "Don't Tread on Me" snake with `caption`"""
flag = BLANK_FLAG.copy()
draw = ImageDraw.Draw(flag)
text = caption.upper()
font_pos = (flag.width / 2 - LORA_FONT.getsize(text)[0] / 2, 1088)
draw.text(font_pos, text, font=LORA_FONT, fill=color)
return flag
def dont_me(phrase):
"""Caption the "Don't tread on me" flag with "Don't [phrase] me" """
return tread_on("don't {} me".format(phrase))
|
import os
from PIL import Image, ImageDraw, ImageFont
localdir = os.path.dirname(__file__)
BLANK_FLAG = Image.open(os.path.join(localdir, "dont-tread-on-blank.png"))
LORA_FONT = ImageFont.truetype(
os.path.join(localdir, "../fonts/Lora-Regular.ttf"), 120
)
def tread_on(caption, color="black"):
"""Caption the "Don't Tread on Me" snake with `caption`"""
flag = BLANK_FLAG.copy()
draw = ImageDraw.Draw(flag)
text = caption.upper()
font_pos = (flag.width / 2 - LORA_FONT.getsize(text)[0] / 2, 1088)
draw.text(font_pos, text, font=LORA_FONT, fill=color)
return flag
def dont_me(phrase, *args, **kwargs):
"""Caption the "Don't tread on me" flag with "Don't [phrase] me" """
return tread_on("don't {} me".format(phrase), *args, **kwargs)
|
Allow passing arguments through dont_me to tread_on
|
Allow passing arguments through dont_me to tread_on
|
Python
|
mit
|
controversial/dont-tread-on-memes
|
import os
from PIL import Image, ImageDraw, ImageFont
localdir = os.path.dirname(__file__)
BLANK_FLAG = Image.open(os.path.join(localdir, "dont-tread-on-blank.png"))
LORA_FONT = ImageFont.truetype(
os.path.join(localdir, "../fonts/Lora-Regular.ttf"), 120
)
def tread_on(caption, color="black"):
"""Caption the "Don't Tread on Me" snake with `caption`"""
flag = BLANK_FLAG.copy()
draw = ImageDraw.Draw(flag)
text = caption.upper()
font_pos = (flag.width / 2 - LORA_FONT.getsize(text)[0] / 2, 1088)
draw.text(font_pos, text, font=LORA_FONT, fill=color)
return flag
- def dont_me(phrase):
+ def dont_me(phrase, *args, **kwargs):
"""Caption the "Don't tread on me" flag with "Don't [phrase] me" """
- return tread_on("don't {} me".format(phrase))
+ return tread_on("don't {} me".format(phrase), *args, **kwargs)
|
Allow passing arguments through dont_me to tread_on
|
## Code Before:
import os
from PIL import Image, ImageDraw, ImageFont
localdir = os.path.dirname(__file__)
BLANK_FLAG = Image.open(os.path.join(localdir, "dont-tread-on-blank.png"))
LORA_FONT = ImageFont.truetype(
os.path.join(localdir, "../fonts/Lora-Regular.ttf"), 120
)
def tread_on(caption, color="black"):
"""Caption the "Don't Tread on Me" snake with `caption`"""
flag = BLANK_FLAG.copy()
draw = ImageDraw.Draw(flag)
text = caption.upper()
font_pos = (flag.width / 2 - LORA_FONT.getsize(text)[0] / 2, 1088)
draw.text(font_pos, text, font=LORA_FONT, fill=color)
return flag
def dont_me(phrase):
"""Caption the "Don't tread on me" flag with "Don't [phrase] me" """
return tread_on("don't {} me".format(phrase))
## Instruction:
Allow passing arguments through dont_me to tread_on
## Code After:
import os
from PIL import Image, ImageDraw, ImageFont
localdir = os.path.dirname(__file__)
BLANK_FLAG = Image.open(os.path.join(localdir, "dont-tread-on-blank.png"))
LORA_FONT = ImageFont.truetype(
os.path.join(localdir, "../fonts/Lora-Regular.ttf"), 120
)
def tread_on(caption, color="black"):
"""Caption the "Don't Tread on Me" snake with `caption`"""
flag = BLANK_FLAG.copy()
draw = ImageDraw.Draw(flag)
text = caption.upper()
font_pos = (flag.width / 2 - LORA_FONT.getsize(text)[0] / 2, 1088)
draw.text(font_pos, text, font=LORA_FONT, fill=color)
return flag
def dont_me(phrase, *args, **kwargs):
"""Caption the "Don't tread on me" flag with "Don't [phrase] me" """
return tread_on("don't {} me".format(phrase), *args, **kwargs)
|
...
def dont_me(phrase, *args, **kwargs):
"""Caption the "Don't tread on me" flag with "Don't [phrase] me" """
return tread_on("don't {} me".format(phrase), *args, **kwargs)
...
|
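The general shape of the fix, runnable on its own: *args/**kwargs let the thin wrapper forward any extra arguments (here color=) to the wrapped function unchanged. tread_on is reduced to a string stub so no Pillow install is needed:

def tread_on(caption, color='black'):
    return '[%s] %s' % (color, caption.upper())

def dont_me(phrase, *args, **kwargs):
    # Forward everything beyond `phrase` straight through to tread_on.
    return tread_on("don't {} me".format(phrase), *args, **kwargs)

print(dont_me('tread on'))               # [black] DON'T TREAD ON ME
print(dont_me('tread on', color='red'))  # [red] DON'T TREAD ON ME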
16c1352ecf8583615e482c431ec5183fdb718f67
|
split_file.py
|
split_file.py
|
from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use."""
return re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))
|
from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def merge_quotations(statements):
"""If there are an odd number of "s in a statement, assume that we
broke the middle of a string. We recombine that string."""
cur = None
for i in statements:
if i.count('"') % 2 != 0:
if cur is None:
cur = i
else:
yield (cur + ' ' + i)
cur = None
elif cur is None:
yield i
else:
cur += ' ' + i
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use.
We additionally merge lines inside of quotations."""
return list(merge_quotations(re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))))
|
Make splitting more robust to periods in strings
|
Make splitting more robust to periods in strings
|
Python
|
mit
|
JasonGross/coq-tools,JasonGross/coq-tools
|
from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
+
+ def merge_quotations(statements):
+ """If there are an odd number of "s in a statement, assume that we
+ broke the middle of a string. We recombine that string."""
+
+ cur = None
+ for i in statements:
+ if i.count('"') % 2 != 0:
+ if cur is None:
+ cur = i
+ else:
+ yield (cur + ' ' + i)
+ cur = None
+ elif cur is None:
+ yield i
+ else:
+ cur += ' ' + i
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
- the one that ProofGeneral and CoqIDE use."""
+ the one that ProofGeneral and CoqIDE use.
- return re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))
+ We additionally merge lines inside of quotations."""
+ return list(merge_quotations(re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))))
+
|
Make splitting more robust to periods in strings
|
## Code Before:
from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use."""
return re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))
## Instruction:
Make splitting more robust to periods in strings
## Code After:
from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def merge_quotations(statements):
"""If there are an odd number of "s in a statement, assume that we
broke the middle of a string. We recombine that string."""
cur = None
for i in statements:
if i.count('"') % 2 != 0:
if cur is None:
cur = i
else:
yield (cur + ' ' + i)
cur = None
elif cur is None:
yield i
else:
cur += ' ' + i
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use.
We additionally merge lines inside of quotations."""
return list(merge_quotations(re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))))
|
...
__all__ = ["split_coq_file_contents"]
def merge_quotations(statements):
"""If there are an odd number of "s in a statement, assume that we
broke the middle of a string. We recombine that string."""
cur = None
for i in statements:
if i.count('"') % 2 != 0:
if cur is None:
cur = i
else:
yield (cur + ' ' + i)
cur = None
elif cur is None:
yield i
else:
cur += ' ' + i
...
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use.
We additionally merge lines inside of quotations."""
return list(merge_quotations(re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))))
...
|
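A quick standalone check of the merge_quotations generator above, on a split that broke a string literal across two fragments (the sample statements are invented):

def merge_quotations(statements):
    cur = None
    for i in statements:
        if i.count('"') % 2 != 0:   # odd quote count: a string was cut here
            if cur is None:
                cur = i             # start accumulating fragments
            else:
                yield (cur + ' ' + i)
                cur = None
        elif cur is None:
            yield i
        else:
            cur += ' ' + i

parts = ['Definition x := "a.', 'b".', 'Check x.']
print(list(merge_quotations(parts)))
# -> ['Definition x := "a. b".', 'Check x.']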
77122e472c3688f96e77b4f39e9767fed0fb53ae
|
generate_from_template.py
|
generate_from_template.py
|
from __future__ import print_function
import os.path
import sys
import json
import uuid
root = sys.path[0]
template_path = os.path.join(root, 'templates', 'simple.json')
with open(template_path) as template:
oyster = json.load(template)
new_id = str(uuid.uuid4())
new_filename = new_id + '.json'
new_filepath = os.path.join(root, 'cmdoysters', new_filename)
with open(new_filepath, 'w') as new_file:
oyster['uuid'] = new_id
json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True)
print('Created new CmdOyster:\n{}'.format(new_filepath))
|
import os.path
import sys
import json
import uuid
root = sys.path[0]
template_path = os.path.join(root, 'templates', 'simple.json')
with open(template_path) as template:
oyster = json.load(template)
new_id = str(uuid.uuid4())
new_filename = new_id + '.json'
new_filepath = os.path.join(root, 'cmdoysters', new_filename)
with open(new_filepath, 'w') as new_file:
oyster['uuid'] = new_id
json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True)
sys.stdout.write('{}\n'.format(new_filepath))
|
Make output terse and parseable.
|
Make output terse and parseable.
|
Python
|
mit
|
nbeaver/cmd-oysters,nbeaver/cmd-oysters
|
- from __future__ import print_function
import os.path
import sys
import json
import uuid
root = sys.path[0]
template_path = os.path.join(root, 'templates', 'simple.json')
with open(template_path) as template:
oyster = json.load(template)
new_id = str(uuid.uuid4())
new_filename = new_id + '.json'
new_filepath = os.path.join(root, 'cmdoysters', new_filename)
with open(new_filepath, 'w') as new_file:
oyster['uuid'] = new_id
json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True)
- print('Created new CmdOyster:\n{}'.format(new_filepath))
+ sys.stdout.write('{}\n'.format(new_filepath))
|
Make output terse and parseable.
|
## Code Before:
from __future__ import print_function
import os.path
import sys
import json
import uuid
root = sys.path[0]
template_path = os.path.join(root, 'templates', 'simple.json')
with open(template_path) as template:
oyster = json.load(template)
new_id = str(uuid.uuid4())
new_filename = new_id + '.json'
new_filepath = os.path.join(root, 'cmdoysters', new_filename)
with open(new_filepath, 'w') as new_file:
oyster['uuid'] = new_id
json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True)
print('Created new CmdOyster:\n{}'.format(new_filepath))
## Instruction:
Make output terse and parseable.
## Code After:
import os.path
import sys
import json
import uuid
root = sys.path[0]
template_path = os.path.join(root, 'templates', 'simple.json')
with open(template_path) as template:
oyster = json.load(template)
new_id = str(uuid.uuid4())
new_filename = new_id + '.json'
new_filepath = os.path.join(root, 'cmdoysters', new_filename)
with open(new_filepath, 'w') as new_file:
oyster['uuid'] = new_id
json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True)
sys.stdout.write('{}\n'.format(new_filepath))
|
# ... existing code ...
# ... modified code ...
json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True)
sys.stdout.write('{}\n'.format(new_filepath))
# ... rest of the code ...
|
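The payoff of the terse output, sketched with a placeholder path: writing only the bare value, one per line and with no label, lets shell callers capture it directly, e.g. newfile=$(python generate_from_template.py):

import sys

new_filepath = '/tmp/example.json'  # placeholder for the generated path
sys.stdout.write('{}\n'.format(new_filepath))  # machine-readable: value only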
784b165d67550cd159b05aabfd2872ebc746a9e2
|
pants/views.py
|
pants/views.py
|
from cornice import Service
from tokenlib.errors import Error as TokenError
callurl = Service(name='callurl', path='/call-url')
call = Service(name='call', path='/call/{token}')
def is_authenticated(request):
"""Validates that an user is authenticated and extracts its userid"""
request.validated['userid'] = 'n1k0';
def is_token_valid(request):
token = request.matchdict['token']
try:
token = request.token_manager.parse_token(token.encode())
request.validated['token'] = token
except TokenError as e:
request.errors.add('querystring', 'token', e.message)
@callurl.post(permission='create')
def generate_callurl(request):
"""
Generate a callurl based on user ID.
"""
token = request.token_manager.make_token({
"userid": request.validated['userid'],
})
call_url = '{root}/call/{token}'.format(root=request.application_url,
token=token)
return {'call-url': call_url}
@call.get(validators=[is_token_valid], renderer='templates/call.jinja2')
def display_app(request):
return request.validated['token']
|
from pyramid.security import Allow, Authenticated, authenticated_userid
from cornice import Service
from tokenlib.errors import Error as TokenError
callurl = Service(name='callurl', path='/call-url')
call = Service(name='call', path='/call/{token}')
def acl(request):
return [(Allow, Authenticated, 'create-callurl')]
def is_token_valid(request):
token = request.matchdict['token']
try:
token = request.token_manager.parse_token(token.encode())
request.validated['token'] = token
except TokenError as e:
request.errors.add('querystring', 'token', e.message)
@callurl.post(permission='create-callurl', acl=acl)
def generate_callurl(request):
"""
Generate a callurl based on user ID.
"""
userid = authenticated_userid(request)
token = request.token_manager.make_token({"userid": userid})
call_url = '{root}/call/{token}'.format(root=request.application_url,
token=token)
return {'call-url': call_url}
@call.get(validators=[is_token_valid], renderer='templates/call.jinja2')
def display_app(request):
return request.validated['token']
|
Implement ACL for call url creation
|
Implement ACL for call url creation
|
Python
|
mpl-2.0
|
ametaireau/pants-server,almet/pants-server
|
+ from pyramid.security import Allow, Authenticated, authenticated_userid
+
from cornice import Service
from tokenlib.errors import Error as TokenError
callurl = Service(name='callurl', path='/call-url')
call = Service(name='call', path='/call/{token}')
+ def acl(request):
+ return [(Allow, Authenticated, 'create-callurl')]
- def is_authenticated(request):
- """Validates that an user is authenticated and extracts its userid"""
- request.validated['userid'] = 'n1k0';
-
def is_token_valid(request):
token = request.matchdict['token']
try:
token = request.token_manager.parse_token(token.encode())
request.validated['token'] = token
except TokenError as e:
request.errors.add('querystring', 'token', e.message)
-
- @callurl.post(permission='create')
+ @callurl.post(permission='create-callurl', acl=acl)
def generate_callurl(request):
"""
Generate a callurl based on user ID.
"""
+ userid = authenticated_userid(request)
- token = request.token_manager.make_token({
+ token = request.token_manager.make_token({"userid": userid})
- "userid": request.validated['userid'],
- })
call_url = '{root}/call/{token}'.format(root=request.application_url,
token=token)
return {'call-url': call_url}
@call.get(validators=[is_token_valid], renderer='templates/call.jinja2')
def display_app(request):
return request.validated['token']
|
Implement ACL for call url creation
|
## Code Before:
from cornice import Service
from tokenlib.errors import Error as TokenError
callurl = Service(name='callurl', path='/call-url')
call = Service(name='call', path='/call/{token}')
def is_authenticated(request):
"""Validates that an user is authenticated and extracts its userid"""
request.validated['userid'] = 'n1k0';
def is_token_valid(request):
token = request.matchdict['token']
try:
token = request.token_manager.parse_token(token.encode())
request.validated['token'] = token
except TokenError as e:
request.errors.add('querystring', 'token', e.message)
@callurl.post(permission='create')
def generate_callurl(request):
"""
Generate a callurl based on user ID.
"""
token = request.token_manager.make_token({
"userid": request.validated['userid'],
})
call_url = '{root}/call/{token}'.format(root=request.application_url,
token=token)
return {'call-url': call_url}
@call.get(validators=[is_token_valid], renderer='templates/call.jinja2')
def display_app(request):
return request.validated['token']
## Instruction:
Implement ACL for call url creation
## Code After:
from pyramid.security import Allow, Authenticated, authenticated_userid
from cornice import Service
from tokenlib.errors import Error as TokenError
callurl = Service(name='callurl', path='/call-url')
call = Service(name='call', path='/call/{token}')
def acl(request):
return [(Allow, Authenticated, 'create-callurl')]
def is_token_valid(request):
token = request.matchdict['token']
try:
token = request.token_manager.parse_token(token.encode())
request.validated['token'] = token
except TokenError as e:
request.errors.add('querystring', 'token', e.message)
@callurl.post(permission='create-callurl', acl=acl)
def generate_callurl(request):
"""
Generate a callurl based on user ID.
"""
userid = authenticated_userid(request)
token = request.token_manager.make_token({"userid": userid})
call_url = '{root}/call/{token}'.format(root=request.application_url,
token=token)
return {'call-url': call_url}
@call.get(validators=[is_token_valid], renderer='templates/call.jinja2')
def display_app(request):
return request.validated['token']
|
...
from pyramid.security import Allow, Authenticated, authenticated_userid
from cornice import Service
...
def acl(request):
return [(Allow, Authenticated, 'create-callurl')]
...
@callurl.post(permission='create-callurl', acl=acl)
def generate_callurl(request):
...
"""
userid = authenticated_userid(request)
token = request.token_manager.make_token({"userid": userid})
call_url = '{root}/call/{token}'.format(root=request.application_url,
...
|
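A framework-free sketch of the ACL idea in this record: a permission check passes only when some ACL entry allows one of the request's principals. The permits helper is a toy stand-in for Pyramid's authorization policy, not its API:

Allow = 'allow'
Authenticated = 'system.Authenticated'

def acl(request):
    return [(Allow, Authenticated, 'create-callurl')]

def permits(request, principals, permission):
    # Grant iff any ACL entry allows one of the caller's principals.
    return any(action == Allow and principal in principals and perm == permission
               for action, principal, perm in acl(request))

print(permits(None, {Authenticated, 'userid:n1k0'}, 'create-callurl'))  # True
print(permits(None, {'system.Everyone'}, 'create-callurl'))             # False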
b394f79132d952be20baf15725715691ace69ced
|
web/slas-web/web/urls.py
|
web/slas-web/web/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^general/', include('general.urls', namespace='general')),
url(r'^apache/', include('apache.urls', namespace='apache')),
url(r'^bash/', include('bash.urls', namespace='bash')),
url(r'^admin/', include(admin.site.urls)),
# index
url(r'^$', 'general.views.status', name='index'),
url(r'^user/login/$', 'web.views.user_login'),
url(r'^user/auth$', 'web.views.user_auth'),
url(r'^user/logout/$', 'web.views.user_logout'),
url(r'^user/invalid_login/$', 'web.views.user_invalid_login'),
]
|
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^general/', include('general.urls', namespace='general')),
url(r'^apache/', include('apache.urls', namespace='apache')),
url(r'^bash/', include('bash.urls', namespace='bash')),
url(r'^admin/', include(admin.site.urls)),
# index
url(r'^$', 'general.views.status', name='index'),
url(r'^user/login/$', 'web.views.user_login'),
url(r'^user/auth$', 'web.views.user_auth'),
url(r'^user/logout/$', 'web.views.user_logout'),
url(r'^user/invalid_login/$', 'web.views.user_invalid_login'),
]
admin.site.site_header = 'SLAS web module administration tool'
|
Change web admin page title
|
Change web admin page title
|
Python
|
mit
|
chyla/slas,chyla/pat-lms,chyla/slas,chyla/pat-lms,chyla/slas,chyla/pat-lms,chyla/slas,chyla/slas,chyla/pat-lms,chyla/pat-lms,chyla/slas,chyla/pat-lms,chyla/slas,chyla/pat-lms
|
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^general/', include('general.urls', namespace='general')),
url(r'^apache/', include('apache.urls', namespace='apache')),
url(r'^bash/', include('bash.urls', namespace='bash')),
url(r'^admin/', include(admin.site.urls)),
# index
url(r'^$', 'general.views.status', name='index'),
url(r'^user/login/$', 'web.views.user_login'),
url(r'^user/auth$', 'web.views.user_auth'),
url(r'^user/logout/$', 'web.views.user_logout'),
url(r'^user/invalid_login/$', 'web.views.user_invalid_login'),
]
+ admin.site.site_header = 'SLAS web module administration tool'
+
|
Change web admin page title
|
## Code Before:
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^general/', include('general.urls', namespace='general')),
url(r'^apache/', include('apache.urls', namespace='apache')),
url(r'^bash/', include('bash.urls', namespace='bash')),
url(r'^admin/', include(admin.site.urls)),
# index
url(r'^$', 'general.views.status', name='index'),
url(r'^user/login/$', 'web.views.user_login'),
url(r'^user/auth$', 'web.views.user_auth'),
url(r'^user/logout/$', 'web.views.user_logout'),
url(r'^user/invalid_login/$', 'web.views.user_invalid_login'),
]
## Instruction:
Change web admin page title
## Code After:
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^general/', include('general.urls', namespace='general')),
url(r'^apache/', include('apache.urls', namespace='apache')),
url(r'^bash/', include('bash.urls', namespace='bash')),
url(r'^admin/', include(admin.site.urls)),
# index
url(r'^$', 'general.views.status', name='index'),
url(r'^user/login/$', 'web.views.user_login'),
url(r'^user/auth$', 'web.views.user_auth'),
url(r'^user/logout/$', 'web.views.user_logout'),
url(r'^user/invalid_login/$', 'web.views.user_invalid_login'),
]
admin.site.site_header = 'SLAS web module administration tool'
|
# ... existing code ...
]
admin.site.site_header = 'SLAS web module administration tool'
# ... rest of the code ...
|
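The mechanism behind the one-line change, shown without a Django project: site-wide admin strings are plain attributes on an AdminSite instance, set once at import time after the URL configuration. A toy class stands in for django.contrib.admin.site:

class AdminSite:
    site_header = 'Django administration'  # Django's default header text

site = AdminSite()
site.site_header = 'SLAS web module administration tool'
print(site.site_header)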
380baa34af7e8a704780f0ec535b626f4a286e23
|
deflect/admin.py
|
deflect/admin.py
|
from django.contrib import admin
from .models import RedirectURL
class RedirectURLAdmin(admin.ModelAdmin):
list_display = ('url', 'short_url', 'hits', 'last_used', 'creator', 'campaign', 'medium',)
list_filter = ('creator__username', 'campaign', 'medium',)
ordering = ('-last_used',)
readonly_fields = ('created', 'short_url', 'qr_code', 'hits', 'last_used',)
search_fields = ['url', 'campaign']
fieldsets = ((None, {'fields': ('url', 'short_url',)}),
('Google', {'fields': ('campaign', 'medium', 'content',)}),
('Additional info', {'fields': ('description', 'qr_code',)}),
('Short URL Usage', {'fields': ('hits', 'created', 'last_used',)}),)
def save_model(self, request, obj, form, change):
obj.user = request.user
obj.save()
admin.site.register(RedirectURL, RedirectURLAdmin)
|
from django.contrib import admin
from .models import RedirectURL
class RedirectURLAdmin(admin.ModelAdmin):
list_display = ('url', 'short_url', 'hits', 'last_used', 'creator', 'campaign', 'medium',)
list_filter = ('creator__username', 'campaign', 'medium',)
ordering = ('-last_used',)
readonly_fields = ('created', 'short_url', 'qr_code', 'hits', 'last_used',)
search_fields = ['url', 'campaign']
fieldsets = ((None, {'fields': ('url', 'short_url',)}),
('Google', {'fields': ('campaign', 'medium', 'content',)}),
('Additional info', {'fields': ('description', 'qr_code',)}),
('Short URL Usage', {'fields': ('hits', 'created', 'last_used',)}),)
def save_model(self, request, obj, form, change):
if not change:
obj.creator = request.user
obj.save()
admin.site.register(RedirectURL, RedirectURLAdmin)
|
Fix model creator updating on change event
|
Fix model creator updating on change event
|
Python
|
bsd-3-clause
|
jbittel/django-deflect
|
from django.contrib import admin
from .models import RedirectURL
class RedirectURLAdmin(admin.ModelAdmin):
list_display = ('url', 'short_url', 'hits', 'last_used', 'creator', 'campaign', 'medium',)
list_filter = ('creator__username', 'campaign', 'medium',)
ordering = ('-last_used',)
readonly_fields = ('created', 'short_url', 'qr_code', 'hits', 'last_used',)
search_fields = ['url', 'campaign']
fieldsets = ((None, {'fields': ('url', 'short_url',)}),
('Google', {'fields': ('campaign', 'medium', 'content',)}),
('Additional info', {'fields': ('description', 'qr_code',)}),
('Short URL Usage', {'fields': ('hits', 'created', 'last_used',)}),)
def save_model(self, request, obj, form, change):
+ if not change:
- obj.user = request.user
+ obj.creator = request.user
obj.save()
admin.site.register(RedirectURL, RedirectURLAdmin)
|
Fix model creator updating on change event
|
## Code Before:
from django.contrib import admin
from .models import RedirectURL
class RedirectURLAdmin(admin.ModelAdmin):
list_display = ('url', 'short_url', 'hits', 'last_used', 'creator', 'campaign', 'medium',)
list_filter = ('creator__username', 'campaign', 'medium',)
ordering = ('-last_used',)
readonly_fields = ('created', 'short_url', 'qr_code', 'hits', 'last_used',)
search_fields = ['url', 'campaign']
fieldsets = ((None, {'fields': ('url', 'short_url',)}),
('Google', {'fields': ('campaign', 'medium', 'content',)}),
('Additional info', {'fields': ('description', 'qr_code',)}),
('Short URL Usage', {'fields': ('hits', 'created', 'last_used',)}),)
def save_model(self, request, obj, form, change):
obj.user = request.user
obj.save()
admin.site.register(RedirectURL, RedirectURLAdmin)
## Instruction:
Fix model creator updating on change event
## Code After:
from django.contrib import admin
from .models import RedirectURL
class RedirectURLAdmin(admin.ModelAdmin):
list_display = ('url', 'short_url', 'hits', 'last_used', 'creator', 'campaign', 'medium',)
list_filter = ('creator__username', 'campaign', 'medium',)
ordering = ('-last_used',)
readonly_fields = ('created', 'short_url', 'qr_code', 'hits', 'last_used',)
search_fields = ['url', 'campaign']
fieldsets = ((None, {'fields': ('url', 'short_url',)}),
('Google', {'fields': ('campaign', 'medium', 'content',)}),
('Additional info', {'fields': ('description', 'qr_code',)}),
('Short URL Usage', {'fields': ('hits', 'created', 'last_used',)}),)
def save_model(self, request, obj, form, change):
if not change:
obj.creator = request.user
obj.save()
admin.site.register(RedirectURL, RedirectURLAdmin)
|
# ... existing code ...
def save_model(self, request, obj, form, change):
if not change:
obj.creator = request.user
obj.save()
# ... rest of the code ...
|
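A minimal runnable sketch of the fix: stamp the creator only on first save, so later edits by other admins don't overwrite it. Plain objects stand in for Django's ModelAdmin machinery:

class Obj:
    creator = None

def save_model(request_user, obj, change):
    if not change:              # only when the object is first created
        obj.creator = request_user

o = Obj()
save_model('alice', o, change=False)  # creation: sets the creator
save_model('bob', o, change=True)     # later edit: must not steal ownership
print(o.creator)  # -> alice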
71ce7f3e745b9cee357f867f126dce65f6e210ac
|
main.py
|
main.py
|
import os
import sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom'))
import pygame
import math
from world import Universe
# Initialize pygame
pygame.init()
size = width, height = 800, 600
black = 0, 0, 0
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
clock.tick(30)
# Initialize the universe
universe = Universe(screen)
universe.add_nest(100, 100, math.pi / 2, 30.0)
universe.add_hole(600, 100)
while 1:
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
screen.fill(black)
universe.update(clock.get_time())
universe.draw()
pygame.display.flip()
clock.tick(30)
|
import os
import sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom'))
import pygame
import math
from world import Universe
# Initialize pygame
pygame.init()
size = width, height = 800, 600
black = 0, 0, 0
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
clock.tick(30)
# Initialize the universe
universe = Universe(screen)
universe.add_road((
(100, 100),
(150, 300),
(250, 500),
(400, 500),
(700, 200),
))
universe.add_nest(100, 100, math.pi / 2, 30.0)
universe.add_hole(600, 100)
while 1:
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
screen.fill(black)
universe.update(clock.get_time())
universe.draw()
pygame.display.flip()
clock.tick(30)
|
Add more roads on map
|
Add more roads on map
|
Python
|
mit
|
thibault/vroom
|
import os
import sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom'))
import pygame
import math
from world import Universe
# Initialize pygame
pygame.init()
size = width, height = 800, 600
black = 0, 0, 0
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
clock.tick(30)
# Initialize the universe
universe = Universe(screen)
+ universe.add_road((
+ (100, 100),
+ (150, 300),
+ (250, 500),
+ (400, 500),
+ (700, 200),
+ ))
universe.add_nest(100, 100, math.pi / 2, 30.0)
universe.add_hole(600, 100)
+
while 1:
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
screen.fill(black)
universe.update(clock.get_time())
universe.draw()
pygame.display.flip()
clock.tick(30)
|
Add more roads on map
|
## Code Before:
import os
import sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom'))
import pygame
import math
from world import Universe
# Initialize pygame
pygame.init()
size = width, height = 800, 600
black = 0, 0, 0
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
clock.tick(30)
# Initialize the universe
universe = Universe(screen)
universe.add_nest(100, 100, math.pi / 2, 30.0)
universe.add_hole(600, 100)
while 1:
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
screen.fill(black)
universe.update(clock.get_time())
universe.draw()
pygame.display.flip()
clock.tick(30)
## Instruction:
Add more roads on map
## Code After:
import os
import sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom'))
import pygame
import math
from world import Universe
# Initialize pygame
pygame.init()
size = width, height = 800, 600
black = 0, 0, 0
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
clock.tick(30)
# Initialize the universe
universe = Universe(screen)
universe.add_road((
(100, 100),
(150, 300),
(250, 500),
(400, 500),
(700, 200),
))
universe.add_nest(100, 100, math.pi / 2, 30.0)
universe.add_hole(600, 100)
while 1:
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
screen.fill(black)
universe.update(clock.get_time())
universe.draw()
pygame.display.flip()
clock.tick(30)
|
# ... existing code ...
universe = Universe(screen)
universe.add_road((
(100, 100),
(150, 300),
(250, 500),
(400, 500),
(700, 200),
))
universe.add_nest(100, 100, math.pi / 2, 30.0)
# ... modified code ...
universe.add_hole(600, 100)
# ... rest of the code ...
|
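What add_road consumes, shown standalone: an ordered tuple of (x, y) waypoints forming a polyline. As a sanity check this computes the route's total length, no pygame required:

import math

road = ((100, 100), (150, 300), (250, 500), (400, 500), (700, 200))
length = sum(math.hypot(x2 - x1, y2 - y1)
             for (x1, y1), (x2, y2) in zip(road, road[1:]))
print(round(length, 1))  # total polyline length in pixels (~1004.0)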
8f4c8760dd5f6f21b1c59579332a3c81fa58ed13
|
buildlet/runner/__init__.py
|
buildlet/runner/__init__.py
|
_namemodmap = dict(
SimpleRunner='simple',
IPythonParallelRunner='ipythonparallel',
MultiprocessingRunner='multiprocessingpool',
)
def getrunner(classname):
import sys
module = 'buildlet.runner.{0}'.format(_namemodmap[classname])
__import__(module)
return getattr(sys.modules[module], classname)
def listrunner():
return list(_namemodmap)
def run(classname, task, *args, **kwds):
runner = getrunner(classname)(*args, **kwds)
runner.run(task)
return runner
|
_namemodmap = dict(
SimpleRunner='simple',
IPythonParallelRunner='ipythonparallel',
MultiprocessingRunner='multiprocessingpool',
)
def getrunner(classname):
"""
Get a runner class named `classname`.
"""
import sys
module = 'buildlet.runner.{0}'.format(_namemodmap[classname])
__import__(module)
return getattr(sys.modules[module], classname)
def listrunner():
"""
Get a list of runner class names (a list of strings).
"""
return list(_namemodmap)
def run(classname, task, *args, **kwds):
"""
Run `task` using runner named `classname`.
Rest of the arguments are passed to the runner class.
Return the instance of the used runner class.
"""
runner = getrunner(classname)(*args, **kwds)
runner.run(task)
return runner
|
Document utility functions in buildlet.runner
|
Document utility functions in buildlet.runner
|
Python
|
bsd-3-clause
|
tkf/buildlet
|
_namemodmap = dict(
SimpleRunner='simple',
IPythonParallelRunner='ipythonparallel',
MultiprocessingRunner='multiprocessingpool',
)
def getrunner(classname):
+ """
+ Get a runner class named `classname`.
+ """
import sys
module = 'buildlet.runner.{0}'.format(_namemodmap[classname])
__import__(module)
return getattr(sys.modules[module], classname)
def listrunner():
+ """
+ Get a list of runner class names (a list of strings).
+ """
return list(_namemodmap)
def run(classname, task, *args, **kwds):
+ """
+ Run `task` using runner named `classname`.
+
+ Rest of the arguments are passed to the runner class.
+ Return the instance of the used runner class.
+
+ """
runner = getrunner(classname)(*args, **kwds)
runner.run(task)
return runner
|
Document utility functions in buildlet.runner
|
## Code Before:
_namemodmap = dict(
SimpleRunner='simple',
IPythonParallelRunner='ipythonparallel',
MultiprocessingRunner='multiprocessingpool',
)
def getrunner(classname):
import sys
module = 'buildlet.runner.{0}'.format(_namemodmap[classname])
__import__(module)
return getattr(sys.modules[module], classname)
def listrunner():
return list(_namemodmap)
def run(classname, task, *args, **kwds):
runner = getrunner(classname)(*args, **kwds)
runner.run(task)
return runner
## Instruction:
Document utility functions in buildlet.runner
## Code After:
_namemodmap = dict(
SimpleRunner='simple',
IPythonParallelRunner='ipythonparallel',
MultiprocessingRunner='multiprocessingpool',
)
def getrunner(classname):
"""
Get a runner class named `classname`.
"""
import sys
module = 'buildlet.runner.{0}'.format(_namemodmap[classname])
__import__(module)
return getattr(sys.modules[module], classname)
def listrunner():
"""
Get a list of runner class names (a list of strings).
"""
return list(_namemodmap)
def run(classname, task, *args, **kwds):
"""
Run `task` using runner named `classname`.
Rest of the arguments are passed to the runner class.
Return the instance of the used runner class.
"""
runner = getrunner(classname)(*args, **kwds)
runner.run(task)
return runner
|
# ... existing code ...
def getrunner(classname):
"""
Get a runner class named `classname`.
"""
import sys
# ... modified code ...
def listrunner():
"""
Get a list of runner class names (a list of strings).
"""
return list(_namemodmap)
...
def run(classname, task, *args, **kwds):
"""
Run `task` using runner named `classname`.
Rest of the arguments are passed to the runner class.
Return the instance of the used runner class.
"""
runner = getrunner(classname)(*args, **kwds)
# ... rest of the code ...
|
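The name-to-class lookup that getrunner documents, rebuilt on importlib and a stdlib module so it runs anywhere; the real map points at buildlet runner modules instead:

import importlib

_namemodmap = {'OrderedDict': 'collections'}  # stand-in for the runner map

def getrunner(classname):
    # Import the mapped module, then pull the class off it by name.
    module = importlib.import_module(_namemodmap[classname])
    return getattr(module, classname)

print(getrunner('OrderedDict'))  # <class 'collections.OrderedDict'>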
5b1ab860a0706831b8abc77a060d6ba89cf8946a
|
interface/subprocess/001.backticks.py
|
interface/subprocess/001.backticks.py
|
import subprocess
# --- replacing shell backticks ---
# https://docs.python.org/2/library/subprocess.html#replacing-bin-sh-shell-backquote
# output=`mycmd myarg`
# output = check_output(["mycmd", "myarg"])
# not true, because mycmd is not passed to shell
try:
output = subprocess.check_output(["mycmd", "myarg"], shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
# ^ information about error condition is lost
# ^ output in case of OSError is lost
# ux notes:
# - `mycmd myarg` > ["mycmd", "myarg"]
# - `` is invisible
# subprocess.check_output is hardly rememberable
# - exception checking is excessive and not needed
# (common pattern is to check return code)
def backticks(command):
try:
# this doesn't escape shell patterns, such as:
# ^ (windows cmd.exe shell)
output = subprocess.check_output(command, shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
return output
|
import subprocess
# --- replacing shell backticks ---
# https://docs.python.org/2/library/subprocess.html#replacing-bin-sh-shell-backquote
# output=`mycmd myarg`
# output = check_output(["mycmd", "myarg"])
# not true, because mycmd is not passed to shell
try:
output = subprocess.check_output(["mycmd", "myarg"], shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
# ^ information about error condition is lost
# ^ output in case of OSError is lost
# ux notes:
# - `mycmd myarg` > ["mycmd", "myarg"]
# - `` is invisible
# subprocess.check_output is hardly rememberable
# - exception checking is excessive and not needed
# (common pattern is to check return code)
def backticks(command):
'''
Execute `command` and return output.
- no return code
- no stderr capture
- bailed out with MemoryError on Windows with 500Mb of output
'''
try:
# this doesn't escape shell patterns, such as:
# ^ (windows cmd.exe shell)
output = subprocess.check_output(command, shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
return output
|
Add docs to backticks function
|
interface.subprocess: Add docs to backticks function
|
Python
|
unlicense
|
techtonik/discovery,techtonik/discovery,techtonik/discovery
|
import subprocess
# --- replacing shell backticks ---
# https://docs.python.org/2/library/subprocess.html#replacing-bin-sh-shell-backquote
# output=`mycmd myarg`
# output = check_output(["mycmd", "myarg"])
# not true, because mycmd is not passed to shell
try:
output = subprocess.check_output(["mycmd", "myarg"], shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
# ^ information about error condition is lost
# ^ output in case of OSError is lost
# ux notes:
# - `mycmd myarg` > ["mycmd", "myarg"]
# - `` is invisible
# subprocess.check_output is hardly rememberable
# - exception checking is excessive and not needed
# (common pattern is to check return code)
def backticks(command):
+ '''
+ Execute `command` and return output.
+ - no return code
+ - no stderr capture
+ - bailed out with MemoryError on Windows with 500Mb of output
+ '''
try:
# this doesn't escape shell patterns, such as:
# ^ (windows cmd.exe shell)
output = subprocess.check_output(command, shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
return output
|
Add docs to backticks function
|
## Code Before:
import subprocess
# --- replacing shell backticks ---
# https://docs.python.org/2/library/subprocess.html#replacing-bin-sh-shell-backquote
# output=`mycmd myarg`
# output = check_output(["mycmd", "myarg"])
# not true, because mycmd is not passed to shell
try:
output = subprocess.check_output(["mycmd", "myarg"], shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
# ^ information about error condition is lost
# ^ output in case of OSError is lost
# ux notes:
# - `mycmd myarg` > ["mycmd", "myarg"]
# - `` is invisible
# subprocess.check_output is hardly rememberable
# - exception checking is excessive and not needed
# (common pattern is to check return code)
def backticks(command):
try:
# this doesn't escape shell patterns, such as:
# ^ (windows cmd.exe shell)
output = subprocess.check_output(command, shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
return output
## Instruction:
Add docs to backticks function
## Code After:
import subprocess
# --- replacing shell backticks ---
# https://docs.python.org/2/library/subprocess.html#replacing-bin-sh-shell-backquote
# output=`mycmd myarg`
# output = check_output(["mycmd", "myarg"])
# not true, because mycmd is not passed to shell
try:
output = subprocess.check_output(["mycmd", "myarg"], shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
# ^ information about error condition is lost
# ^ output in case of OSError is lost
# ux notes:
# - `mycmd myarg` > ["mycmd", "myarg"]
# - `` is invisible
# subprocess.check_output is hardly rememberable
# - exception checking is excessive and not needed
# (common pattern is to check return code)
def backticks(command):
'''
Execute `command` and return output.
- no return code
- no stderr capture
- bailed out with MemoryError on Windows with 500Mb of output
'''
try:
# this doesn't escape shell patterns, such as:
# ^ (windows cmd.exe shell)
output = subprocess.check_output(command, shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
return output
|
// ... existing code ...
def backticks(command):
'''
Execute `command` and return output.
- no return code
- no stderr capture
- bailed out with MemoryError on Windows with 500Mb of output
'''
try:
// ... rest of the code ...
|
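A minimal usage sketch of the `backticks` helper above. The shell command is illustrative; note that with shell=True a missing command usually surfaces as a non-zero shell exit (CalledProcessError) rather than OSError:

import subprocess

def backticks(command):
    # Same helper as in the record above: return stdout, swallow errors.
    try:
        output = subprocess.check_output(command, shell=True)
    except OSError:
        output = ''
    except subprocess.CalledProcessError as ex:
        output = ex.output
    return output

print(backticks('echo hello'))  # b'hello\n' on Python 3 (str on Python 2)
print(backticks('exit 3'))      # b'' -- non-zero exit, ex.output is empty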
74f8b419083189ba666459888d1427193c38873e
|
netdisco/discoverables/apple_tv.py
|
netdisco/discoverables/apple_tv.py
|
"""Discover Apple TV media players."""
from . import MDNSDiscoverable
# pylint: disable=too-few-public-methods
class Discoverable(MDNSDiscoverable):
"""Add support for Apple TV devices."""
def __init__(self, nd):
super(Discoverable, self).__init__(nd, '_appletv-v2._tcp.local.')
def info_from_entry(self, entry):
"""Returns most important info from mDNS entries."""
props = entry.properties
info = {
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
'hsgid': props.get(b'hG').decode('utf-8')
}
return info
def get_info(self):
"""Get details from Apple TV instances."""
return [self.info_from_entry(entry) for entry in self.get_entries()]
|
"""Discover Apple TV media players."""
import ipaddress
from . import MDNSDiscoverable
# pylint: disable=too-few-public-methods
class Discoverable(MDNSDiscoverable):
"""Add support for Apple TV devices."""
def __init__(self, nd):
super(Discoverable, self).__init__(nd, '_appletv-v2._tcp.local.')
def info_from_entry(self, entry):
"""Returns most important info from mDNS entries."""
props = entry.properties
info = {
'host': str(ipaddress.ip_address(entry.address)),
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
'hsgid': props.get(b'hG').decode('utf-8')
}
return info
def get_info(self):
"""Get details from Apple TV instances."""
return [self.info_from_entry(entry) for entry in self.get_entries()]
|
Add missing host field to Apple TV
|
Add missing host field to Apple TV
|
Python
|
mit
|
brburns/netdisco,balloob/netdisco
|
"""Discover Apple TV media players."""
+ import ipaddress
from . import MDNSDiscoverable
# pylint: disable=too-few-public-methods
class Discoverable(MDNSDiscoverable):
"""Add support for Apple TV devices."""
def __init__(self, nd):
super(Discoverable, self).__init__(nd, '_appletv-v2._tcp.local.')
def info_from_entry(self, entry):
"""Returns most important info from mDNS entries."""
props = entry.properties
info = {
+ 'host': str(ipaddress.ip_address(entry.address)),
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
'hsgid': props.get(b'hG').decode('utf-8')
}
return info
def get_info(self):
"""Get details from Apple TV instances."""
return [self.info_from_entry(entry) for entry in self.get_entries()]
|
Add missing host field to Apple TV
|
## Code Before:
"""Discover Apple TV media players."""
from . import MDNSDiscoverable
# pylint: disable=too-few-public-methods
class Discoverable(MDNSDiscoverable):
"""Add support for Apple TV devices."""
def __init__(self, nd):
super(Discoverable, self).__init__(nd, '_appletv-v2._tcp.local.')
def info_from_entry(self, entry):
"""Returns most important info from mDNS entries."""
props = entry.properties
info = {
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
'hsgid': props.get(b'hG').decode('utf-8')
}
return info
def get_info(self):
"""Get details from Apple TV instances."""
return [self.info_from_entry(entry) for entry in self.get_entries()]
## Instruction:
Add missing host field to Apple TV
## Code After:
"""Discover Apple TV media players."""
import ipaddress
from . import MDNSDiscoverable
# pylint: disable=too-few-public-methods
class Discoverable(MDNSDiscoverable):
"""Add support for Apple TV devices."""
def __init__(self, nd):
super(Discoverable, self).__init__(nd, '_appletv-v2._tcp.local.')
def info_from_entry(self, entry):
"""Returns most important info from mDNS entries."""
props = entry.properties
info = {
'host': str(ipaddress.ip_address(entry.address)),
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
'hsgid': props.get(b'hG').decode('utf-8')
}
return info
def get_info(self):
"""Get details from Apple TV instances."""
return [self.info_from_entry(entry) for entry in self.get_entries()]
|
...
"""Discover Apple TV media players."""
import ipaddress
from . import MDNSDiscoverable
...
info = {
'host': str(ipaddress.ip_address(entry.address)),
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
...
|
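The substantive piece of this change is converting zeroconf's packed address bytes into a printable host string; ipaddress.ip_address accepts the 4- or 16-byte packed form directly. A standalone sketch with made-up bytes:

import ipaddress

packed = bytes([192, 168, 1, 10])               # 4 packed bytes, as zeroconf stores them
print(str(ipaddress.ip_address(packed)))        # '192.168.1.10'
print(str(ipaddress.ip_address(b'\x00' * 16)))  # '::' -- 16 bytes parse as IPv6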
3ce0aef8d546f83485c1048dac9e9524f2501552
|
src/wagtail_personalisation/blocks.py
|
src/wagtail_personalisation/blocks.py
|
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from wagtail.core import blocks
from wagtail_personalisation.adapters import get_segment_adapter
from wagtail_personalisation.models import Segment
def list_segment_choices():
for pk, name in Segment.objects.values_list('pk', 'name'):
yield pk, name
class PersonalisedStructBlock(blocks.StructBlock):
"""Struct block that allows personalisation per block."""
segment = blocks.ChoiceBlock(
choices=list_segment_choices,
required=False, label=_("Personalisation segment"),
help_text=_("Only show this content block for users in this segment"))
def render(self, value, context=None):
"""Only render this content block for users in this segment.
:param value: The value from the block
:type value: dict
:param context: The context containing the request
:type context: dict
:returns: The provided block if matched, otherwise an empty string
:rtype: blocks.StructBlock or empty str
"""
request = context['request']
adapter = get_segment_adapter(request)
user_segments = adapter.get_segments()
if value['segment']:
for segment in user_segments:
if segment.id == int(value['segment']):
return super(PersonalisedStructBlock, self).render(
value, context)
return ""
|
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from wagtail.core import blocks
from wagtail_personalisation.adapters import get_segment_adapter
from wagtail_personalisation.models import Segment
def list_segment_choices():
yield -1, ("Show to everyone")
for pk, name in Segment.objects.values_list('pk', 'name'):
yield pk, name
class PersonalisedStructBlock(blocks.StructBlock):
"""Struct block that allows personalisation per block."""
segment = blocks.ChoiceBlock(
choices=list_segment_choices,
required=False, label=_("Personalisation segment"),
help_text=_("Only show this content block for users in this segment"))
def render(self, value, context=None):
"""Only render this content block for users in this segment.
:param value: The value from the block
:type value: dict
:param context: The context containing the request
:type context: dict
:returns: The provided block if matched, otherwise an empty string
:rtype: blocks.StructBlock or empty str
"""
request = context['request']
adapter = get_segment_adapter(request)
user_segments = adapter.get_segments()
try:
segment_id = int(value['segment'])
except (ValueError, TypeError):
return ''
if segment_id > 0:
for segment in user_segments:
if segment.id == segment_id:
return super(PersonalisedStructBlock, self).render(
value, context)
if segment_id == -1:
return super(PersonalisedStructBlock, self).render(
value, context)
return ''
|
Add an option to show a personalised block to everyone
|
Add an option to show a personalised block to everyone
|
Python
|
mit
|
LabD/wagtail-personalisation,LabD/wagtail-personalisation,LabD/wagtail-personalisation
|
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from wagtail.core import blocks
from wagtail_personalisation.adapters import get_segment_adapter
from wagtail_personalisation.models import Segment
def list_segment_choices():
+ yield -1, ("Show to everyone")
for pk, name in Segment.objects.values_list('pk', 'name'):
yield pk, name
class PersonalisedStructBlock(blocks.StructBlock):
"""Struct block that allows personalisation per block."""
segment = blocks.ChoiceBlock(
choices=list_segment_choices,
required=False, label=_("Personalisation segment"),
help_text=_("Only show this content block for users in this segment"))
def render(self, value, context=None):
"""Only render this content block for users in this segment.
:param value: The value from the block
:type value: dict
:param context: The context containing the request
:type context: dict
:returns: The provided block if matched, otherwise an empty string
:rtype: blocks.StructBlock or empty str
"""
request = context['request']
adapter = get_segment_adapter(request)
user_segments = adapter.get_segments()
- if value['segment']:
+ try:
+ segment_id = int(value['segment'])
+ except (ValueError, TypeError):
+ return ''
+
+ if segment_id > 0:
for segment in user_segments:
- if segment.id == int(value['segment']):
+ if segment.id == segment_id:
return super(PersonalisedStructBlock, self).render(
value, context)
- return ""
+ if segment_id == -1:
+ return super(PersonalisedStructBlock, self).render(
+ value, context)
+ return ''
+
|
Add an option to show a personalised block to everyone
|
## Code Before:
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from wagtail.core import blocks
from wagtail_personalisation.adapters import get_segment_adapter
from wagtail_personalisation.models import Segment
def list_segment_choices():
for pk, name in Segment.objects.values_list('pk', 'name'):
yield pk, name
class PersonalisedStructBlock(blocks.StructBlock):
"""Struct block that allows personalisation per block."""
segment = blocks.ChoiceBlock(
choices=list_segment_choices,
required=False, label=_("Personalisation segment"),
help_text=_("Only show this content block for users in this segment"))
def render(self, value, context=None):
"""Only render this content block for users in this segment.
:param value: The value from the block
:type value: dict
:param context: The context containing the request
:type context: dict
:returns: The provided block if matched, otherwise an empty string
:rtype: blocks.StructBlock or empty str
"""
request = context['request']
adapter = get_segment_adapter(request)
user_segments = adapter.get_segments()
if value['segment']:
for segment in user_segments:
if segment.id == int(value['segment']):
return super(PersonalisedStructBlock, self).render(
value, context)
return ""
## Instruction:
Add an option to show a personalised block to everyone
## Code After:
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from wagtail.core import blocks
from wagtail_personalisation.adapters import get_segment_adapter
from wagtail_personalisation.models import Segment
def list_segment_choices():
yield -1, ("Show to everyone")
for pk, name in Segment.objects.values_list('pk', 'name'):
yield pk, name
class PersonalisedStructBlock(blocks.StructBlock):
"""Struct block that allows personalisation per block."""
segment = blocks.ChoiceBlock(
choices=list_segment_choices,
required=False, label=_("Personalisation segment"),
help_text=_("Only show this content block for users in this segment"))
def render(self, value, context=None):
"""Only render this content block for users in this segment.
:param value: The value from the block
:type value: dict
:param context: The context containing the request
:type context: dict
:returns: The provided block if matched, otherwise an empty string
:rtype: blocks.StructBlock or empty str
"""
request = context['request']
adapter = get_segment_adapter(request)
user_segments = adapter.get_segments()
try:
segment_id = int(value['segment'])
except (ValueError, TypeError):
return ''
if segment_id > 0:
for segment in user_segments:
if segment.id == segment_id:
return super(PersonalisedStructBlock, self).render(
value, context)
if segment_id == -1:
return super(PersonalisedStructBlock, self).render(
value, context)
return ''
|
...
def list_segment_choices():
yield -1, ("Show to everyone")
for pk, name in Segment.objects.values_list('pk', 'name'):
...
try:
segment_id = int(value['segment'])
except (ValueError, TypeError):
return ''
if segment_id > 0:
for segment in user_segments:
if segment.id == segment_id:
return super(PersonalisedStructBlock, self).render(
...
if segment_id == -1:
return super(PersonalisedStructBlock, self).render(
value, context)
return ''
...
|
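The core of the change is the parse-then-dispatch around int(value['segment']). The same pattern in isolation; the function and names below are illustrative, not part of wagtail-personalisation:

def resolve_segment(raw_value, user_segment_ids, everyone_sentinel=-1):
    """Return True when a block should render for this user."""
    try:
        segment_id = int(raw_value)
    except (ValueError, TypeError):
        # Empty string or None: the editor selected no segment at all.
        return False
    if segment_id == everyone_sentinel:
        return True
    return segment_id in user_segment_ids

print(resolve_segment('-1', set()))  # True  -- "Show to everyone"
print(resolve_segment('3', {1, 3}))  # True  -- user belongs to segment 3
print(resolve_segment('', {1, 3}))   # False -- nothing selected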
3e4707a3f25f3a2f84f811394d738cebc1ca9f19
|
mygpo/search/models.py
|
mygpo/search/models.py
|
""" Wrappers for the results of a search """
class PodcastResult(object):
""" Wrapper for a Podcast search result """
@classmethod
def from_doc(cls, doc):
""" Construct a PodcastResult from a search result """
obj = cls()
for key, val in doc['_source'].items():
setattr(obj, key, val)
obj.id = doc['_id']
return obj
@property
def slug(self):
return next(iter(self.slugs), None)
@property
def url(self):
return next(iter(self.urls), None)
def get_id(self):
return self.id
@property
def display_title(self):
return self.title
|
""" Wrappers for the results of a search """
import uuid
class PodcastResult(object):
""" Wrapper for a Podcast search result """
@classmethod
def from_doc(cls, doc):
""" Construct a PodcastResult from a search result """
obj = cls()
for key, val in doc['_source'].items():
setattr(obj, key, val)
obj.id = uuid.UUID(doc['_id']).hex
return obj
@property
def slug(self):
return next(iter(self.slugs), None)
@property
def url(self):
return next(iter(self.urls), None)
def get_id(self):
return self.id
@property
def display_title(self):
return self.title
|
Fix parsing UUID in search results
|
Fix parsing UUID in search results
|
Python
|
agpl-3.0
|
gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo
|
""" Wrappers for the results of a search """
-
+ import uuid
class PodcastResult(object):
""" Wrapper for a Podcast search result """
@classmethod
def from_doc(cls, doc):
""" Construct a PodcastResult from a search result """
obj = cls()
for key, val in doc['_source'].items():
setattr(obj, key, val)
- obj.id = doc['_id']
+ obj.id = uuid.UUID(doc['_id']).hex
return obj
@property
def slug(self):
return next(iter(self.slugs), None)
@property
def url(self):
return next(iter(self.urls), None)
def get_id(self):
return self.id
@property
def display_title(self):
return self.title
|
Fix parsing UUID in search results
|
## Code Before:
""" Wrappers for the results of a search """
class PodcastResult(object):
""" Wrapper for a Podcast search result """
@classmethod
def from_doc(cls, doc):
""" Construct a PodcastResult from a search result """
obj = cls()
for key, val in doc['_source'].items():
setattr(obj, key, val)
obj.id = doc['_id']
return obj
@property
def slug(self):
return next(iter(self.slugs), None)
@property
def url(self):
return next(iter(self.urls), None)
def get_id(self):
return self.id
@property
def display_title(self):
return self.title
## Instruction:
Fix parsing UUID in search results
## Code After:
""" Wrappers for the results of a search """
import uuid
class PodcastResult(object):
""" Wrapper for a Podcast search result """
@classmethod
def from_doc(cls, doc):
""" Construct a PodcastResult from a search result """
obj = cls()
for key, val in doc['_source'].items():
setattr(obj, key, val)
obj.id = uuid.UUID(doc['_id']).hex
return obj
@property
def slug(self):
return next(iter(self.slugs), None)
@property
def url(self):
return next(iter(self.urls), None)
def get_id(self):
return self.id
@property
def display_title(self):
return self.title
|
// ... existing code ...
import uuid
// ... modified code ...
obj.id = uuid.UUID(doc['_id']).hex
return obj
// ... rest of the code ...
|
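For reference, uuid.UUID(...).hex turns the dashed id string Elasticsearch returns into the 32-character dashless form, and it also accepts dashless input, so the conversion is idempotent (the id below is made up):

import uuid

doc_id = '2d7c3b5e-9f1a-4c3b-8d2e-1a2b3c4d5e6f'
print(uuid.UUID(doc_id).hex)  # '2d7c3b5e9f1a4c3b8d2e1a2b3c4d5e6f'
assert uuid.UUID(uuid.UUID(doc_id).hex).hex == uuid.UUID(doc_id).hex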
8359d60480371a8f63bdd4ea1b7cf03f231c1350
|
djangopress/settings_tinymce.py
|
djangopress/settings_tinymce.py
|
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js"
|
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider],#show_blog_latest[class|id|words|images|blog|count]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js"
|
Update settings for tinymce to allow show_blog_latest tag
|
Update settings for tinymce to allow show_blog_latest tag
|
Python
|
mit
|
codefisher/djangopress,codefisher/djangopress,codefisher/djangopress,codefisher/djangopress
|
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
- 'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider]'
+ 'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider],#show_blog_latest[class|id|words|images|blog|count]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js"
|
Update settings for tinymce to allow show_blog_latest tag
|
## Code Before:
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js"
## Instruction:
Update settings for tinymce to allow show_blog_latest tag
## Code After:
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider],#show_blog_latest[class|id|words|images|blog|count]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js"
|
...
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider],#show_blog_latest[class|id|words|images|blog|count]'
}
...
|
bcca20cecbc664422f72359ba4fba7d55e833b32
|
swampdragon/connections/sockjs_connection.py
|
swampdragon/connections/sockjs_connection.py
|
from sockjs.tornado import SockJSConnection
from ..pubsub_providers.redis_pubsub_provider import RedisPubSubProvider
from .. import route_handler
import json
pub_sub = RedisPubSubProvider()
class ConnectionMixin(object):
def to_json(self, data):
if isinstance(data, dict):
return data
try:
data = json.loads(data.replace("'", '"'))
return data
except:
return json.dumps({'message': data})
def to_string(self, data):
if isinstance(data, dict):
return json.dumps(data).replace("'", '"')
return data
class SubscriberConnection(ConnectionMixin, SockJSConnection):
def __init__(self, session):
super(SubscriberConnection, self).__init__(session)
def on_open(self, request):
self.pub_sub = pub_sub
def on_close(self):
self.pub_sub.close(self)
def on_message(self, data):
try:
data = self.to_json(data)
handler = route_handler.get_route_handler(data['route'])
handler(self).handle(data)
except Exception as e:
self.abort_connection()
raise e
def abort_connection(self):
self.close()
def send(self, message, binary=False):
super(SubscriberConnection, self).send(message, binary)
def broadcast(self, clients, message):
super(SubscriberConnection, self).broadcast(clients, message)
class DjangoSubscriberConnection(SubscriberConnection):
def __init__(self, session):
super(DjangoSubscriberConnection, self).__init__(session)
|
from sockjs.tornado import SockJSConnection
from ..pubsub_providers.redis_pubsub_provider import RedisPubSubProvider
from .. import route_handler
import json
pub_sub = RedisPubSubProvider()
class ConnectionMixin(object):
def to_json(self, data):
if isinstance(data, dict):
return data
try:
data = json.loads(data.replace("'", '"'))
return data
except:
return json.dumps({'message': data})
def to_string(self, data):
if isinstance(data, dict):
return json.dumps(data).replace("'", '"')
return data
class SubscriberConnection(ConnectionMixin, SockJSConnection):
channels = []
def __init__(self, session):
super(SubscriberConnection, self).__init__(session)
def on_open(self, request):
self.pub_sub = pub_sub
def on_close(self):
self.pub_sub.close(self)
def on_message(self, data):
try:
data = self.to_json(data)
handler = route_handler.get_route_handler(data['route'])
handler(self).handle(data)
except Exception as e:
self.abort_connection()
raise e
def abort_connection(self):
self.close()
def send(self, message, binary=False):
super(SubscriberConnection, self).send(message, binary)
def broadcast(self, clients, message):
super(SubscriberConnection, self).broadcast(clients, message)
class DjangoSubscriberConnection(SubscriberConnection):
def __init__(self, session):
super(DjangoSubscriberConnection, self).__init__(session)
|
Include channel list in connection
|
Include channel list in connection
|
Python
|
bsd-3-clause
|
sahlinet/swampdragon,denizs/swampdragon,michael-k/swampdragon,seclinch/swampdragon,Manuel4131/swampdragon,aexeagmbh/swampdragon,Manuel4131/swampdragon,d9pouces/swampdragon,aexeagmbh/swampdragon,michael-k/swampdragon,d9pouces/swampdragon,boris-savic/swampdragon,boris-savic/swampdragon,jonashagstedt/swampdragon,jonashagstedt/swampdragon,faulkner/swampdragon,faulkner/swampdragon,aexeagmbh/swampdragon,Manuel4131/swampdragon,michael-k/swampdragon,d9pouces/swampdragon,denizs/swampdragon,sahlinet/swampdragon,bastianh/swampdragon,h-hirokawa/swampdragon,bastianh/swampdragon,seclinch/swampdragon,faulkner/swampdragon,bastianh/swampdragon,h-hirokawa/swampdragon,sahlinet/swampdragon,seclinch/swampdragon,jonashagstedt/swampdragon,denizs/swampdragon,boris-savic/swampdragon
|
from sockjs.tornado import SockJSConnection
from ..pubsub_providers.redis_pubsub_provider import RedisPubSubProvider
from .. import route_handler
import json
pub_sub = RedisPubSubProvider()
class ConnectionMixin(object):
def to_json(self, data):
if isinstance(data, dict):
return data
try:
data = json.loads(data.replace("'", '"'))
return data
except:
return json.dumps({'message': data})
def to_string(self, data):
if isinstance(data, dict):
return json.dumps(data).replace("'", '"')
return data
class SubscriberConnection(ConnectionMixin, SockJSConnection):
+ channels = []
+
def __init__(self, session):
super(SubscriberConnection, self).__init__(session)
def on_open(self, request):
self.pub_sub = pub_sub
def on_close(self):
self.pub_sub.close(self)
def on_message(self, data):
try:
data = self.to_json(data)
handler = route_handler.get_route_handler(data['route'])
handler(self).handle(data)
except Exception as e:
self.abort_connection()
raise e
def abort_connection(self):
self.close()
def send(self, message, binary=False):
super(SubscriberConnection, self).send(message, binary)
def broadcast(self, clients, message):
super(SubscriberConnection, self).broadcast(clients, message)
class DjangoSubscriberConnection(SubscriberConnection):
def __init__(self, session):
super(DjangoSubscriberConnection, self).__init__(session)
|
Include channel list in connection
|
## Code Before:
from sockjs.tornado import SockJSConnection
from ..pubsub_providers.redis_pubsub_provider import RedisPubSubProvider
from .. import route_handler
import json
pub_sub = RedisPubSubProvider()
class ConnectionMixin(object):
def to_json(self, data):
if isinstance(data, dict):
return data
try:
data = json.loads(data.replace("'", '"'))
return data
except:
return json.dumps({'message': data})
def to_string(self, data):
if isinstance(data, dict):
return json.dumps(data).replace("'", '"')
return data
class SubscriberConnection(ConnectionMixin, SockJSConnection):
def __init__(self, session):
super(SubscriberConnection, self).__init__(session)
def on_open(self, request):
self.pub_sub = pub_sub
def on_close(self):
self.pub_sub.close(self)
def on_message(self, data):
try:
data = self.to_json(data)
handler = route_handler.get_route_handler(data['route'])
handler(self).handle(data)
except Exception as e:
self.abort_connection()
raise e
def abort_connection(self):
self.close()
def send(self, message, binary=False):
super(SubscriberConnection, self).send(message, binary)
def broadcast(self, clients, message):
super(SubscriberConnection, self).broadcast(clients, message)
class DjangoSubscriberConnection(SubscriberConnection):
def __init__(self, session):
super(DjangoSubscriberConnection, self).__init__(session)
## Instruction:
Include channel list in connection
## Code After:
from sockjs.tornado import SockJSConnection
from ..pubsub_providers.redis_pubsub_provider import RedisPubSubProvider
from .. import route_handler
import json
pub_sub = RedisPubSubProvider()
class ConnectionMixin(object):
def to_json(self, data):
if isinstance(data, dict):
return data
try:
data = json.loads(data.replace("'", '"'))
return data
except:
return json.dumps({'message': data})
def to_string(self, data):
if isinstance(data, dict):
return json.dumps(data).replace("'", '"')
return data
class SubscriberConnection(ConnectionMixin, SockJSConnection):
channels = []
def __init__(self, session):
super(SubscriberConnection, self).__init__(session)
def on_open(self, request):
self.pub_sub = pub_sub
def on_close(self):
self.pub_sub.close(self)
def on_message(self, data):
try:
data = self.to_json(data)
handler = route_handler.get_route_handler(data['route'])
handler(self).handle(data)
except Exception as e:
self.abort_connection()
raise e
def abort_connection(self):
self.close()
def send(self, message, binary=False):
super(SubscriberConnection, self).send(message, binary)
def broadcast(self, clients, message):
super(SubscriberConnection, self).broadcast(clients, message)
class DjangoSubscriberConnection(SubscriberConnection):
def __init__(self, session):
super(DjangoSubscriberConnection, self).__init__(session)
|
...
class SubscriberConnection(ConnectionMixin, SockJSConnection):
channels = []
def __init__(self, session):
...
|
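One caveat with channels = [] at class level: the list is shared by every connection until an instance rebinds it. A standalone sketch of that Python behaviour, unrelated to SockJS itself:

class Connection(object):
    channels = []  # class attribute: one list shared by all instances

a, b = Connection(), Connection()
a.channels.append('room-1')    # mutates the shared list in place
print(b.channels)              # ['room-1'] -- visible on every instance
b.channels = ['room-2']        # rebinding creates a per-instance list
print(a.channels, b.channels)  # ['room-1'] ['room-2']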
4fbec4f4c0741edb6207d762cc92e48c6f249eec
|
dragonflow/common/extensions.py
|
dragonflow/common/extensions.py
|
SUPPORTED_API_EXTENSIONS = [
'agent',
'quotas',
'extra_dhcp_opt',
'binding',
'dhcp_agent_scheduler',
'security-group',
'external-net',
'router',
'l3_agent_scheduler',
'subnet_allocation',
'port-security',
'allowed-address-pairs',
'net-mtu',
'default-subnetpools',
'extraroute',
'bgp',
'trunk',
'flow_classifier',
'sfc',
]
|
SUPPORTED_API_EXTENSIONS = [
'agent',
'quotas',
'extra_dhcp_opt',
'binding',
'dhcp_agent_scheduler',
'security-group',
'external-net',
'router',
'subnet_allocation',
'port-security',
'allowed-address-pairs',
'net-mtu',
'default-subnetpools',
'extraroute',
'bgp',
'trunk',
'flow_classifier',
'sfc',
]
|
Disable L3 agents scheduler extension in Tempest
|
Disable L3 agents scheduler extension in Tempest
Change-Id: Ibc2d85bce9abb821e897693ebdade66d3b9199c3
Closes-Bug: #1707496
|
Python
|
apache-2.0
|
openstack/dragonflow,openstack/dragonflow,openstack/dragonflow
|
SUPPORTED_API_EXTENSIONS = [
'agent',
'quotas',
'extra_dhcp_opt',
'binding',
'dhcp_agent_scheduler',
'security-group',
'external-net',
'router',
- 'l3_agent_scheduler',
'subnet_allocation',
'port-security',
'allowed-address-pairs',
'net-mtu',
'default-subnetpools',
'extraroute',
'bgp',
'trunk',
'flow_classifier',
'sfc',
]
|
Disable L3 agents scheduler extension in Tempest
|
## Code Before:
SUPPORTED_API_EXTENSIONS = [
'agent',
'quotas',
'extra_dhcp_opt',
'binding',
'dhcp_agent_scheduler',
'security-group',
'external-net',
'router',
'l3_agent_scheduler',
'subnet_allocation',
'port-security',
'allowed-address-pairs',
'net-mtu',
'default-subnetpools',
'extraroute',
'bgp',
'trunk',
'flow_classifier',
'sfc',
]
## Instruction:
Disable L3 agents scheduler extension in Tempest
## Code After:
SUPPORTED_API_EXTENSIONS = [
'agent',
'quotas',
'extra_dhcp_opt',
'binding',
'dhcp_agent_scheduler',
'security-group',
'external-net',
'router',
'subnet_allocation',
'port-security',
'allowed-address-pairs',
'net-mtu',
'default-subnetpools',
'extraroute',
'bgp',
'trunk',
'flow_classifier',
'sfc',
]
|
# ... existing code ...
'router',
'subnet_allocation',
# ... rest of the code ...
|
6fe391b2e2f9b88a6835a6636a5d58810852ab5e
|
pyhole/tests/test_log.py
|
pyhole/tests/test_log.py
|
"""Pyhole Log Unit Tests"""
import os
import unittest
from pyhole.core import logger
from pyhole.core import utils
class TestLogger(unittest.TestCase):
def test_logger(self):
test_log_dir = utils.get_home_directory() + "logs/"
try:
# NOTE(jk0): If the configuration file doesn't exist, the config
# class will generate it and raise a SystemExit.
logger.setup_logger(name="test")
except SystemExit:
logger.setup_logger(name="test")
test_log = logger.get_logger("TEST")
self.assertEqual("TEST", test_log.name)
self.assertEqual(test_log.level, 0)
os.unlink(test_log_dir + "test.log")
|
"""Pyhole Log Unit Tests"""
import os
import unittest
from pyhole.core import logger
from pyhole.core import utils
class TestLogger(unittest.TestCase):
def test_logger(self):
test_log_dir = utils.get_home_directory() + "logs/"
try:
# NOTE(jk0): If the configuration file doesn't exist, the config
# class will generate it and raise a SystemExit.
logger.setup_logger("test")
except SystemExit:
logger.setup_logger("test")
test_log = logger.get_logger("TEST")
self.assertEqual("TEST", test_log.name)
self.assertEqual(test_log.level, 0)
os.unlink(test_log_dir + "test.log")
|
Use setup_logger properly in tests.
|
Use setup_logger properly in tests.
|
Python
|
apache-2.0
|
jk0/pyhole,jk0/pyhole,jk0/pyhole
|
"""Pyhole Log Unit Tests"""
import os
import unittest
from pyhole.core import logger
from pyhole.core import utils
class TestLogger(unittest.TestCase):
def test_logger(self):
test_log_dir = utils.get_home_directory() + "logs/"
try:
# NOTE(jk0): If the configuration file doesn't exist, the config
# class will generate it and raise a SystemExit.
- logger.setup_logger(name="test")
+ logger.setup_logger("test")
except SystemExit:
- logger.setup_logger(name="test")
+ logger.setup_logger("test")
test_log = logger.get_logger("TEST")
self.assertEqual("TEST", test_log.name)
self.assertEqual(test_log.level, 0)
os.unlink(test_log_dir + "test.log")
|
Use setup_logger properly in tests.
|
## Code Before:
"""Pyhole Log Unit Tests"""
import os
import unittest
from pyhole.core import logger
from pyhole.core import utils
class TestLogger(unittest.TestCase):
def test_logger(self):
test_log_dir = utils.get_home_directory() + "logs/"
try:
# NOTE(jk0): If the configuration file doesn't exist, the config
# class will generate it and raise a SystemExit.
logger.setup_logger(name="test")
except SystemExit:
logger.setup_logger(name="test")
test_log = logger.get_logger("TEST")
self.assertEqual("TEST", test_log.name)
self.assertEqual(test_log.level, 0)
os.unlink(test_log_dir + "test.log")
## Instruction:
Use setup_logger properly in tests.
## Code After:
"""Pyhole Log Unit Tests"""
import os
import unittest
from pyhole.core import logger
from pyhole.core import utils
class TestLogger(unittest.TestCase):
def test_logger(self):
test_log_dir = utils.get_home_directory() + "logs/"
try:
# NOTE(jk0): If the configuration file doesn't exist, the config
# class will generate it and raise a SystemExit.
logger.setup_logger("test")
except SystemExit:
logger.setup_logger("test")
test_log = logger.get_logger("TEST")
self.assertEqual("TEST", test_log.name)
self.assertEqual(test_log.level, 0)
os.unlink(test_log_dir + "test.log")
|
// ... existing code ...
# class will generate it and raise a SystemExit.
logger.setup_logger("test")
except SystemExit:
logger.setup_logger("test")
// ... rest of the code ...
|
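The fix matters because a keyword call only keeps working while the parameter keeps that exact name. A generic illustration of the failure mode; the signature below is invented, not pyhole's real one:

# Illustrative only -- not pyhole's actual signature.
def setup_logger(config_name):
    print('configured %s' % config_name)

try:
    setup_logger(name='test')  # breaks as soon as the parameter is renamed
except TypeError as exc:
    print(exc)                 # ... got an unexpected keyword argument 'name'

setup_logger('test')           # the positional call keeps working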
cb2f768f01cc3d40fe95574d0702470d480888c2
|
DTError.py
|
DTError.py
|
__all__ = ["DTErrorMessage", "DTSaveError"]
import sys
_errors = []
def DTErrorMessage(fcn, msg):
"""Accumulate a message and echo to standard error.
Arguments:
fcn -- typically a function or module name
msg -- an error or warning message
Returns:
Nothing
Typically you call this each time an error or warning
should be presented, then call DTSaveError before exiting.
"""
err_msg = "%s: %s" % (fcn, msg)
_errors.append(err_msg)
sys.stderr.write(err_msg + "\n")
def DTSaveError(datafile, name="ExecutionErrors"):
"""Save accumulated messages to a file.
Arguments:
datafile -- an open DTDataFile instance
name -- defaults to "ExecutionErrors" for DataTank
Returns:
Nothing
This will be displayed in DataTank's Messages panel.
"""
if len(_errors):
datafile[name] = _errors
|
__all__ = ["DTErrorMessage", "DTSaveError"]
import sys
import os
_errors = []
def DTErrorMessage(fcn, msg):
"""Accumulate a message and echo to standard error.
Arguments:
fcn -- typically a function or module name
msg -- an error or warning message
Returns:
Nothing
Typically you call this each time an error or warning
should be presented, then call DTSaveError before exiting.
"""
if fcn == None:
fcn = os.path.basename(sys.argv[0])
err_msg = "%s: %s" % (fcn, msg)
_errors.append(err_msg)
sys.stderr.write(err_msg + "\n")
def DTSaveError(datafile, name="ExecutionErrors"):
"""Save accumulated messages to a file.
Arguments:
datafile -- an open DTDataFile instance
name -- defaults to "ExecutionErrors" for DataTank
Returns:
Nothing
This will be displayed in DataTank's Messages panel.
"""
if len(_errors):
datafile.write_anonymous(_errors, name)
|
Allow passing None for function, and use the executable name in that case. Save error list anonymously
|
Allow passing None for function, and use the executable
name in that case.
Save error list anonymously
|
Python
|
bsd-3-clause
|
amaxwell/datatank_py
|
__all__ = ["DTErrorMessage", "DTSaveError"]
import sys
+ import os
_errors = []
def DTErrorMessage(fcn, msg):
"""Accumulate a message and echo to standard error.
Arguments:
fcn -- typically a function or module name
msg -- an error or warning message
Returns:
Nothing
Typically you call this each time an error or warning
should be presented, then call DTSaveError before exiting.
"""
+
+ if fcn == None:
+ fcn = os.path.basename(sys.argv[0])
err_msg = "%s: %s" % (fcn, msg)
_errors.append(err_msg)
sys.stderr.write(err_msg + "\n")
def DTSaveError(datafile, name="ExecutionErrors"):
"""Save accumulated messages to a file.
Arguments:
datafile -- an open DTDataFile instance
name -- defaults to "ExecutionErrors" for DataTank
Returns:
Nothing
This will be displayed in DataTank's Messages panel.
"""
if len(_errors):
- datafile[name] = _errors
+ datafile.write_anonymous(_errors, name)
|
Allow passing None for function, and use the executable name in that case. Save error list anonymously
|
## Code Before:
__all__ = ["DTErrorMessage", "DTSaveError"]
import sys
_errors = []
def DTErrorMessage(fcn, msg):
"""Accumulate a message and echo to standard error.
Arguments:
fcn -- typically a function or module name
msg -- an error or warning message
Returns:
Nothing
Typically you call this each time an error or warning
should be presented, then call DTSaveError before exiting.
"""
err_msg = "%s: %s" % (fcn, msg)
_errors.append(err_msg)
sys.stderr.write(err_msg + "\n")
def DTSaveError(datafile, name="ExecutionErrors"):
"""Save accumulated messages to a file.
Arguments:
datafile -- an open DTDataFile instance
name -- defaults to "ExecutionErrors" for DataTank
Returns:
Nothing
This will be displayed in DataTank's Messages panel.
"""
if len(_errors):
datafile[name] = _errors
## Instruction:
Allow passing None for function, and use the executable name in that case. Save error list anonymously
## Code After:
__all__ = ["DTErrorMessage", "DTSaveError"]
import sys
import os
_errors = []
def DTErrorMessage(fcn, msg):
"""Accumulate a message and echo to standard error.
Arguments:
fcn -- typically a function or module name
msg -- an error or warning message
Returns:
Nothing
Typically you call this each time an error or warning
should be presented, then call DTSaveError before exiting.
"""
if fcn == None:
fcn = os.path.basename(sys.argv[0])
err_msg = "%s: %s" % (fcn, msg)
_errors.append(err_msg)
sys.stderr.write(err_msg + "\n")
def DTSaveError(datafile, name="ExecutionErrors"):
"""Save accumulated messages to a file.
Arguments:
datafile -- an open DTDataFile instance
name -- defaults to "ExecutionErrors" for DataTank
Returns:
Nothing
This will be displayed in DataTank's Messages panel.
"""
if len(_errors):
datafile.write_anonymous(_errors, name)
|
// ... existing code ...
import sys
import os
// ... modified code ...
"""
if fcn == None:
fcn = os.path.basename(sys.argv[0])
...
if len(_errors):
datafile.write_anonymous(_errors, name)
// ... rest of the code ...
|
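The new fallback is easy to exercise on its own; a sketch of the same logic outside the module (the printed name depends on how the script was launched):

import os
import sys

def error_prefix(fcn):
    # Same fallback as DTErrorMessage: use the executable name when fcn is None.
    if fcn is None:
        fcn = os.path.basename(sys.argv[0])
    return fcn

print(error_prefix('my_function'))  # 'my_function'
print(error_prefix(None))           # e.g. 'script.py', whatever sys.argv[0] is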
66946f72d243f1836df0dbd8917f204011ec1701
|
hs_core/autocomplete_light_registry.py
|
hs_core/autocomplete_light_registry.py
|
from autocomplete_light import shortcuts as autocomplete_light
from django.contrib.auth.models import User, Group
class UserAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields = ['username', 'first_name', 'last_name']
split_words = True
def choices_for_request(self):
self.choices = self.choices.filter(is_active=True)
return super(UserAutocomplete, self).choices_for_request()
def choice_label(self, choice):
label = ""
if choice.first_name:
label += choice.first_name
if choice.last_name:
if choice.first_name:
label += " "
label += choice.last_name
if choice.userprofile.organization:
if choice.first_name or choice.last_name:
label += ", "
label += choice.userprofile.organization
if choice.username:
label += "".join([" (", choice.username, ")"])
return label
autocomplete_light.register(User, UserAutocomplete)
class GroupAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields=['name']
def choices_for_request(self):
self.choices = self.choices.filter(gaccess__active=True).exclude(name='Hydroshare Author')
return super(GroupAutocomplete, self).choices_for_request()
autocomplete_light.register(Group, GroupAutocomplete)
|
from autocomplete_light import shortcuts as autocomplete_light
from django.contrib.auth.models import User, Group
class UserAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields = ['username', 'first_name', 'last_name']
split_words = True
def choices_for_request(self):
self.choices = self.choices.filter(is_active=True)
return super(UserAutocomplete, self).choices_for_request()
def choice_label(self, choice):
label = " ".join([choice.first_name or "", choice.userprofile.middle_name or "", choice.last_name or ""])
if choice.userprofile.organization:
if choice.first_name or choice.last_name:
label += ", "
label += choice.userprofile.organization
if choice.username:
label += "".join([" (", choice.username, ")"])
return label
autocomplete_light.register(User, UserAutocomplete)
class GroupAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields=['name']
def choices_for_request(self):
self.choices = self.choices.filter(gaccess__active=True).exclude(name='Hydroshare Author')
return super(GroupAutocomplete, self).choices_for_request()
autocomplete_light.register(Group, GroupAutocomplete)
|
Add middle name display to autocomplete widget
|
Add middle name display to autocomplete widget
|
Python
|
bsd-3-clause
|
hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare
|
from autocomplete_light import shortcuts as autocomplete_light
from django.contrib.auth.models import User, Group
+
class UserAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields = ['username', 'first_name', 'last_name']
split_words = True
def choices_for_request(self):
self.choices = self.choices.filter(is_active=True)
return super(UserAutocomplete, self).choices_for_request()
def choice_label(self, choice):
+ label = " ".join([choice.first_name or "", choice.userprofile.middle_name or "", choice.last_name or ""])
- label = ""
-
- if choice.first_name:
- label += choice.first_name
-
- if choice.last_name:
- if choice.first_name:
- label += " "
- label += choice.last_name
if choice.userprofile.organization:
if choice.first_name or choice.last_name:
label += ", "
label += choice.userprofile.organization
if choice.username:
label += "".join([" (", choice.username, ")"])
return label
autocomplete_light.register(User, UserAutocomplete)
class GroupAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields=['name']
def choices_for_request(self):
self.choices = self.choices.filter(gaccess__active=True).exclude(name='Hydroshare Author')
return super(GroupAutocomplete, self).choices_for_request()
autocomplete_light.register(Group, GroupAutocomplete)
|
Add middle name display to autocomplete widget
|
## Code Before:
from autocomplete_light import shortcuts as autocomplete_light
from django.contrib.auth.models import User, Group
class UserAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields = ['username', 'first_name', 'last_name']
split_words = True
def choices_for_request(self):
self.choices = self.choices.filter(is_active=True)
return super(UserAutocomplete, self).choices_for_request()
def choice_label(self, choice):
label = ""
if choice.first_name:
label += choice.first_name
if choice.last_name:
if choice.first_name:
label += " "
label += choice.last_name
if choice.userprofile.organization:
if choice.first_name or choice.last_name:
label += ", "
label += choice.userprofile.organization
if choice.username:
label += "".join([" (", choice.username, ")"])
return label
autocomplete_light.register(User, UserAutocomplete)
class GroupAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields=['name']
def choices_for_request(self):
self.choices = self.choices.filter(gaccess__active=True).exclude(name='Hydroshare Author')
return super(GroupAutocomplete, self).choices_for_request()
autocomplete_light.register(Group, GroupAutocomplete)
## Instruction:
Add middle name display to autocomplete widget
## Code After:
from autocomplete_light import shortcuts as autocomplete_light
from django.contrib.auth.models import User, Group
class UserAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields = ['username', 'first_name', 'last_name']
split_words = True
def choices_for_request(self):
self.choices = self.choices.filter(is_active=True)
return super(UserAutocomplete, self).choices_for_request()
def choice_label(self, choice):
label = " ".join([choice.first_name or "", choice.userprofile.middle_name or "", choice.last_name or ""])
if choice.userprofile.organization:
if choice.first_name or choice.last_name:
label += ", "
label += choice.userprofile.organization
if choice.username:
label += "".join([" (", choice.username, ")"])
return label
autocomplete_light.register(User, UserAutocomplete)
class GroupAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields=['name']
def choices_for_request(self):
self.choices = self.choices.filter(gaccess__active=True).exclude(name='Hydroshare Author')
return super(GroupAutocomplete, self).choices_for_request()
autocomplete_light.register(Group, GroupAutocomplete)
|
// ... existing code ...
from django.contrib.auth.models import User, Group
// ... modified code ...
def choice_label(self, choice):
label = " ".join([choice.first_name or "", choice.userprofile.middle_name or "", choice.last_name or ""])
// ... rest of the code ...
|
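A caveat with the one-line join: missing name parts become empty strings, so the label can pick up doubled or stray spaces. A quick sketch, plus a filtering variant (illustrative, not what the commit ships):

parts = ['Ada', '', 'Lovelace']  # no middle name recorded
print(repr(' '.join(parts)))                  # 'Ada  Lovelace' -- double space
print(repr(' '.join(p for p in parts if p)))  # 'Ada Lovelace'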
a93ad6ce9a264a82717a37230e48ff00d9c642fc
|
pywikibot/families/wikidata_family.py
|
pywikibot/families/wikidata_family.py
|
__version__ = '$Id$'
from pywikibot import family
# The wikidata family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikidata'
self.langs = {
'wikidata': 'www.wikidata.org',
'repo': 'wikidata-test-repo.wikimedia.de',
'client': 'wikidata-test-client.wikimedia.de',
'test': 'test.wikidata.org',
}
def scriptpath(self, code):
if code == 'client':
return ''
return super(Family, self).scriptpath(code)
def shared_data_repository(self, code, transcluded=False):
"""Always return a repository tupe. This enables testing whether
the site opject is the repository itself, see Site.is_data_repository()
"""
if transcluded:
return (None, None)
else:
if code == 'wikidata':
return ('wikidata', 'wikidata')
elif code == 'test':
return ('test', 'wikidata')
else:
return ('repo', 'wikidata')
def globes(self, code):
"""Supported globes for Coordinate datatype"""
return {'earth': 'http://www.wikidata.org/entity/Q2'}
|
__version__ = '$Id$'
from pywikibot import family
# The wikidata family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikidata'
self.langs = {
'wikidata': 'www.wikidata.org',
'repo': 'wikidata-test-repo.wikimedia.de',
'client': 'wikidata-test-client.wikimedia.de',
'test': 'test.wikidata.org',
}
def scriptpath(self, code):
if code == 'client':
return ''
return super(Family, self).scriptpath(code)
def shared_data_repository(self, code, transcluded=False):
"""Always return a repository tupe. This enables testing whether
the site opject is the repository itself, see Site.is_data_repository()
"""
if transcluded:
return (None, None)
else:
if code == 'wikidata':
return ('wikidata', 'wikidata')
elif code == 'test':
return ('test', 'wikidata')
else:
return ('repo', 'wikidata')
def globes(self, code):
"""Supported globes for Coordinate datatype"""
return {'earth': 'http://www.wikidata.org/entity/Q2',
'moon': 'http://www.wikidata.org/entity/Q405'}
|
Add moon (Q405) to the list of globes
|
Add moon (Q405) to the list of globes
Change-Id: I2dd9f87fcb1d748bff94328575f8439dc36035e3
|
Python
|
mit
|
magul/pywikibot-core,wikimedia/pywikibot-core,happy5214/pywikibot-core,jayvdb/pywikibot-core,Darkdadaah/pywikibot-core,trishnaguha/pywikibot-core,VcamX/pywikibot-core,hasteur/g13bot_tools_new,h4ck3rm1k3/pywikibot-core,hasteur/g13bot_tools_new,wikimedia/pywikibot-core,hasteur/g13bot_tools_new,PersianWikipedia/pywikibot-core,smalyshev/pywikibot-core,darthbhyrava/pywikibot-local,magul/pywikibot-core,TridevGuha/pywikibot-core,jayvdb/pywikibot-core,happy5214/pywikibot-core,h4ck3rm1k3/pywikibot-core,npdoty/pywikibot,valhallasw/pywikibot-core,icyflame/batman,Darkdadaah/pywikibot-core,emijrp/pywikibot-core,xZise/pywikibot-core,npdoty/pywikibot
|
__version__ = '$Id$'
from pywikibot import family
# The wikidata family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikidata'
self.langs = {
'wikidata': 'www.wikidata.org',
'repo': 'wikidata-test-repo.wikimedia.de',
'client': 'wikidata-test-client.wikimedia.de',
'test': 'test.wikidata.org',
}
def scriptpath(self, code):
if code == 'client':
return ''
return super(Family, self).scriptpath(code)
def shared_data_repository(self, code, transcluded=False):
"""Always return a repository tupe. This enables testing whether
the site opject is the repository itself, see Site.is_data_repository()
"""
if transcluded:
return (None, None)
else:
if code == 'wikidata':
return ('wikidata', 'wikidata')
elif code == 'test':
return ('test', 'wikidata')
else:
return ('repo', 'wikidata')
def globes(self, code):
"""Supported globes for Coordinate datatype"""
- return {'earth': 'http://www.wikidata.org/entity/Q2'}
+ return {'earth': 'http://www.wikidata.org/entity/Q2',
+ 'moon': 'http://www.wikidata.org/entity/Q405'}
|
Add moon (Q405) to the list of globes
|
## Code Before:
__version__ = '$Id$'
from pywikibot import family
# The wikidata family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikidata'
self.langs = {
'wikidata': 'www.wikidata.org',
'repo': 'wikidata-test-repo.wikimedia.de',
'client': 'wikidata-test-client.wikimedia.de',
'test': 'test.wikidata.org',
}
def scriptpath(self, code):
if code == 'client':
return ''
return super(Family, self).scriptpath(code)
def shared_data_repository(self, code, transcluded=False):
"""Always return a repository tupe. This enables testing whether
the site opject is the repository itself, see Site.is_data_repository()
"""
if transcluded:
return (None, None)
else:
if code == 'wikidata':
return ('wikidata', 'wikidata')
elif code == 'test':
return ('test', 'wikidata')
else:
return ('repo', 'wikidata')
def globes(self, code):
"""Supported globes for Coordinate datatype"""
return {'earth': 'http://www.wikidata.org/entity/Q2'}
## Instruction:
Add moon (Q405) to the list of globes
## Code After:
__version__ = '$Id$'
from pywikibot import family
# The wikidata family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikidata'
self.langs = {
'wikidata': 'www.wikidata.org',
'repo': 'wikidata-test-repo.wikimedia.de',
'client': 'wikidata-test-client.wikimedia.de',
'test': 'test.wikidata.org',
}
def scriptpath(self, code):
if code == 'client':
return ''
return super(Family, self).scriptpath(code)
def shared_data_repository(self, code, transcluded=False):
"""Always return a repository tupe. This enables testing whether
the site opject is the repository itself, see Site.is_data_repository()
"""
if transcluded:
return (None, None)
else:
if code == 'wikidata':
return ('wikidata', 'wikidata')
elif code == 'test':
return ('test', 'wikidata')
else:
return ('repo', 'wikidata')
def globes(self, code):
"""Supported globes for Coordinate datatype"""
return {'earth': 'http://www.wikidata.org/entity/Q2',
'moon': 'http://www.wikidata.org/entity/Q405'}
|
...
"""Supported globes for Coordinate datatype"""
return {'earth': 'http://www.wikidata.org/entity/Q2',
'moon': 'http://www.wikidata.org/entity/Q405'}
...
|
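The mapping is a plain dict from globe keyword to entity URI; a small lookup sketch (the Earth fallback here is an assumption for illustration -- pywikibot itself may treat unknown globes differently):

GLOBES = {
    'earth': 'http://www.wikidata.org/entity/Q2',
    'moon': 'http://www.wikidata.org/entity/Q405',
}

def globe_uri(name):
    # Unknown globes fall back to Earth in this sketch only.
    return GLOBES.get(name.lower(), GLOBES['earth'])

print(globe_uri('Moon'))  # http://www.wikidata.org/entity/Q405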
43afda1fa0ae2d0011d6b87b5c05e3eb1fe13a21
|
viewer_examples/viewers/collection_viewer.py
|
viewer_examples/viewers/collection_viewer.py
|
import numpy as np
from skimage import data
from skimage.viewer import CollectionViewer
img = data.lena()
img_collection = [np.uint8(img * 0.9**i) for i in range(20)]
view = CollectionViewer(img_collection)
view.show()
|
import numpy as np
from skimage import data
from skimage.viewer import CollectionViewer
from skimage.transform import build_gaussian_pyramid
img = data.lena()
img_collection = tuple(build_gaussian_pyramid(img))
view = CollectionViewer(img_collection)
view.show()
|
Use gaussian pyramid function for collection viewer example
|
Use gaussian pyramid function for collection viewer example
|
Python
|
bsd-3-clause
|
rjeli/scikit-image,juliusbierk/scikit-image,vighneshbirodkar/scikit-image,Midafi/scikit-image,newville/scikit-image,SamHames/scikit-image,bennlich/scikit-image,vighneshbirodkar/scikit-image,ofgulban/scikit-image,blink1073/scikit-image,GaZ3ll3/scikit-image,keflavich/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,emon10005/scikit-image,youprofit/scikit-image,ofgulban/scikit-image,newville/scikit-image,bsipocz/scikit-image,Midafi/scikit-image,almarklein/scikit-image,jwiggins/scikit-image,rjeli/scikit-image,chintak/scikit-image,SamHames/scikit-image,michaelaye/scikit-image,chintak/scikit-image,almarklein/scikit-image,pratapvardhan/scikit-image,dpshelio/scikit-image,paalge/scikit-image,vighneshbirodkar/scikit-image,bennlich/scikit-image,almarklein/scikit-image,oew1v07/scikit-image,Britefury/scikit-image,keflavich/scikit-image,chriscrosscutler/scikit-image,blink1073/scikit-image,rjeli/scikit-image,jwiggins/scikit-image,paalge/scikit-image,GaZ3ll3/scikit-image,warmspringwinds/scikit-image,almarklein/scikit-image,Hiyorimi/scikit-image,juliusbierk/scikit-image,chintak/scikit-image,ClinicalGraphics/scikit-image,chriscrosscutler/scikit-image,Hiyorimi/scikit-image,SamHames/scikit-image,michaelpacer/scikit-image,bsipocz/scikit-image,ajaybhat/scikit-image,oew1v07/scikit-image,warmspringwinds/scikit-image,ClinicalGraphics/scikit-image,Britefury/scikit-image,robintw/scikit-image,SamHames/scikit-image,paalge/scikit-image,WarrenWeckesser/scikits-image,ofgulban/scikit-image,emon10005/scikit-image,youprofit/scikit-image,michaelaye/scikit-image,ajaybhat/scikit-image,dpshelio/scikit-image,pratapvardhan/scikit-image,robintw/scikit-image,WarrenWeckesser/scikits-image
|
import numpy as np
from skimage import data
from skimage.viewer import CollectionViewer
+ from skimage.transform import build_gaussian_pyramid
+
img = data.lena()
- img_collection = [np.uint8(img * 0.9**i) for i in range(20)]
+ img_collection = tuple(build_gaussian_pyramid(img))
view = CollectionViewer(img_collection)
view.show()
|
Use gaussian pyramid function for collection viewer example
|
## Code Before:
import numpy as np
from skimage import data
from skimage.viewer import CollectionViewer
img = data.lena()
img_collection = [np.uint8(img * 0.9**i) for i in range(20)]
view = CollectionViewer(img_collection)
view.show()
## Instruction:
Use gaussian pyramid function for collection viewer example
## Code After:
import numpy as np
from skimage import data
from skimage.viewer import CollectionViewer
from skimage.transform import build_gaussian_pyramid
img = data.lena()
img_collection = tuple(build_gaussian_pyramid(img))
view = CollectionViewer(img_collection)
view.show()
|
// ... existing code ...
from skimage.viewer import CollectionViewer
from skimage.transform import build_gaussian_pyramid
// ... modified code ...
img = data.lena()
img_collection = tuple(build_gaussian_pyramid(img))
// ... rest of the code ...
|
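For anyone trying this against a current scikit-image, both names have since changed: the pyramid builder is exposed as skimage.transform.pyramid_gaussian, and data.lena() was removed in favour of other sample images. A roughly equivalent modern sketch (channel_axis is the newer spelling; older releases used a multichannel flag):

from skimage import data
from skimage.transform import pyramid_gaussian

img = data.astronaut()  # stand-in sample image; lena() is gone from recent releases
pyramid = tuple(pyramid_gaussian(img, downscale=2, channel_axis=-1))
print([layer.shape for layer in pyramid[:3]])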
6f6c743a03d8162abca9e5406e5e6c2e51f77052
|
users/views.py
|
users/views.py
|
from django.shortcuts import render_to_response
from django.contrib.auth.models import User
from django.views.generic.detail import DetailView
from core.views import RyndaFormView, RyndaListView
from users.forms import SimpleRegistrationForm
class UserDetail(DetailView):
model = User
template_name = 'user_profile.html'
context_object_name = 'u'
class UserList(RyndaListView):
template_name = 'userlist.html'
context_object_name = 'users'
queryset = User.objects.select_related().filter(is_active=True).order_by('date_joined')
paginator_url = '/user/page/'
paginate_by = 10
class CreateUser(RyndaFormView):
template_name = 'registerform_simple.html'
model = User
form_class = SimpleRegistrationForm
def create_user(request):
return render_to_response('registerform_simple.html',
{'form': SimpleRegistrationForm(),}
)
|
from django.shortcuts import redirect, render_to_response
from django.contrib.auth.models import User
from django.views.generic.detail import DetailView
from core.views import RyndaFormView, RyndaListView
from users.forms import SimpleRegistrationForm
from users.models import Users
class UserDetail(DetailView):
model = User
template_name = 'user_profile.html'
context_object_name = 'u'
class UserList(RyndaListView):
template_name = 'userlist.html'
context_object_name = 'users'
queryset = User.objects.select_related().filter(is_active=True).order_by('date_joined')
paginator_url = '/user/page/'
paginate_by = 10
class CreateUser(RyndaFormView):
template_name = 'registerform_simple.html'
#model = User
form_class = SimpleRegistrationForm
success_url = '/'
def form_valid(self, form):
#print self.request.META['HTTP_HOST']
user = User()
ce = form.cleaned_data
user.email = ce['email']
user.login = ce['email']
user.set_password(ce['password1'])
user.save()
#profile = Users.objects.create(user=user, ipAddr=self.request.META['REMOTE_ADDR'])
#profile.user = user
#profile.email = ce['email']
#profile.ipAddr = vself.request.META['REMOTE_ADDR']
#profile.save()
return redirect(self.success_url)
def create_user(request):
return render_to_response('registerform_simple.html',
{'form': SimpleRegistrationForm(),}
)
|
Fix redirect after successful registration
|
Fix redirect after successful registration
|
Python
|
mit
|
sarutobi/ritmserdtsa,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/ritmserdtsa,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/Rynda
|
- from django.shortcuts import render_to_response
+ from django.shortcuts import redirect, render_to_response
from django.contrib.auth.models import User
from django.views.generic.detail import DetailView
from core.views import RyndaFormView, RyndaListView
from users.forms import SimpleRegistrationForm
-
+ from users.models import Users
class UserDetail(DetailView):
model = User
template_name = 'user_profile.html'
context_object_name = 'u'
class UserList(RyndaListView):
template_name = 'userlist.html'
context_object_name = 'users'
queryset = User.objects.select_related().filter(is_active=True).order_by('date_joined')
paginator_url = '/user/page/'
paginate_by = 10
class CreateUser(RyndaFormView):
template_name = 'registerform_simple.html'
- model = User
+ #model = User
form_class = SimpleRegistrationForm
+ success_url = '/'
+
+ def form_valid(self, form):
+ #print self.request.META['HTTP_HOST']
+ user = User()
+ ce = form.cleaned_data
+ user.email = ce['email']
+ user.login = ce['email']
+ user.set_password(ce['password1'])
+ user.save()
+ #profile = Users.objects.create(user=user, ipAddr=self.request.META['REMOTE_ADDR'])
+ #profile.user = user
+ #profile.email = ce['email']
+ #profile.ipAddr = vself.request.META['REMOTE_ADDR']
+ #profile.save()
+ return redirect(self.success_url)
+
def create_user(request):
return render_to_response('registerform_simple.html',
{'form': SimpleRegistrationForm(),}
)
|
Fix redirect after successful registration
|
## Code Before:
from django.shortcuts import render_to_response
from django.contrib.auth.models import User
from django.views.generic.detail import DetailView
from core.views import RyndaFormView, RyndaListView
from users.forms import SimpleRegistrationForm
class UserDetail(DetailView):
model = User
template_name = 'user_profile.html'
context_object_name = 'u'
class UserList(RyndaListView):
template_name = 'userlist.html'
context_object_name = 'users'
queryset = User.objects.select_related().filter(is_active=True).order_by('date_joined')
paginator_url = '/user/page/'
paginate_by = 10
class CreateUser(RyndaFormView):
template_name = 'registerform_simple.html'
model = User
form_class = SimpleRegistrationForm
def create_user(request):
return render_to_response('registerform_simple.html',
{'form': SimpleRegistrationForm(),}
)
## Instruction:
Fix redirect after successful registration
## Code After:
from django.shortcuts import redirect, render_to_response
from django.contrib.auth.models import User
from django.views.generic.detail import DetailView
from core.views import RyndaFormView, RyndaListView
from users.forms import SimpleRegistrationForm
from users.models import Users
class UserDetail(DetailView):
model = User
template_name = 'user_profile.html'
context_object_name = 'u'
class UserList(RyndaListView):
template_name = 'userlist.html'
context_object_name = 'users'
queryset = User.objects.select_related().filter(is_active=True).order_by('date_joined')
paginator_url = '/user/page/'
paginate_by = 10
class CreateUser(RyndaFormView):
template_name = 'registerform_simple.html'
#model = User
form_class = SimpleRegistrationForm
success_url = '/'
def form_valid(self, form):
#print self.request.META['HTTP_HOST']
user = User()
ce = form.cleaned_data
user.email = ce['email']
user.login = ce['email']
user.set_password(ce['password1'])
user.save()
#profile = Users.objects.create(user=user, ipAddr=self.request.META['REMOTE_ADDR'])
#profile.user = user
#profile.email = ce['email']
#profile.ipAddr = vself.request.META['REMOTE_ADDR']
#profile.save()
return redirect(self.success_url)
def create_user(request):
return render_to_response('registerform_simple.html',
{'form': SimpleRegistrationForm(),}
)
|
// ... existing code ...
from django.shortcuts import redirect, render_to_response
from django.contrib.auth.models import User
// ... modified code ...
from users.forms import SimpleRegistrationForm
from users.models import Users
...
template_name = 'registerform_simple.html'
#model = User
form_class = SimpleRegistrationForm
success_url = '/'
def form_valid(self, form):
#print self.request.META['HTTP_HOST']
user = User()
ce = form.cleaned_data
user.email = ce['email']
user.login = ce['email']
user.set_password(ce['password1'])
user.save()
#profile = Users.objects.create(user=user, ipAddr=self.request.META['REMOTE_ADDR'])
#profile.user = user
#profile.email = ce['email']
#profile.ipAddr = vself.request.META['REMOTE_ADDR']
#profile.save()
return redirect(self.success_url)
// ... rest of the code ...
|
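The commit above is an instance of a standard Django pattern: override form_valid() on a form view so that a valid POST creates the record and returns a redirect. A self-contained sketch of that pattern follows; RegistrationForm and the '/' success URL are illustrative stand-ins, and create_user() replaces the manual set_password() dance because it hashes the password and saves in one call (note that django.contrib.auth's User keys on username, which the record's user.login assignment would not populate):

from django import forms
from django.contrib.auth.models import User
from django.shortcuts import redirect
from django.views.generic.edit import FormView

class RegistrationForm(forms.Form):
    email = forms.EmailField()
    password1 = forms.CharField(widget=forms.PasswordInput)

class SignupView(FormView):
    template_name = 'registerform_simple.html'
    form_class = RegistrationForm
    success_url = '/'

    def form_valid(self, form):
        data = form.cleaned_data
        # create_user() hashes the password and saves the row in one step.
        User.objects.create_user(username=data['email'],
                                 email=data['email'],
                                 password=data['password1'])
        return redirect(self.success_url)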
98cbd5207bd25fb0fafd25f18870c771479255e1
|
run-tests.py
|
run-tests.py
|
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
|
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-W', 'default', # Enable default handling for all warnings.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
|
Enable default warnings while testing.
|
Enable default warnings while testing.
|
Python
|
mit
|
shawnbrown/gpn,shawnbrown/gpn
|
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
+ '-W', 'default', # Enable default handling for all warnings.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
|
Enable default warnings while testing.
|
## Code Before:
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
## Instruction:
Enable default warnings while testing.
## Code After:
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-W', 'default', # Enable default handling for all warnings.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
|
// ... existing code ...
'-B', # Don't write .pyc files on import.
'-W', 'default', # Enable default handling for all warnings.
'-m', 'unittest', # Run the unittest module as a script.
// ... rest of the code ...
|
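The new -W default flag restores default warning handling (each distinct warning is shown once per location), which is what surfaces DeprecationWarnings during a test run. For illustration only, the same behaviour can be requested through the environment instead of argv:

import os
import subprocess
import sys

# PYTHONWARNINGS is the environment-variable equivalent of -W.
env = dict(os.environ, PYTHONWARNINGS='default')
cmd = [sys.executable, '-B', '-m', 'unittest', 'discover', '-s', 'tests']
sys.exit(subprocess.call(cmd, env=env))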
e7627ee439e2e4f17466bf124629ae353460a68d
|
__init__.py
|
__init__.py
|
import test
import afip
import invoice
import config
import partner
import account
import country
import report
import currency
import product
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
import test
import afip
import invoice
import config
import partner
import account
import country
import report
import currency
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Changing product types is really dangerous!!!
|
[FIX] Changing product types is really dangerous!!!
|
Python
|
agpl-3.0
|
odoo-l10n-ar/l10n_ar_invoice
|
import test
import afip
import invoice
import config
import partner
import account
import country
import report
import currency
- import product
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Changing product types is really dangerous!!!
|
## Code Before:
import test
import afip
import invoice
import config
import partner
import account
import country
import report
import currency
import product
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
## Instruction:
Changing product types is really dangerous!!!
## Code After:
import test
import afip
import invoice
import config
import partner
import account
import country
import report
import currency
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# ... existing code ...
import currency
# ... rest of the code ...
|
6c6a774ef2614ca82fbe61ec04e9b6a75415b015
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name = 'openhomedevice',
packages = ['openhomedevice'],
version = '0.2.1',
description = 'Provides an API for requesting information from an Openhome device',
author = 'Barry John Williams',
author_email = '[email protected]',
url = 'https://github.com/bazwilliams/openhomedevice',
download_url = 'https://github.com/bazwilliams/openhomedevice/tarball/0.2',
keywords = ['upnp', 'dlna', 'openhome', 'linn', 'ds', 'music', 'render'],
install_requires = ['requests', 'lxml'],
classifiers = [],
)
|
from distutils.core import setup
setup(
name = 'openhomedevice',
packages = ['openhomedevice'],
version = '0.2.2',
description = 'Provides an API for requesting information from an Openhome device',
author = 'Barry John Williams',
author_email = '[email protected]',
url = 'https://github.com/cak85/openhomedevice',
download_url = 'https://github.com/cak85/openhomedevice/tarball/0.2.2',
keywords = ['upnp', 'dlna', 'openhome', 'linn', 'ds', 'music', 'render'],
install_requires = ['requests', 'lxml'],
classifiers = [],
)
|
Change url to this fork
|
Change url to this fork
|
Python
|
mit
|
bazwilliams/openhomedevice
|
from distutils.core import setup
setup(
name = 'openhomedevice',
packages = ['openhomedevice'],
- version = '0.2.1',
+ version = '0.2.2',
description = 'Provides an API for requesting information from an Openhome device',
author = 'Barry John Williams',
author_email = '[email protected]',
- url = 'https://github.com/bazwilliams/openhomedevice',
+ url = 'https://github.com/cak85/openhomedevice',
- download_url = 'https://github.com/bazwilliams/openhomedevice/tarball/0.2',
+ download_url = 'https://github.com/cak85/openhomedevice/tarball/0.2.2',
keywords = ['upnp', 'dlna', 'openhome', 'linn', 'ds', 'music', 'render'],
install_requires = ['requests', 'lxml'],
classifiers = [],
)
|
Change url to this fork
|
## Code Before:
from distutils.core import setup
setup(
name = 'openhomedevice',
packages = ['openhomedevice'],
version = '0.2.1',
description = 'Provides an API for requesting information from an Openhome device',
author = 'Barry John Williams',
author_email = '[email protected]',
url = 'https://github.com/bazwilliams/openhomedevice',
download_url = 'https://github.com/bazwilliams/openhomedevice/tarball/0.2',
keywords = ['upnp', 'dlna', 'openhome', 'linn', 'ds', 'music', 'render'],
install_requires = ['requests', 'lxml'],
classifiers = [],
)
## Instruction:
Change url to this fork
## Code After:
from distutils.core import setup
setup(
name = 'openhomedevice',
packages = ['openhomedevice'],
version = '0.2.2',
description = 'Provides an API for requesting information from an Openhome device',
author = 'Barry John Williams',
author_email = '[email protected]',
url = 'https://github.com/cak85/openhomedevice',
download_url = 'https://github.com/cak85/openhomedevice/tarball/0.2.2',
keywords = ['upnp', 'dlna', 'openhome', 'linn', 'ds', 'music', 'render'],
install_requires = ['requests', 'lxml'],
classifiers = [],
)
|
# ... existing code ...
packages = ['openhomedevice'],
version = '0.2.2',
description = 'Provides an API for requesting information from an Openhome device',
# ... modified code ...
author_email = '[email protected]',
url = 'https://github.com/cak85/openhomedevice',
download_url = 'https://github.com/cak85/openhomedevice/tarball/0.2.2',
keywords = ['upnp', 'dlna', 'openhome', 'linn', 'ds', 'music', 'render'],
# ... rest of the code ...
|
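Worth noticing in the old metadata: version said 0.2.1 while download_url still pointed at the 0.2 tarball, exactly the drift this bump had to repair by hand. A sketch of how that drift is commonly avoided by deriving the URL from a single constant (the setuptools import is an illustrative modernization, not part of the commit):

from setuptools import setup

VERSION = '0.2.2'  # single source of truth for the fields below

setup(
    name='openhomedevice',
    packages=['openhomedevice'],
    version=VERSION,
    url='https://github.com/cak85/openhomedevice',
    download_url='https://github.com/cak85/openhomedevice/tarball/' + VERSION,
    install_requires=['requests', 'lxml'],
)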
82cb6d190ce1e805914cc791518c97e063ecdc96
|
tests/test_individual.py
|
tests/test_individual.py
|
import sys, os
myPath = os.path.dirname(os.path.abspath(__file__))
print(myPath)
sys.path.insert(0, myPath + '/../SATSolver')
from unittest import TestCase
from individual import Individual
from BitVector import BitVector
from bitarray import bitarray
class TestIndividual(TestCase):
"""
Testing class for Individual.
"""
def test_get(self):
ind = Individual(9)
ind.data = bitarray("011010100")
self.assertEqual(ind.get(5), 1)
self.assertEqual(ind.get(1), 0)
self.assertEqual(ind.get(10), None)
def test_set(self):
ind = Individual(9)
ind.data = bitarray("011010100")
ind.set(2, 1)
self.assertEqual(ind.get(2), 1)
ind.set(7, 0)
self.assertEqual(ind.get(7), 0)
ind.set(6, 1)
self.assertEqual(ind.get(6), 1)
def test_flip(self):
ind = Individual(9)
ind.data = bitarray("011010100")
ind.flip(1)
self.assertEqual(ind.get(1), 1)
ind.flip(8)
self.assertEqual(ind.get(8), 1)
ind.flip(4)
self.assertEqual(ind.get(4), 1)
|
import sys, os
myPath = os.path.dirname(os.path.abspath(__file__))
print(myPath)
sys.path.insert(0, myPath + '/../SATSolver')
from unittest import TestCase
from individual import Individual
from bitarray import bitarray
class TestIndividual(TestCase):
"""
Testing class for Individual.
"""
def test_get(self):
ind = Individual(9)
ind.data = bitarray("011010100")
self.assertEqual(ind.get(5), 1)
self.assertEqual(ind.get(1), 0)
self.assertEqual(ind.get(10), None)
def test_set(self):
ind = Individual(9)
ind.data = bitarray("011010100")
ind.set(2, 1)
self.assertEqual(ind.get(2), 1)
ind.set(7, 0)
self.assertEqual(ind.get(7), 0)
ind.set(6, 1)
self.assertEqual(ind.get(6), 1)
def test_flip(self):
ind = Individual(9)
ind.data = bitarray("011010100")
ind.flip(1)
self.assertEqual(ind.get(1), 1)
ind.flip(8)
self.assertEqual(ind.get(8), 1)
ind.flip(4)
self.assertEqual(ind.get(4), 1)
|
Remove BitVector import - Build fails
|
Remove BitVector import - Build fails
|
Python
|
mit
|
Imperium-Software/resolver,Imperium-Software/resolver,Imperium-Software/resolver,Imperium-Software/resolver
|
import sys, os
myPath = os.path.dirname(os.path.abspath(__file__))
print(myPath)
sys.path.insert(0, myPath + '/../SATSolver')
from unittest import TestCase
from individual import Individual
- from BitVector import BitVector
from bitarray import bitarray
class TestIndividual(TestCase):
"""
Testing class for Individual.
"""
def test_get(self):
ind = Individual(9)
ind.data = bitarray("011010100")
self.assertEqual(ind.get(5), 1)
self.assertEqual(ind.get(1), 0)
self.assertEqual(ind.get(10), None)
def test_set(self):
ind = Individual(9)
ind.data = bitarray("011010100")
ind.set(2, 1)
self.assertEqual(ind.get(2), 1)
ind.set(7, 0)
self.assertEqual(ind.get(7), 0)
ind.set(6, 1)
self.assertEqual(ind.get(6), 1)
def test_flip(self):
ind = Individual(9)
ind.data = bitarray("011010100")
ind.flip(1)
self.assertEqual(ind.get(1), 1)
ind.flip(8)
self.assertEqual(ind.get(8), 1)
ind.flip(4)
self.assertEqual(ind.get(4), 1)
|
Remove BitVector import - Build fails
|
## Code Before:
import sys, os
myPath = os.path.dirname(os.path.abspath(__file__))
print(myPath)
sys.path.insert(0, myPath + '/../SATSolver')
from unittest import TestCase
from individual import Individual
from BitVector import BitVector
from bitarray import bitarray
class TestIndividual(TestCase):
"""
Testing class for Individual.
"""
def test_get(self):
ind = Individual(9)
ind.data = bitarray("011010100")
self.assertEqual(ind.get(5), 1)
self.assertEqual(ind.get(1), 0)
self.assertEqual(ind.get(10), None)
def test_set(self):
ind = Individual(9)
ind.data = bitarray("011010100")
ind.set(2, 1)
self.assertEqual(ind.get(2), 1)
ind.set(7, 0)
self.assertEqual(ind.get(7), 0)
ind.set(6, 1)
self.assertEqual(ind.get(6), 1)
def test_flip(self):
ind = Individual(9)
ind.data = bitarray("011010100")
ind.flip(1)
self.assertEqual(ind.get(1), 1)
ind.flip(8)
self.assertEqual(ind.get(8), 1)
ind.flip(4)
self.assertEqual(ind.get(4), 1)
## Instruction:
Remove BitVector import - Build fails
## Code After:
import sys, os
myPath = os.path.dirname(os.path.abspath(__file__))
print(myPath)
sys.path.insert(0, myPath + '/../SATSolver')
from unittest import TestCase
from individual import Individual
from bitarray import bitarray
class TestIndividual(TestCase):
"""
Testing class for Individual.
"""
def test_get(self):
ind = Individual(9)
ind.data = bitarray("011010100")
self.assertEqual(ind.get(5), 1)
self.assertEqual(ind.get(1), 0)
self.assertEqual(ind.get(10), None)
def test_set(self):
ind = Individual(9)
ind.data = bitarray("011010100")
ind.set(2, 1)
self.assertEqual(ind.get(2), 1)
ind.set(7, 0)
self.assertEqual(ind.get(7), 0)
ind.set(6, 1)
self.assertEqual(ind.get(6), 1)
def test_flip(self):
ind = Individual(9)
ind.data = bitarray("011010100")
ind.flip(1)
self.assertEqual(ind.get(1), 1)
ind.flip(8)
self.assertEqual(ind.get(8), 1)
ind.flip(4)
self.assertEqual(ind.get(4), 1)
|
# ... existing code ...
from individual import Individual
from bitarray import bitarray
# ... rest of the code ...
|
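The tests above pin down the Individual contract: 1-based indexing, None for out-of-range reads, and in-place set/flip. The real class lives in SATSolver/individual.py and is not shown in the record, so the following is only a minimal bitarray-backed sketch that satisfies the same assertions:

from bitarray import bitarray

class Individual:
    def __init__(self, length):
        self.data = bitarray(length)
        self.data.setall(False)

    def get(self, i):
        # 1-based index; out-of-range reads answer None, as the tests expect.
        if 1 <= i <= len(self.data):
            return int(self.data[i - 1])
        return None

    def set(self, i, value):
        self.data[i - 1] = bool(value)

    def flip(self, i):
        self.data[i - 1] = not self.data[i - 1]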
4ebdc10add211cb238002fcc79a7cf8409d99825
|
djoser/social/views.py
|
djoser/social/views.py
|
from rest_framework import generics, permissions, status
from rest_framework.response import Response
from social_django.utils import load_backend, load_strategy
from djoser.conf import settings
from djoser.social.serializers import ProviderAuthSerializer
class ProviderAuthView(generics.CreateAPIView):
permission_classes = [permissions.AllowAny]
serializer_class = ProviderAuthSerializer
def get(self, request, *args, **kwargs):
redirect_uri = request.GET.get("redirect_uri")
if redirect_uri not in settings.SOCIAL_AUTH_ALLOWED_REDIRECT_URIS:
return Response(status=status.HTTP_400_BAD_REQUEST)
strategy = load_strategy(request)
strategy.session_set("redirect_uri", redirect_uri)
backend_name = self.kwargs["provider"]
backend = load_backend(strategy, backend_name, redirect_uri=redirect_uri)
authorization_url = backend.auth_url()
return Response(data={"authorization_url": authorization_url})
|
from rest_framework import generics, permissions, status
from rest_framework.response import Response
from social_django.utils import load_backend, load_strategy
from djoser.conf import settings
from djoser.social.serializers import ProviderAuthSerializer
class ProviderAuthView(generics.CreateAPIView):
permission_classes = [permissions.AllowAny]
serializer_class = ProviderAuthSerializer
def get(self, request, *args, **kwargs):
redirect_uri = request.GET.get("redirect_uri")
if redirect_uri not in settings.SOCIAL_AUTH_ALLOWED_REDIRECT_URIS:
return Response("Missing SOCIAL_AUTH_ALLOWED_REDIRECT_URIS", status=status.HTTP_400_BAD_REQUEST)
strategy = load_strategy(request)
strategy.session_set("redirect_uri", redirect_uri)
backend_name = self.kwargs["provider"]
backend = load_backend(strategy, backend_name, redirect_uri=redirect_uri)
authorization_url = backend.auth_url()
return Response(data={"authorization_url": authorization_url})
|
Fix: add a friendly tip when SOCIAL_AUTH_ALLOWED_REDIRECT_URIS is missing
|
Fix: add a friendly tip when SOCIAL_AUTH_ALLOWED_REDIRECT_URIS is missing
I forgot to add SOCIAL_AUTH_ALLOWED_REDIRECT_URIS to my config,
and the endpoint returned a bare 400 error. I couldn't tell why and spent
a long time tracking the issue down, so I added a friendly tip.
(Sorry, my English is not great; thank you all.)
|
Python
|
mit
|
sunscrapers/djoser,sunscrapers/djoser,sunscrapers/djoser
|
from rest_framework import generics, permissions, status
from rest_framework.response import Response
from social_django.utils import load_backend, load_strategy
from djoser.conf import settings
from djoser.social.serializers import ProviderAuthSerializer
class ProviderAuthView(generics.CreateAPIView):
permission_classes = [permissions.AllowAny]
serializer_class = ProviderAuthSerializer
def get(self, request, *args, **kwargs):
redirect_uri = request.GET.get("redirect_uri")
if redirect_uri not in settings.SOCIAL_AUTH_ALLOWED_REDIRECT_URIS:
- return Response(status=status.HTTP_400_BAD_REQUEST)
+ return Response("Missing SOCIAL_AUTH_ALLOWED_REDIRECT_URIS", status=status.HTTP_400_BAD_REQUEST)
strategy = load_strategy(request)
strategy.session_set("redirect_uri", redirect_uri)
backend_name = self.kwargs["provider"]
backend = load_backend(strategy, backend_name, redirect_uri=redirect_uri)
authorization_url = backend.auth_url()
return Response(data={"authorization_url": authorization_url})
|
Fix: add a friendly tip when SOCIAL_AUTH_ALLOWED_REDIRECT_URIS is missing
|
## Code Before:
from rest_framework import generics, permissions, status
from rest_framework.response import Response
from social_django.utils import load_backend, load_strategy
from djoser.conf import settings
from djoser.social.serializers import ProviderAuthSerializer
class ProviderAuthView(generics.CreateAPIView):
permission_classes = [permissions.AllowAny]
serializer_class = ProviderAuthSerializer
def get(self, request, *args, **kwargs):
redirect_uri = request.GET.get("redirect_uri")
if redirect_uri not in settings.SOCIAL_AUTH_ALLOWED_REDIRECT_URIS:
return Response(status=status.HTTP_400_BAD_REQUEST)
strategy = load_strategy(request)
strategy.session_set("redirect_uri", redirect_uri)
backend_name = self.kwargs["provider"]
backend = load_backend(strategy, backend_name, redirect_uri=redirect_uri)
authorization_url = backend.auth_url()
return Response(data={"authorization_url": authorization_url})
## Instruction:
Fix: add a friendly tip when SOCIAL_AUTH_ALLOWED_REDIRECT_URIS is missing
## Code After:
from rest_framework import generics, permissions, status
from rest_framework.response import Response
from social_django.utils import load_backend, load_strategy
from djoser.conf import settings
from djoser.social.serializers import ProviderAuthSerializer
class ProviderAuthView(generics.CreateAPIView):
permission_classes = [permissions.AllowAny]
serializer_class = ProviderAuthSerializer
def get(self, request, *args, **kwargs):
redirect_uri = request.GET.get("redirect_uri")
if redirect_uri not in settings.SOCIAL_AUTH_ALLOWED_REDIRECT_URIS:
return Response("Missing SOCIAL_AUTH_ALLOWED_REDIRECT_URIS", status=status.HTTP_400_BAD_REQUEST)
strategy = load_strategy(request)
strategy.session_set("redirect_uri", redirect_uri)
backend_name = self.kwargs["provider"]
backend = load_backend(strategy, backend_name, redirect_uri=redirect_uri)
authorization_url = backend.auth_url()
return Response(data={"authorization_url": authorization_url})
|
...
if redirect_uri not in settings.SOCIAL_AUTH_ALLOWED_REDIRECT_URIS:
return Response("Missing SOCIAL_AUTH_ALLOWED_REDIRECT_URIS", status=status.HTTP_400_BAD_REQUEST)
strategy = load_strategy(request)
...
|
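Returning a plain string keeps the fix minimal, though DRF clients often expect a structured error body. A variation on the same guard using ValidationError, shown purely as a design alternative rather than what the commit does:

from rest_framework.exceptions import ValidationError

def check_redirect_uri(redirect_uri, allowed_uris):
    # Raising ValidationError lets DRF render a structured 400 response,
    # e.g. {"redirect_uri": ["not listed in SOCIAL_AUTH_ALLOWED_REDIRECT_URIS"]}.
    if redirect_uri not in allowed_uris:
        raise ValidationError(
            {'redirect_uri': ['not listed in SOCIAL_AUTH_ALLOWED_REDIRECT_URIS']})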
d2e52377f90c81365bd0ff62c8bea95207b44328
|
indra/sources/sofia/api.py
|
indra/sources/sofia/api.py
|
import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
rel_sheet = book['Relations']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
|
import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
try:
rel_sheet = book['Relations']
except Exception as e:
rel_sheet = book['Causal']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
|
Handle Causal and Relations worksheets
|
Handle Causal and Relations worksheets
|
Python
|
bsd-2-clause
|
pvtodorov/indra,bgyori/indra,sorgerlab/indra,sorgerlab/indra,johnbachman/indra,pvtodorov/indra,johnbachman/indra,johnbachman/belpy,bgyori/indra,johnbachman/belpy,sorgerlab/belpy,pvtodorov/indra,pvtodorov/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/indra,sorgerlab/belpy,bgyori/indra,sorgerlab/indra
|
import openpyxl
from .processor import SofiaProcessor
+
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
+ try:
- rel_sheet = book['Relations']
+ rel_sheet = book['Relations']
+ except Exception as e:
+ rel_sheet = book['Causal']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
|
Handle Causal and Relations worksheets
|
## Code Before:
import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
rel_sheet = book['Relations']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
## Instruction:
Handle Causal and Relations worksheets
## Code After:
import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
try:
rel_sheet = book['Relations']
except Exception as e:
rel_sheet = book['Causal']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
|
// ... existing code ...
from .processor import SofiaProcessor
// ... modified code ...
book = openpyxl.load_workbook(fname, read_only=True)
try:
rel_sheet = book['Relations']
except Exception as e:
rel_sheet = book['Causal']
event_sheet = book['Events']
// ... rest of the code ...
|
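The blanket except Exception works, but it swallows unrelated failures and the bound e goes unused. Since openpyxl workbooks expose their sheet names via the sheetnames property, the fallback can also be an explicit lookup, sketched here as an alternative:

def get_relations_sheet(book):
    # Prefer the 'Relations' sheet, fall back to 'Causal', fail loudly otherwise.
    for name in ('Relations', 'Causal'):
        if name in book.sheetnames:
            return book[name]
    raise KeyError("workbook has neither a 'Relations' nor a 'Causal' sheet")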
f87f2ec4707bcf851e00ff58bbfe43f4d7523606
|
scripts/dnz-fetch.py
|
scripts/dnz-fetch.py
|
import os
import math
import json
from pprint import pprint
from pydnz import Dnz
dnz = Dnz(os.environ.get('DNZ_KEY'))
results = []
def dnz_request(page=1):
filters = {
'category': ['Images'],
'year': ['2005+TO+2006']
}
fields = ['id', 'date']
return dnz.search('', _and=filters, per_page=100, page=page, fields=fields)
# First request.
first_result = dnz_request()
results = first_result.records
iterations = math.ceil(first_result.result_count / 100)
# iterations = 5
# Subsequent requests.
for i in range(2, iterations + 1):
records = dnz_request(i).records
for record in records:
results.append({
'id': record['id'],
'date': record['date']
})
pprint(len(results))
with open('dnz-2015.json', 'w') as outfile:
json.dump(results, outfile)
|
import os
# import math
import json
from pprint import pprint
from datetime import date
from pydnz import Dnz
dnz_api = Dnz(os.environ.get('DNZ_KEY'))
YEAR_INTERVAL = 10
def request_dnz_records(timespan, page):
parameters = {
'_and': {
'category': ['Images'],
'year': [timespan]
},
'per_page': 100,
'page': page,
'fields': [
'id',
'date'
]
}
return dnz_api.search('', **parameters)
def format_timespan(year1, year2):
return '{y1}+TO+{y2}'.format(y1=year1, y2=year2)
def fetch_timespan(timespan):
pprint('Fetching ' + timespan)
first_result = request_dnz_records(timespan, 1)
store_results(first_result.records)
pprint(first_result.result_count)
# iterations = math.ceil(first_result.result_count / 100)
iterations = 1
# Subsequent requests.
for i in range(2, iterations + 1):
        records = request_dnz_records(timespan, i).records
store_results(records)
pprint(len(results))
def store_results(records):
for record in records:
results.append({
'id': record['id'],
'date': record['date']
})
if __name__ == '__main__':
results = []
present = date.today().year
past = present - YEAR_INTERVAL
years = [y for y in range(2005, 2006)]
timespans = [format_timespan(y, y + 1) for y in years]
for timespan in timespans:
fetch_timespan(timespan)
with open('dnz-records.json', 'w') as outfile:
json.dump(results, outfile)
|
Refactor DNZ fetch script before overhaul
|
Refactor DNZ fetch script before overhaul
|
Python
|
mit
|
judsonsam/tekautoday,judsonsam/tekautoday,judsonsam/tekautoday,judsonsam/tekautoday
|
import os
- import math
+ # import math
import json
from pprint import pprint
+ from datetime import date
from pydnz import Dnz
- dnz = Dnz(os.environ.get('DNZ_KEY'))
+ dnz_api = Dnz(os.environ.get('DNZ_KEY'))
+ YEAR_INTERVAL = 10
-
- results = []
- def dnz_request(page=1):
- filters = {
+ def request_dnz_records(timespan, page):
+ parameters = {
+ '_and': {
- 'category': ['Images'],
+ 'category': ['Images'],
- 'year': ['2005+TO+2006']
+ 'year': [timespan]
+ },
+ 'per_page': 100,
+ 'page': page,
+ 'fields': [
+ 'id',
+ 'date'
+ ]
}
- fields = ['id', 'date']
- return dnz.search('', _and=filters, per_page=100, page=page, fields=fields)
+ return dnz_api.search('', **parameters)
- # First request.
- first_result = dnz_request()
- results = first_result.records
- iterations = math.ceil(first_result.result_count / 100)
- # iterations = 5
+ def format_timespan(year1, year2):
+ return '{y1}+TO+{y2}'.format(y1=year1, y2=year2)
+
+ def fetch_timespan(timespan):
+ pprint('Fetching ' + timespan)
+
+ first_result = request_dnz_records(timespan, 1)
+ store_results(first_result.records)
+
+ pprint(first_result.result_count)
+
+ # iterations = math.ceil(first_result.result_count / 100)
+ iterations = 1
+
- # Subsequent requests.
+ # Subsequent requests.
- for i in range(2, iterations + 1):
+ for i in range(2, iterations + 1):
- records = dnz_request(i).records
+ records = request_dnz_records(timespan, i).records
+ store_results(records)
+ pprint(len(results))
+
+
+ def store_results(records):
for record in records:
results.append({
'id': record['id'],
'date': record['date']
})
- pprint(len(results))
- with open('dnz-2015.json', 'w') as outfile:
- json.dump(results, outfile)
+ if __name__ == '__main__':
+ results = []
+ present = date.today().year
+ past = present - YEAR_INTERVAL
+ years = [y for y in range(2005, 2006)]
+ timespans = [format_timespan(y, y + 1) for y in years]
+
+ for timespan in timespans:
+ fetch_timespan(timespan)
+
+ with open('dnz-records.json', 'w') as outfile:
+ json.dump(results, outfile)
+
|
Refactor DNZ fetch script before overhaul
|
## Code Before:
import os
import math
import json
from pprint import pprint
from pydnz import Dnz
dnz = Dnz(os.environ.get('DNZ_KEY'))
results = []
def dnz_request(page=1):
filters = {
'category': ['Images'],
'year': ['2005+TO+2006']
}
fields = ['id', 'date']
return dnz.search('', _and=filters, per_page=100, page=page, fields=fields)
# First request.
first_result = dnz_request()
results = first_result.records
iterations = math.ceil(first_result.result_count / 100)
# iterations = 5
# Subsequent requests.
for i in range(2, iterations + 1):
records = dnz_request(i).records
for record in records:
results.append({
'id': record['id'],
'date': record['date']
})
pprint(len(results))
with open('dnz-2015.json', 'w') as outfile:
json.dump(results, outfile)
## Instruction:
Refactor DNZ fetch script before overhaul
## Code After:
import os
# import math
import json
from pprint import pprint
from datetime import date
from pydnz import Dnz
dnz_api = Dnz(os.environ.get('DNZ_KEY'))
YEAR_INTERVAL = 10
def request_dnz_records(timespan, page):
parameters = {
'_and': {
'category': ['Images'],
'year': [timespan]
},
'per_page': 100,
'page': page,
'fields': [
'id',
'date'
]
}
return dnz_api.search('', **parameters)
def format_timespan(year1, year2):
return '{y1}+TO+{y2}'.format(y1=year1, y2=year2)
def fetch_timespan(timespan):
pprint('Fetching ' + timespan)
first_result = request_dnz_records(timespan, 1)
store_results(first_result.records)
pprint(first_result.result_count)
# iterations = math.ceil(first_result.result_count / 100)
iterations = 1
# Subsequent requests.
for i in range(2, iterations + 1):
        records = request_dnz_records(timespan, i).records
store_results(records)
pprint(len(results))
def store_results(records):
for record in records:
results.append({
'id': record['id'],
'date': record['date']
})
if __name__ == '__main__':
results = []
present = date.today().year
past = present - YEAR_INTERVAL
years = [y for y in range(2005, 2006)]
timespans = [format_timespan(y, y + 1) for y in years]
for timespan in timespans:
fetch_timespan(timespan)
with open('dnz-records.json', 'w') as outfile:
json.dump(results, outfile)
|
# ... existing code ...
import os
# import math
import json
# ... modified code ...
from pprint import pprint
from datetime import date
from pydnz import Dnz
...
dnz_api = Dnz(os.environ.get('DNZ_KEY'))
YEAR_INTERVAL = 10
...
def request_dnz_records(timespan, page):
parameters = {
'_and': {
'category': ['Images'],
'year': [timespan]
},
'per_page': 100,
'page': page,
'fields': [
'id',
'date'
]
}
return dnz_api.search('', **parameters)
def format_timespan(year1, year2):
return '{y1}+TO+{y2}'.format(y1=year1, y2=year2)
def fetch_timespan(timespan):
pprint('Fetching ' + timespan)
first_result = request_dnz_records(timespan, 1)
store_results(first_result.records)
pprint(first_result.result_count)
# iterations = math.ceil(first_result.result_count / 100)
iterations = 1
# Subsequent requests.
for i in range(2, iterations + 1):
        records = request_dnz_records(timespan, i).records
store_results(records)
pprint(len(results))
def store_results(records):
for record in records:
...
})
if __name__ == '__main__':
results = []
present = date.today().year
past = present - YEAR_INTERVAL
years = [y for y in range(2005, 2006)]
timespans = [format_timespan(y, y + 1) for y in years]
for timespan in timespans:
fetch_timespan(timespan)
with open('dnz-records.json', 'w') as outfile:
json.dump(results, outfile)
# ... rest of the code ...
|
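Two details are latent in the refactored script: iterations is pinned to 1 while testing, so the subsequent-requests loop does not yet run, and each call in that loop must receive both the timespan and the page number. The intended pagination, reusing request_dnz_records from the record above, would look roughly like this sketch (Python 3 division assumed):

import math

def fetch_all(timespan):
    # The first request reports the total; the API serves 100 records per page.
    first = request_dnz_records(timespan, 1)
    pages = math.ceil(first.result_count / 100)
    records = list(first.records)
    for page in range(2, pages + 1):
        records.extend(request_dnz_records(timespan, page).records)
    return records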
e7a4402736518ae27cc87d4cdb22d411de2fc301
|
packages/mono.py
|
packages/mono.py
|
class MonoPackage (Package):
def __init__ (self):
Package.__init__ (self, 'mono', '2.10',
sources = [
'http://ftp.novell.com/pub/%{name}/sources/%{name}/%{name}-%{version}.tar.bz2',
'patches/mono-runtime-relocation.patch'
],
configure_flags = [
'--with-jit=yes',
'--with-ikvm=no',
'--with-mcs-docs=no',
'--with-moonlight=no',
'--enable-quiet-build'
]
)
# Mono (in libgc) likes to fail to build randomly
self.make = 'for((i=0;i<20;i++)); do make && break; done'
# def prep (self):
# Package.prep (self)
# self.sh ('patch -p1 < "%{sources[1]}"')
def install (self):
Package.install (self)
if Package.profile.name == 'darwin':
self.sh ('sed -ie "s/libcairo.so.2/libcairo.2.dylib/" "%{prefix}/etc/mono/config"')
MonoPackage ()
|
class MonoPackage (Package):
def __init__ (self):
Package.__init__ (self, 'mono', '2.10',
sources = [
'http://ftp.novell.com/pub/%{name}/sources/%{name}/%{name}-%{version}.tar.bz2',
'patches/mono-runtime-relocation.patch'
],
configure_flags = [
'--with-jit=yes',
'--with-ikvm=no',
'--with-mcs-docs=no',
'--with-moonlight=no',
'--enable-quiet-build'
]
)
# Mono (in libgc) likes to fail to build randomly
self.make = 'for i in 1 2 3 4 5 6 7 8 9 10; do make && break; done'
# def prep (self):
# Package.prep (self)
# self.sh ('patch -p1 < "%{sources[1]}"')
def install (self):
Package.install (self)
if Package.profile.name == 'darwin':
self.sh ('sed -ie "s/libcairo.so.2/libcairo.2.dylib/" "%{prefix}/etc/mono/config"')
MonoPackage ()
|
Fix shell syntax for non-bash shells
|
Fix shell syntax for non-bash shells
The custom make command in mono.py is executed with the default shell,
which on some systems (e.g. dash on Ubuntu) doesn't support bash's
C-style for-loop syntax.
|
Python
|
mit
|
mono/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild,bl8/bockbuild,mono/bockbuild,BansheeMediaPlayer/bockbuild,bl8/bockbuild,bl8/bockbuild
|
class MonoPackage (Package):
def __init__ (self):
Package.__init__ (self, 'mono', '2.10',
sources = [
'http://ftp.novell.com/pub/%{name}/sources/%{name}/%{name}-%{version}.tar.bz2',
'patches/mono-runtime-relocation.patch'
],
configure_flags = [
'--with-jit=yes',
'--with-ikvm=no',
'--with-mcs-docs=no',
'--with-moonlight=no',
'--enable-quiet-build'
]
)
# Mono (in libgc) likes to fail to build randomly
- self.make = 'for((i=0;i<20;i++)); do make && break; done'
+ self.make = 'for i in 1 2 3 4 5 6 7 8 9 10; do make && break; done'
# def prep (self):
# Package.prep (self)
# self.sh ('patch -p1 < "%{sources[1]}"')
def install (self):
Package.install (self)
if Package.profile.name == 'darwin':
self.sh ('sed -ie "s/libcairo.so.2/libcairo.2.dylib/" "%{prefix}/etc/mono/config"')
MonoPackage ()
|
Fix shell syntax for non-bash shells
|
## Code Before:
class MonoPackage (Package):
def __init__ (self):
Package.__init__ (self, 'mono', '2.10',
sources = [
'http://ftp.novell.com/pub/%{name}/sources/%{name}/%{name}-%{version}.tar.bz2',
'patches/mono-runtime-relocation.patch'
],
configure_flags = [
'--with-jit=yes',
'--with-ikvm=no',
'--with-mcs-docs=no',
'--with-moonlight=no',
'--enable-quiet-build'
]
)
# Mono (in libgc) likes to fail to build randomly
self.make = 'for((i=0;i<20;i++)); do make && break; done'
# def prep (self):
# Package.prep (self)
# self.sh ('patch -p1 < "%{sources[1]}"')
def install (self):
Package.install (self)
if Package.profile.name == 'darwin':
self.sh ('sed -ie "s/libcairo.so.2/libcairo.2.dylib/" "%{prefix}/etc/mono/config"')
MonoPackage ()
## Instruction:
Fix shell syntax for non-bash shells
## Code After:
class MonoPackage (Package):
def __init__ (self):
Package.__init__ (self, 'mono', '2.10',
sources = [
'http://ftp.novell.com/pub/%{name}/sources/%{name}/%{name}-%{version}.tar.bz2',
'patches/mono-runtime-relocation.patch'
],
configure_flags = [
'--with-jit=yes',
'--with-ikvm=no',
'--with-mcs-docs=no',
'--with-moonlight=no',
'--enable-quiet-build'
]
)
# Mono (in libgc) likes to fail to build randomly
self.make = 'for i in 1 2 3 4 5 6 7 8 9 10; do make && break; done'
# def prep (self):
# Package.prep (self)
# self.sh ('patch -p1 < "%{sources[1]}"')
def install (self):
Package.install (self)
if Package.profile.name == 'darwin':
self.sh ('sed -ie "s/libcairo.so.2/libcairo.2.dylib/" "%{prefix}/etc/mono/config"')
MonoPackage ()
|
...
# Mono (in libgc) likes to fail to build randomly
self.make = 'for i in 1 2 3 4 5 6 7 8 9 10; do make && break; done'
...
|
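for((i=0;i<20;i++)) is bash's arithmetic for loop; POSIX sh (dash on Debian/Ubuntu) only supports the word-list form, hence the rewrite. The retry-until-success idea itself, expressed in Python purely as an illustration:

import subprocess

def make_with_retries(attempts=10):
    # libgc's build fails nondeterministically, so rerunning make is enough;
    # each pass resumes from whatever was already built.
    for _ in range(attempts):
        if subprocess.call(['make']) == 0:
            return True
    return False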
ca6d0c5f0fc61ce7d939e49f276c36c5cb12a589
|
backend/globaleaks/tests/utils/test_zipstream.py
|
backend/globaleaks/tests/utils/test_zipstream.py
|
import os
from zipfile import ZipFile
from twisted.internet.defer import inlineCallbacks
from globaleaks.db.datainit import load_appdata
from globaleaks.settings import GLSettings
from globaleaks.tests import helpers
from globaleaks.utils.zipstream import ZipStream, get_compression_opts
class TestCollection(helpers.TestGL):
files = []
@inlineCallbacks
def setUp(self):
yield helpers.TestGL.setUp(self)
self.test_collection_file = os.path.join(GLSettings.working_path, 'test.collection')
for k in self.internationalized_text:
self.files.append({'name': self.internationalized_text[k].encode('utf8'), 'buf': self.internationalized_text[k].encode('utf-8')})
def test_collection(self):
for compression in ['zipstored', 'zipdeflated']:
with open(self.test_collection_file, 'w') as f:
opts = get_compression_opts(compression)
for data in ZipStream(self.files, opts['compression_type']):
f.write(data)
with ZipFile(self.test_collection_file, 'r') as f:
self.assertIsNone(f.testzip())
|
import os
from zipfile import ZipFile
from twisted.internet.defer import inlineCallbacks
from globaleaks.db.datainit import load_appdata
from globaleaks.settings import GLSettings
from globaleaks.tests import helpers
from globaleaks.utils.zipstream import ZipStream
class TestCollection(helpers.TestGL):
files = []
@inlineCallbacks
def setUp(self):
yield helpers.TestGL.setUp(self)
self.test_collection_file = os.path.join(GLSettings.working_path, 'test.collection')
for k in self.internationalized_text:
self.files.append({'name': self.internationalized_text[k].encode('utf8'), 'buf': self.internationalized_text[k].encode('utf-8')})
def test_collection(self):
with open(self.test_collection_file, 'w') as f:
for data in ZipStream(self.files):
f.write(data)
with ZipFile(self.test_collection_file, 'r') as f:
self.assertIsNone(f.testzip())
|
Simplify zipstream test following the simplification implemented in the zip routines
|
Simplify zipstream test following the simplification implemented in the zip routines
|
Python
|
agpl-3.0
|
vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks
|
import os
from zipfile import ZipFile
from twisted.internet.defer import inlineCallbacks
from globaleaks.db.datainit import load_appdata
from globaleaks.settings import GLSettings
from globaleaks.tests import helpers
- from globaleaks.utils.zipstream import ZipStream, get_compression_opts
+ from globaleaks.utils.zipstream import ZipStream
class TestCollection(helpers.TestGL):
files = []
@inlineCallbacks
def setUp(self):
yield helpers.TestGL.setUp(self)
self.test_collection_file = os.path.join(GLSettings.working_path, 'test.collection')
for k in self.internationalized_text:
self.files.append({'name': self.internationalized_text[k].encode('utf8'), 'buf': self.internationalized_text[k].encode('utf-8')})
def test_collection(self):
- for compression in ['zipstored', 'zipdeflated']:
- with open(self.test_collection_file, 'w') as f:
+ with open(self.test_collection_file, 'w') as f:
- opts = get_compression_opts(compression)
- for data in ZipStream(self.files, opts['compression_type']):
+ for data in ZipStream(self.files):
- f.write(data)
+ f.write(data)
- with ZipFile(self.test_collection_file, 'r') as f:
+ with ZipFile(self.test_collection_file, 'r') as f:
- self.assertIsNone(f.testzip())
+ self.assertIsNone(f.testzip())
|
Simplify zipstream test following the simplification implemented in the zip routines
|
## Code Before:
import os
from zipfile import ZipFile
from twisted.internet.defer import inlineCallbacks
from globaleaks.db.datainit import load_appdata
from globaleaks.settings import GLSettings
from globaleaks.tests import helpers
from globaleaks.utils.zipstream import ZipStream, get_compression_opts
class TestCollection(helpers.TestGL):
files = []
@inlineCallbacks
def setUp(self):
yield helpers.TestGL.setUp(self)
self.test_collection_file = os.path.join(GLSettings.working_path, 'test.collection')
for k in self.internationalized_text:
self.files.append({'name': self.internationalized_text[k].encode('utf8'), 'buf': self.internationalized_text[k].encode('utf-8')})
def test_collection(self):
for compression in ['zipstored', 'zipdeflated']:
with open(self.test_collection_file, 'w') as f:
opts = get_compression_opts(compression)
for data in ZipStream(self.files, opts['compression_type']):
f.write(data)
with ZipFile(self.test_collection_file, 'r') as f:
self.assertIsNone(f.testzip())
## Instruction:
Simplify zipstream test following the simplification implemented in the zip routines
## Code After:
import os
from zipfile import ZipFile
from twisted.internet.defer import inlineCallbacks
from globaleaks.db.datainit import load_appdata
from globaleaks.settings import GLSettings
from globaleaks.tests import helpers
from globaleaks.utils.zipstream import ZipStream
class TestCollection(helpers.TestGL):
files = []
@inlineCallbacks
def setUp(self):
yield helpers.TestGL.setUp(self)
self.test_collection_file = os.path.join(GLSettings.working_path, 'test.collection')
for k in self.internationalized_text:
self.files.append({'name': self.internationalized_text[k].encode('utf8'), 'buf': self.internationalized_text[k].encode('utf-8')})
def test_collection(self):
with open(self.test_collection_file, 'w') as f:
for data in ZipStream(self.files):
f.write(data)
with ZipFile(self.test_collection_file, 'r') as f:
self.assertIsNone(f.testzip())
|
...
from globaleaks.tests import helpers
from globaleaks.utils.zipstream import ZipStream
...
def test_collection(self):
with open(self.test_collection_file, 'w') as f:
for data in ZipStream(self.files):
f.write(data)
with ZipFile(self.test_collection_file, 'r') as f:
self.assertIsNone(f.testzip())
...
|
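The round trip the simplified test performs (stream a zip out, then let ZipFile.testzip() verify every member's CRC) can be reproduced with the standard library alone; ZipStream itself is project code not shown in the record:

import io
import zipfile

files = [{'name': 'a.txt', 'buf': b'alpha'}, {'name': 'b.txt', 'buf': b'beta'}]

stream = io.BytesIO()
with zipfile.ZipFile(stream, 'w') as zf:
    for f in files:
        zf.writestr(f['name'], f['buf'])

stream.seek(0)
with zipfile.ZipFile(stream) as zf:
    assert zf.testzip() is None  # None means every member passed its CRC check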
1ce899d118b3d46a816c0fc5f2f1a6f0ca9670ed
|
addons/resource/models/res_company.py
|
addons/resource/models/res_company.py
|
from odoo import api, fields, models, _
class ResCompany(models.Model):
_inherit = 'res.company'
resource_calendar_ids = fields.One2many(
'resource.calendar', 'company_id', 'Working Hours')
resource_calendar_id = fields.Many2one(
'resource.calendar', 'Default Working Hours', ondelete='restrict')
@api.model
def _init_data_resource_calendar(self):
for company in self.search([('resource_calendar_id', '=', False)]):
company.resource_calendar_id = self.env['resource.calendar'].create({'name': _('Standard 40 hours/week')}).id
@api.model
def create(self, values):
if not values.get('resource_calendar_id'):
values['resource_calendar_id'] = self.env['resource.calendar'].sudo().create({'name': _('Standard 40 hours/week')}).id
company = super(ResCompany, self).create(values)
# calendar created from form view: no company_id set because record was still not created
if not company.resource_calendar_id.company_id:
company.resource_calendar_id.company_id = company.id
return company
|
from odoo import api, fields, models, _
class ResCompany(models.Model):
_inherit = 'res.company'
resource_calendar_ids = fields.One2many(
'resource.calendar', 'company_id', 'Working Hours')
resource_calendar_id = fields.Many2one(
'resource.calendar', 'Default Working Hours', ondelete='restrict')
@api.model
def _init_data_resource_calendar(self):
self.search([('resource_calendar_id', '=', False)])._create_resource_calendar()
def _create_resource_calendar(self):
for company in self:
company.resource_calendar_id = self.env['resource.calendar'].create({
'name': _('Standard 40 hours/week'),
'company_id': company.id
}).id
@api.model
def create(self, values):
company = super(ResCompany, self).create(values)
if not company.resource_calendar_id:
company.sudo()._create_resource_calendar()
# calendar created from form view: no company_id set because record was still not created
if not company.resource_calendar_id.company_id:
company.resource_calendar_id.company_id = company.id
return company
|
Set company_id on a resource.calendar on company creation
|
[IMP] resource: Set company_id on a resource.calendar on company creation
Purpose
=======
Currently, when creating a company, the resource calendar is created if not specified.
This led to duplicated data: in Manufacturing > Configuration > Working Time, two identical working-time demo entries ('Standard 40 Hours/Week') appeared.
Specification
=============
Company should be correctly set in the calendar.
|
Python
|
agpl-3.0
|
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
|
from odoo import api, fields, models, _
class ResCompany(models.Model):
_inherit = 'res.company'
resource_calendar_ids = fields.One2many(
'resource.calendar', 'company_id', 'Working Hours')
resource_calendar_id = fields.Many2one(
'resource.calendar', 'Default Working Hours', ondelete='restrict')
@api.model
def _init_data_resource_calendar(self):
- for company in self.search([('resource_calendar_id', '=', False)]):
+ self.search([('resource_calendar_id', '=', False)])._create_resource_calendar()
+
+ def _create_resource_calendar(self):
+ for company in self:
- company.resource_calendar_id = self.env['resource.calendar'].create({'name': _('Standard 40 hours/week')}).id
+ company.resource_calendar_id = self.env['resource.calendar'].create({
+ 'name': _('Standard 40 hours/week'),
+ 'company_id': company.id
+ }).id
@api.model
def create(self, values):
- if not values.get('resource_calendar_id'):
- values['resource_calendar_id'] = self.env['resource.calendar'].sudo().create({'name': _('Standard 40 hours/week')}).id
company = super(ResCompany, self).create(values)
+ if not company.resource_calendar_id:
+ company.sudo()._create_resource_calendar()
# calendar created from form view: no company_id set because record was still not created
if not company.resource_calendar_id.company_id:
company.resource_calendar_id.company_id = company.id
return company
|
Set company_id on a resource.calendar on company creation
|
## Code Before:
from odoo import api, fields, models, _
class ResCompany(models.Model):
_inherit = 'res.company'
resource_calendar_ids = fields.One2many(
'resource.calendar', 'company_id', 'Working Hours')
resource_calendar_id = fields.Many2one(
'resource.calendar', 'Default Working Hours', ondelete='restrict')
@api.model
def _init_data_resource_calendar(self):
for company in self.search([('resource_calendar_id', '=', False)]):
company.resource_calendar_id = self.env['resource.calendar'].create({'name': _('Standard 40 hours/week')}).id
@api.model
def create(self, values):
if not values.get('resource_calendar_id'):
values['resource_calendar_id'] = self.env['resource.calendar'].sudo().create({'name': _('Standard 40 hours/week')}).id
company = super(ResCompany, self).create(values)
# calendar created from form view: no company_id set because record was still not created
if not company.resource_calendar_id.company_id:
company.resource_calendar_id.company_id = company.id
return company
## Instruction:
Set company_id on a resource.calendar on company creation
## Code After:
from odoo import api, fields, models, _
class ResCompany(models.Model):
_inherit = 'res.company'
resource_calendar_ids = fields.One2many(
'resource.calendar', 'company_id', 'Working Hours')
resource_calendar_id = fields.Many2one(
'resource.calendar', 'Default Working Hours', ondelete='restrict')
@api.model
def _init_data_resource_calendar(self):
self.search([('resource_calendar_id', '=', False)])._create_resource_calendar()
def _create_resource_calendar(self):
for company in self:
company.resource_calendar_id = self.env['resource.calendar'].create({
'name': _('Standard 40 hours/week'),
'company_id': company.id
}).id
@api.model
def create(self, values):
company = super(ResCompany, self).create(values)
if not company.resource_calendar_id:
company.sudo()._create_resource_calendar()
# calendar created from form view: no company_id set because record was still not created
if not company.resource_calendar_id.company_id:
company.resource_calendar_id.company_id = company.id
return company
|
# ... existing code ...
def _init_data_resource_calendar(self):
self.search([('resource_calendar_id', '=', False)])._create_resource_calendar()
def _create_resource_calendar(self):
for company in self:
company.resource_calendar_id = self.env['resource.calendar'].create({
'name': _('Standard 40 hours/week'),
'company_id': company.id
}).id
# ... modified code ...
def create(self, values):
company = super(ResCompany, self).create(values)
if not company.resource_calendar_id:
company.sudo()._create_resource_calendar()
# calendar created from form view: no company_id set because record was still not created
# ... rest of the code ...
|
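The ordering is the substance of the change: create the company first, then create the default calendar, so the calendar can carry the new company's id (previously the calendar was created before the company existed, leaving company_id empty). Reduced to plain Python with illustrative names, not Odoo API:

class Calendar:
    def __init__(self, name, company_id):
        self.name = name
        self.company_id = company_id

class Company:
    _next_id = 1

    def __init__(self, name):
        self.id = Company._next_id
        Company._next_id += 1
        self.name = name
        self.calendar = None

def create_company(name, calendar=None):
    company = Company(name)
    company.calendar = calendar
    if company.calendar is None:
        # The child is created after the parent, so the backreference is known.
        company.calendar = Calendar('Standard 40 hours/week', company.id)
    return company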
afac07ce173af3e7db4a6ba6dab4786903e217b7
|
ocradmin/ocr/tools/plugins/cuneiform_wrapper.py
|
ocradmin/ocr/tools/plugins/cuneiform_wrapper.py
|
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
|
import tempfile
import subprocess as sp
from ocradmin.ocr.tools import check_aborted, set_progress
from ocradmin.ocr.utils import HocrParser
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
def convert(self, filepath, *args, **kwargs):
"""
Convert a full page.
"""
json = None
with tempfile.NamedTemporaryFile(delete=False) as tmp:
tmp.close()
args = [self.binary, "-f", "hocr", "-o", tmp.name, filepath]
self.logger.info(args)
proc = sp.Popen(args, stderr=sp.PIPE)
err = proc.stderr.read()
if proc.wait() != 0:
return "!!! %s CONVERSION ERROR %d: %s !!!" % (
os.path.basename(self.binary).upper(),
proc.returncode, err)
json = HocrParser().parsefile(tmp.name)
self.logger.info("%s" % json)
os.unlink(tmp.name)
set_progress(self.logger, kwargs["progress_func"], 100, 100)
return json
|
Allow Cuneiform to do full-page conversions. Downsides: 1) it crashes on quite a lot of pages; 2) there's no progress output
|
Allow Cuneiform to do full-page conversions. Downsides: 1) it crashes on quite a lot of pages; 2) there's no progress output
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
+ import tempfile
+ import subprocess as sp
+ from ocradmin.ocr.tools import check_aborted, set_progress
+ from ocradmin.ocr.utils import HocrParser
+ from generic_wrapper import *
- from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
+ def convert(self, filepath, *args, **kwargs):
+ """
+ Convert a full page.
+ """
+ json = None
+ with tempfile.NamedTemporaryFile(delete=False) as tmp:
+ tmp.close()
+ args = [self.binary, "-f", "hocr", "-o", tmp.name, filepath]
+ self.logger.info(args)
+ proc = sp.Popen(args, stderr=sp.PIPE)
+ err = proc.stderr.read()
+ if proc.wait() != 0:
+ return "!!! %s CONVERSION ERROR %d: %s !!!" % (
+ os.path.basename(self.binary).upper(),
+ proc.returncode, err)
+ json = HocrParser().parsefile(tmp.name)
+ self.logger.info("%s" % json)
+ os.unlink(tmp.name)
+ set_progress(self.logger, kwargs["progress_func"], 100, 100)
+ return json
|
Allow Cuneiform to do full-page conversions. Downsides: 1) it crashes on quite a lot of pages; 2) there's no progress output
|
## Code Before:
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
## Instruction:
Allow Cuneiform to do full-page conversions. Downsides: 1) it crashes on quite a lot of pages; 2) there's no progress output
## Code After:
import tempfile
import subprocess as sp
from ocradmin.ocr.tools import check_aborted, set_progress
from ocradmin.ocr.utils import HocrParser
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
def convert(self, filepath, *args, **kwargs):
"""
Convert a full page.
"""
json = None
with tempfile.NamedTemporaryFile(delete=False) as tmp:
tmp.close()
args = [self.binary, "-f", "hocr", "-o", tmp.name, filepath]
self.logger.info(args)
proc = sp.Popen(args, stderr=sp.PIPE)
err = proc.stderr.read()
if proc.wait() != 0:
return "!!! %s CONVERSION ERROR %d: %s !!!" % (
os.path.basename(self.binary).upper(),
proc.returncode, err)
json = HocrParser().parsefile(tmp.name)
self.logger.info("%s" % json)
os.unlink(tmp.name)
set_progress(self.logger, kwargs["progress_func"], 100, 100)
return json
|
# ... existing code ...
import tempfile
import subprocess as sp
from ocradmin.ocr.tools import check_aborted, set_progress
from ocradmin.ocr.utils import HocrParser
from generic_wrapper import *
# ... modified code ...
def convert(self, filepath, *args, **kwargs):
"""
Convert a full page.
"""
json = None
with tempfile.NamedTemporaryFile(delete=False) as tmp:
tmp.close()
args = [self.binary, "-f", "hocr", "-o", tmp.name, filepath]
self.logger.info(args)
proc = sp.Popen(args, stderr=sp.PIPE)
err = proc.stderr.read()
if proc.wait() != 0:
return "!!! %s CONVERSION ERROR %d: %s !!!" % (
os.path.basename(self.binary).upper(),
proc.returncode, err)
json = HocrParser().parsefile(tmp.name)
self.logger.info("%s" % json)
os.unlink(tmp.name)
set_progress(self.logger, kwargs["progress_func"], 100, 100)
return json
# ... rest of the code ...
|
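A rough driver for the wrapper above, assuming GenericWrapper() needs no constructor arguments; the logger and the progress_func callback shape are stand-ins, since set_progress's calling convention lives in ocradmin.ocr.tools and is not shown here:

    import logging

    wrapper = CuneiformWrapper()
    wrapper.logger = logging.getLogger('cuneiform')

    def progress(*args, **kwargs):
        # hypothetical callback; the real signature is whatever
        # set_progress expects
        print('progress update:', args, kwargs)

    # Returns the parsed hOCR data, or an error string if cuneiform crashes.
    result = wrapper.convert('page.png', progress_func=progress)
    print(result)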
1e6fcb134f55cb70ddd394a051a86c45aa50c944
|
cli_helpers/tabular_output/tabulate_adapter.py
|
cli_helpers/tabular_output/tabulate_adapter.py
|
from cli_helpers.packages import tabulate
from .preprocessors import bytes_to_string, align_decimals
tabulate.PRESERVE_WHITESPACE = True
supported_markup_formats = ('mediawiki', 'html', 'latex', 'latex_booktabs',
'textile', 'moinmoin', 'jira')
supported_table_formats = ('plain', 'simple', 'grid', 'fancy_grid', 'pipe',
'orgtbl', 'psql', 'rst')
supported_formats = supported_markup_formats + supported_table_formats
preprocessors = (bytes_to_string, align_decimals)
def adapter(data, headers, table_format=None, missing_value='', **_):
"""Wrap tabulate inside a function for TabularOutputFormatter."""
kwargs = {'tablefmt': table_format, 'missingval': missing_value,
'disable_numparse': True}
if table_format in supported_markup_formats:
kwargs.update(numalign=None, stralign=None)
return tabulate.tabulate(data, headers, **kwargs)
|
from cli_helpers.packages import tabulate
from .preprocessors import bytes_to_string, align_decimals
supported_markup_formats = ('mediawiki', 'html', 'latex', 'latex_booktabs',
'textile', 'moinmoin', 'jira')
supported_table_formats = ('plain', 'simple', 'grid', 'fancy_grid', 'pipe',
'orgtbl', 'psql', 'rst')
supported_formats = supported_markup_formats + supported_table_formats
preprocessors = (bytes_to_string, align_decimals)
def adapter(data, headers, table_format=None, missing_value='',
disable_numparse=True, preserve_whitespace=True, **_):
"""Wrap tabulate inside a function for TabularOutputFormatter."""
kwargs = {'tablefmt': table_format, 'missingval': missing_value,
'disable_numparse': disable_numparse}
if table_format in supported_markup_formats:
kwargs.update(numalign=None, stralign=None)
tabulate.PRESERVE_WHITESPACE = preserve_whitespace
return tabulate.tabulate(data, headers, **kwargs)
|
Make whitespace and numparse configurable.
|
Make whitespace and numparse configurable.
|
Python
|
bsd-3-clause
|
dbcli/cli_helpers,dbcli/cli_helpers
|
from cli_helpers.packages import tabulate
from .preprocessors import bytes_to_string, align_decimals
-
- tabulate.PRESERVE_WHITESPACE = True
supported_markup_formats = ('mediawiki', 'html', 'latex', 'latex_booktabs',
'textile', 'moinmoin', 'jira')
supported_table_formats = ('plain', 'simple', 'grid', 'fancy_grid', 'pipe',
'orgtbl', 'psql', 'rst')
supported_formats = supported_markup_formats + supported_table_formats
preprocessors = (bytes_to_string, align_decimals)
- def adapter(data, headers, table_format=None, missing_value='', **_):
+ def adapter(data, headers, table_format=None, missing_value='',
+ disable_numparse=True, preserve_whitespace=True, **_):
"""Wrap tabulate inside a function for TabularOutputFormatter."""
kwargs = {'tablefmt': table_format, 'missingval': missing_value,
- 'disable_numparse': True}
+ 'disable_numparse': disable_numparse}
if table_format in supported_markup_formats:
kwargs.update(numalign=None, stralign=None)
+ tabulate.PRESERVE_WHITESPACE = preserve_whitespace
+
return tabulate.tabulate(data, headers, **kwargs)
|
Make whitespace and numparse configurable.
|
## Code Before:
from cli_helpers.packages import tabulate
from .preprocessors import bytes_to_string, align_decimals
tabulate.PRESERVE_WHITESPACE = True
supported_markup_formats = ('mediawiki', 'html', 'latex', 'latex_booktabs',
'textile', 'moinmoin', 'jira')
supported_table_formats = ('plain', 'simple', 'grid', 'fancy_grid', 'pipe',
'orgtbl', 'psql', 'rst')
supported_formats = supported_markup_formats + supported_table_formats
preprocessors = (bytes_to_string, align_decimals)
def adapter(data, headers, table_format=None, missing_value='', **_):
"""Wrap tabulate inside a function for TabularOutputFormatter."""
kwargs = {'tablefmt': table_format, 'missingval': missing_value,
'disable_numparse': True}
if table_format in supported_markup_formats:
kwargs.update(numalign=None, stralign=None)
return tabulate.tabulate(data, headers, **kwargs)
## Instruction:
Make whitespace and numparse configurable.
## Code After:
from cli_helpers.packages import tabulate
from .preprocessors import bytes_to_string, align_decimals
supported_markup_formats = ('mediawiki', 'html', 'latex', 'latex_booktabs',
'textile', 'moinmoin', 'jira')
supported_table_formats = ('plain', 'simple', 'grid', 'fancy_grid', 'pipe',
'orgtbl', 'psql', 'rst')
supported_formats = supported_markup_formats + supported_table_formats
preprocessors = (bytes_to_string, align_decimals)
def adapter(data, headers, table_format=None, missing_value='',
disable_numparse=True, preserve_whitespace=True, **_):
"""Wrap tabulate inside a function for TabularOutputFormatter."""
kwargs = {'tablefmt': table_format, 'missingval': missing_value,
'disable_numparse': disable_numparse}
if table_format in supported_markup_formats:
kwargs.update(numalign=None, stralign=None)
tabulate.PRESERVE_WHITESPACE = preserve_whitespace
return tabulate.tabulate(data, headers, **kwargs)
|
...
from .preprocessors import bytes_to_string, align_decimals
...
def adapter(data, headers, table_format=None, missing_value='',
disable_numparse=True, preserve_whitespace=True, **_):
"""Wrap tabulate inside a function for TabularOutputFormatter."""
...
kwargs = {'tablefmt': table_format, 'missingval': missing_value,
'disable_numparse': disable_numparse}
if table_format in supported_markup_formats:
...
tabulate.PRESERVE_WHITESPACE = preserve_whitespace
return tabulate.tabulate(data, headers, **kwargs)
...
|
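A minimal call sketch for the adapter above; the rows are made up, and tabulate must be importable via cli_helpers.packages:

    data = [['1.50', 'a  b'], ['10.25', 'c']]
    headers = ['num', 'text']

    # Defaults keep strings unparsed and preserve whitespace:
    print(adapter(data, headers, table_format='psql'))

    # Opt back in to number parsing and whitespace collapsing:
    print(adapter(data, headers, table_format='psql',
                  disable_numparse=False, preserve_whitespace=False))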
38775f06c2285f3d12b9f4a0bc70bded29dce274
|
hbmqtt/utils.py
|
hbmqtt/utils.py
|
def not_in_dict_or_none(dict, key):
"""
Check if a key exists in a map and if it's not None
:param dict: map to look for key
:param key: key to find
    :return: true if key is not in dict, or dict[key] is None
"""
if key not in dict or dict[key] is None:
return True
else:
return False
|
def not_in_dict_or_none(dict, key):
"""
Check if a key exists in a map and if it's not None
:param dict: map to look for key
:param key: key to find
    :return: true if key is not in dict, or dict[key] is None
"""
if key not in dict or dict[key] is None:
return True
else:
return False
def format_client_message(session=None, address=None, port=None, id=None):
if session:
return "(client @=%s:%d id=%s)" % (session.remote_address, session.remote_port, session.client_id)
else:
return "(client @=%s:%d id=%s)" % (address, port, id)
|
Add method for formatting client info (address, port, id)
|
Add method for formatting client info (address, port, id)
|
Python
|
mit
|
beerfactory/hbmqtt
|
def not_in_dict_or_none(dict, key):
"""
Check if a key exists in a map and if it's not None
:param dict: map to look for key
:param key: key to find
    :return: true if key is not in dict, or dict[key] is None
"""
if key not in dict or dict[key] is None:
return True
else:
return False
+
+
+ def format_client_message(session=None, address=None, port=None, id=None):
+ if session:
+ return "(client @=%s:%d id=%s)" % (session.remote_address, session.remote_port, session.client_id)
+ else:
+ return "(client @=%s:%d id=%s)" % (address, port, id)
+
|
Add method for formatting client info (address, port, id)
|
## Code Before:
def not_in_dict_or_none(dict, key):
"""
Check if a key exists in a map and if it's not None
:param dict: map to look for key
:param key: key to find
    :return: true if key is not in dict, or dict[key] is None
"""
if key not in dict or dict[key] is None:
return True
else:
return False
## Instruction:
Add method for formatting client info (address, port, id)
## Code After:
def not_in_dict_or_none(dict, key):
"""
Check if a key exists in a map and if it's not None
:param dict: map to look for key
:param key: key to find
    :return: true if key is not in dict, or dict[key] is None
"""
if key not in dict or dict[key] is None:
return True
else:
return False
def format_client_message(session=None, address=None, port=None, id=None):
if session:
return "(client @=%s:%d id=%s)" % (session.remote_address, session.remote_port, session.client_id)
else:
return "(client @=%s:%d id=%s)" % (address, port, id)
|
# ... existing code ...
return False
def format_client_message(session=None, address=None, port=None, id=None):
if session:
return "(client @=%s:%d id=%s)" % (session.remote_address, session.remote_port, session.client_id)
else:
return "(client @=%s:%d id=%s)" % (address, port, id)
# ... rest of the code ...
|
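A quick sketch of both calling styles; the session object is a stand-in exposing just the attributes the function reads:

    from collections import namedtuple

    FakeSession = namedtuple('FakeSession',
                             'remote_address remote_port client_id')
    s = FakeSession('10.0.0.5', 1883, 'sensor-1')

    print(format_client_message(session=s))
    # (client @=10.0.0.5:1883 id=sensor-1)
    print(format_client_message(address='10.0.0.5', port=1883, id='sensor-1'))
    # (client @=10.0.0.5:1883 id=sensor-1)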
53d5f47c828bec78e7241cb9e3d4f614dd18e6f9
|
responder.py
|
responder.py
|
import random
import yaml
from flask import jsonify, Response, render_template
class Which(object):
def __init__(self, mime_type, args):
self.mime_type = mime_type
self.args = args
@property
def _excuse(self):
stream = open("excuses.yaml", 'r')
excuses = yaml.load(stream)
return random.choice(excuses["excuses"])
def get_response(self):
if self.mime_type == "application/json":
return jsonify({
"excuse": self._excuse
}), "/json/"
elif self.mime_type == "application/xml":
return Response(
render_template('xml.xml', excuse=self._excuse),
mimetype='text/xml'
), "/xml/"
elif self.mime_type == "application/javascript" or "jsonp" in self.args:
return Response(
render_template('jsonp.js', excuse=self._excuse),
mimetype='application/javascript'
), "/jsonp/"
elif self.mime_type == "text/plain":
return Response("Hello world", mimetype='text/plain'), "/text/"
else:
return render_template('html.html', excuse=self._excuse), "/html/"
|
import random
import yaml
from flask import jsonify, Response, render_template
class Which(object):
def __init__(self, mime_type, args):
self.mime_type = mime_type
self.args = args
@property
def _excuse(self):
stream = open("excuses.yaml", 'r')
excuses = yaml.load(stream)
return random.choice(excuses["excuses"])
def get_response(self):
if self.mime_type == "application/json":
return jsonify({
"excuse": self._excuse
}), "/json/"
elif self.mime_type == "application/xml":
return Response(
render_template('xml.xml', excuse=self._excuse),
mimetype='text/xml'
), "/xml/"
elif self.mime_type == "application/javascript" or "jsonp" in self.args:
return Response(
render_template('jsonp.js', excuse=self._excuse),
mimetype='application/javascript'
), "/jsonp/"
elif self.mime_type == "text/plain":
return Response(self._excuse, mimetype='text/plain'), "/text/"
else:
return render_template('html.html', excuse=self._excuse), "/html/"
|
Fix bug with text/plain response
|
Fix bug with text/plain response
|
Python
|
mit
|
aaronbassett/Bad-Tools,aaronbassett/Bad-Tools,aaronbassett/Bad-Tools,aaronbassett/Bad-Tools,aaronbassett/Bad-Tools
|
import random
import yaml
from flask import jsonify, Response, render_template
class Which(object):
def __init__(self, mime_type, args):
self.mime_type = mime_type
self.args = args
@property
def _excuse(self):
stream = open("excuses.yaml", 'r')
excuses = yaml.load(stream)
return random.choice(excuses["excuses"])
def get_response(self):
if self.mime_type == "application/json":
return jsonify({
"excuse": self._excuse
}), "/json/"
elif self.mime_type == "application/xml":
return Response(
render_template('xml.xml', excuse=self._excuse),
mimetype='text/xml'
), "/xml/"
elif self.mime_type == "application/javascript" or "jsonp" in self.args:
return Response(
render_template('jsonp.js', excuse=self._excuse),
mimetype='application/javascript'
), "/jsonp/"
elif self.mime_type == "text/plain":
- return Response("Hello world", mimetype='text/plain'), "/text/"
+ return Response(self._excuse, mimetype='text/plain'), "/text/"
else:
return render_template('html.html', excuse=self._excuse), "/html/"
|
Fix bug with text/plain response
|
## Code Before:
import random
import yaml
from flask import jsonify, Response, render_template
class Which(object):
def __init__(self, mime_type, args):
self.mime_type = mime_type
self.args = args
@property
def _excuse(self):
stream = open("excuses.yaml", 'r')
excuses = yaml.load(stream)
return random.choice(excuses["excuses"])
def get_response(self):
if self.mime_type == "application/json":
return jsonify({
"excuse": self._excuse
}), "/json/"
elif self.mime_type == "application/xml":
return Response(
render_template('xml.xml', excuse=self._excuse),
mimetype='text/xml'
), "/xml/"
elif self.mime_type == "application/javascript" or "jsonp" in self.args:
return Response(
render_template('jsonp.js', excuse=self._excuse),
mimetype='application/javascript'
), "/jsonp/"
elif self.mime_type == "text/plain":
return Response("Hello world", mimetype='text/plain'), "/text/"
else:
return render_template('html.html', excuse=self._excuse), "/html/"
## Instruction:
Fix bug with text/plain response
## Code After:
import random
import yaml
from flask import jsonify, Response, render_template
class Which(object):
def __init__(self, mime_type, args):
self.mime_type = mime_type
self.args = args
@property
def _excuse(self):
stream = open("excuses.yaml", 'r')
excuses = yaml.load(stream)
return random.choice(excuses["excuses"])
def get_response(self):
if self.mime_type == "application/json":
return jsonify({
"excuse": self._excuse
}), "/json/"
elif self.mime_type == "application/xml":
return Response(
render_template('xml.xml', excuse=self._excuse),
mimetype='text/xml'
), "/xml/"
elif self.mime_type == "application/javascript" or "jsonp" in self.args:
return Response(
render_template('jsonp.js', excuse=self._excuse),
mimetype='application/javascript'
), "/jsonp/"
elif self.mime_type == "text/plain":
return Response(self._excuse, mimetype='text/plain'), "/text/"
else:
return render_template('html.html', excuse=self._excuse), "/html/"
|
...
elif self.mime_type == "text/plain":
return Response(self._excuse, mimetype='text/plain'), "/text/"
...
|
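A sketch exercising the fixed text/plain branch; it assumes excuses.yaml sits in the working directory and runs inside a Flask request context so the template-rendering branches would also work:

    from flask import Flask

    app = Flask(__name__)

    with app.test_request_context():
        response, route = Which('text/plain', {}).get_response()
        print(route)  # /text/
        # The body is now a random excuse, not the leftover "Hello world".
        print(response.get_data(as_text=True))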
5255a72c266f8ab092a02b6d87f7006f2149560e
|
vortaro/admin.py
|
vortaro/admin.py
|
from bonvortaro.vortaro.models import Root, Word, Definition #, Translation
from django.contrib import admin
class RootAdmin(admin.ModelAdmin):
list_display = ["root", "kind", "begining", "ending", "ofc"]
list_filter = ["begining", "ending", "kind", "ofc"]
admin.site.register(Root, RootAdmin)
class WordAdmin(admin.ModelAdmin):
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
list_filter = ["ending", "kind", "ofc", "begining", "language"]
def revo_link(self, word):
return "<a href=\"%s\">%s</a>" % (word.revo_url(), word.revo_url())
revo_link.short_description = "Reta Vortaro"
revo_link.allow_tags = True
admin.site.register(Word, WordAdmin)
class DefinitionAdmin(admin.ModelAdmin):
list_display = ["word", "definition"]
admin.site.register(Definition, DefinitionAdmin)
|
from bonvortaro.vortaro.models import Root, Word, Definition #, Translation
from django.contrib import admin
class RootAdmin(admin.ModelAdmin):
list_display = ["root", "kind", "begining", "ending", "ofc"]
list_filter = ["begining", "ending", "kind", "ofc"]
admin.site.register(Root, RootAdmin)
class WordAdmin(admin.ModelAdmin):
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
list_filter = ["kind", "ofc", "language", "ending", "begining"]
def revo_link(self, word):
return "<a href=\"%s\">%s</a>" % (word.revo_url(), word.revo_url())
revo_link.short_description = "Reta Vortaro"
revo_link.allow_tags = True
admin.site.register(Word, WordAdmin)
class DefinitionAdmin(admin.ModelAdmin):
list_display = ["word", "definition"]
admin.site.register(Definition, DefinitionAdmin)
|
Reorder filters to make them easier to use.
|
Reorder filters to make them easier to use.
|
Python
|
agpl-3.0
|
pupeno/bonvortaro
|
from bonvortaro.vortaro.models import Root, Word, Definition #, Translation
from django.contrib import admin
class RootAdmin(admin.ModelAdmin):
list_display = ["root", "kind", "begining", "ending", "ofc"]
list_filter = ["begining", "ending", "kind", "ofc"]
admin.site.register(Root, RootAdmin)
class WordAdmin(admin.ModelAdmin):
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
- list_filter = ["ending", "kind", "ofc", "begining", "language"]
+ list_filter = ["kind", "ofc", "language", "ending", "begining"]
def revo_link(self, word):
return "<a href=\"%s\">%s</a>" % (word.revo_url(), word.revo_url())
revo_link.short_description = "Reta Vortaro"
revo_link.allow_tags = True
admin.site.register(Word, WordAdmin)
class DefinitionAdmin(admin.ModelAdmin):
list_display = ["word", "definition"]
admin.site.register(Definition, DefinitionAdmin)
|
Reorder filters to make them easier to use.
|
## Code Before:
from bonvortaro.vortaro.models import Root, Word, Definition #, Translation
from django.contrib import admin
class RootAdmin(admin.ModelAdmin):
list_display = ["root", "kind", "begining", "ending", "ofc"]
list_filter = ["begining", "ending", "kind", "ofc"]
admin.site.register(Root, RootAdmin)
class WordAdmin(admin.ModelAdmin):
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
list_filter = ["ending", "kind", "ofc", "begining", "language"]
def revo_link(self, word):
return "<a href=\"%s\">%s</a>" % (word.revo_url(), word.revo_url())
revo_link.short_description = "Reta Vortaro"
revo_link.allow_tags = True
admin.site.register(Word, WordAdmin)
class DefinitionAdmin(admin.ModelAdmin):
list_display = ["word", "definition"]
admin.site.register(Definition, DefinitionAdmin)
## Instruction:
Reorder filters to make them easier to use.
## Code After:
from bonvortaro.vortaro.models import Root, Word, Definition #, Translation
from django.contrib import admin
class RootAdmin(admin.ModelAdmin):
list_display = ["root", "kind", "begining", "ending", "ofc"]
list_filter = ["begining", "ending", "kind", "ofc"]
admin.site.register(Root, RootAdmin)
class WordAdmin(admin.ModelAdmin):
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
list_filter = ["kind", "ofc", "language", "ending", "begining"]
def revo_link(self, word):
return "<a href=\"%s\">%s</a>" % (word.revo_url(), word.revo_url())
revo_link.short_description = "Reta Vortaro"
revo_link.allow_tags = True
admin.site.register(Word, WordAdmin)
class DefinitionAdmin(admin.ModelAdmin):
list_display = ["word", "definition"]
admin.site.register(Definition, DefinitionAdmin)
|
...
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
list_filter = ["kind", "ofc", "language", "ending", "begining"]
...
|
caf0829191e9f3276fb144486ad602dcd482b60d
|
ignition/dsl/sfl/proteus_coefficient_printer.py
|
ignition/dsl/sfl/proteus_coefficient_printer.py
|
"""Generator for Proteus coefficient evaluator"""
from .sfl_printer import SFLPrinter
from ...code_tools import comment_code, indent_code, PythonCodePrinter
coefficient_header = """\
Proteus Coefficient file generated from Ignition
"""
class_header = """\
class %{class_name}s(TC_base):
"""
class ProteusCoefficientPrinter(SFLPrinter):
"""Generator for Proteus Coefficient evaluator"""
language = 'Python'
comment_str = '//'
block_comment_tuple = ('"""', '"""')
def print_file(self, indent=0):
ret_code = ""
ret_code += self._print_header(indent)
ret_code += PythonCodePrinter(self._generator.class_dag)
return ret_code
|
"""Generator for Proteus coefficient evaluator"""
from .sfl_printer import SFLPrinter
from ...code_tools import comment_code, indent_code, PythonCodePrinter
coefficient_header = """\
Proteus Coefficient file generated from Ignition
"""
class ProteusCoefficientPrinter(SFLPrinter):
"""Generator for Proteus Coefficient evaluator"""
language = 'Python'
comment_str = '//'
block_comment_tuple = ('"""', '"""\n')
def _print_header(self, indent):
return comment_code(indent_code(coefficient_header, indent),
block_comment=self.block_comment_tuple)
def print_file(self, indent=0):
ret_code = ""
ret_code += self._print_header(indent)
ret_code += PythonCodePrinter(self._generator.class_dag).code_str()
return ret_code
|
Print header; remove code for the Proteus Python class header (use codeobj)
|
Print header; remove code for the Proteus Python class header (use codeobj)
|
Python
|
bsd-3-clause
|
IgnitionProject/ignition,IgnitionProject/ignition,IgnitionProject/ignition
|
"""Generator for Proteus coefficient evaluator"""
from .sfl_printer import SFLPrinter
from ...code_tools import comment_code, indent_code, PythonCodePrinter
coefficient_header = """\
Proteus Coefficient file generated from Ignition
"""
- class_header = """\
- class %{class_name}s(TC_base):
- """
-
class ProteusCoefficientPrinter(SFLPrinter):
"""Generator for Proteus Coefficient evaluator"""
language = 'Python'
comment_str = '//'
- block_comment_tuple = ('"""', '"""')
+ block_comment_tuple = ('"""', '"""\n')
+
+ def _print_header(self, indent):
+ return comment_code(indent_code(coefficient_header, indent),
+ block_comment=self.block_comment_tuple)
def print_file(self, indent=0):
ret_code = ""
ret_code += self._print_header(indent)
- ret_code += PythonCodePrinter(self._generator.class_dag)
+ ret_code += PythonCodePrinter(self._generator.class_dag).code_str()
return ret_code
|
Print header; remove code for the Proteus Python class header (use codeobj)
|
## Code Before:
"""Generator for Proteus coefficient evaluator"""
from .sfl_printer import SFLPrinter
from ...code_tools import comment_code, indent_code, PythonCodePrinter
coefficient_header = """\
Proteus Coefficient file generated from Ignition
"""
class_header = """\
class %{class_name}s(TC_base):
"""
class ProteusCoefficientPrinter(SFLPrinter):
"""Generator for Proteus Coefficient evaluator"""
language = 'Python'
comment_str = '//'
block_comment_tuple = ('"""', '"""')
def print_file(self, indent=0):
ret_code = ""
ret_code += self._print_header(indent)
ret_code += PythonCodePrinter(self._generator.class_dag)
return ret_code
## Instruction:
Print header; remove code for the Proteus Python class header (use codeobj)
## Code After:
"""Generator for Proteus coefficient evaluator"""
from .sfl_printer import SFLPrinter
from ...code_tools import comment_code, indent_code, PythonCodePrinter
coefficient_header = """\
Proteus Coefficient file generated from Ignition
"""
class ProteusCoefficientPrinter(SFLPrinter):
"""Generator for Proteus Coefficient evaluator"""
language = 'Python'
comment_str = '//'
block_comment_tuple = ('"""', '"""\n')
def _print_header(self, indent):
return comment_code(indent_code(coefficient_header, indent),
block_comment=self.block_comment_tuple)
def print_file(self, indent=0):
ret_code = ""
ret_code += self._print_header(indent)
ret_code += PythonCodePrinter(self._generator.class_dag).code_str()
return ret_code
|
// ... existing code ...
// ... modified code ...
comment_str = '//'
block_comment_tuple = ('"""', '"""\n')
def _print_header(self, indent):
return comment_code(indent_code(coefficient_header, indent),
block_comment=self.block_comment_tuple)
...
ret_code += self._print_header(indent)
ret_code += PythonCodePrinter(self._generator.class_dag).code_str()
return ret_code
// ... rest of the code ...
|
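Assuming comment_code wraps its input in the block-comment pair and indent_code prepends indentation (both live in ignition.code_tools and are not shown here), _print_header at indent=0 should emit roughly:

    """
    Proteus Coefficient file generated from Ignition
    """

print_file then appends the class source produced by PythonCodePrinter(self._generator.class_dag).code_str() after that header.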
b23ed2d6d74c4604e9bb7b55faf121661ee9f785
|
statePointsGen.py
|
statePointsGen.py
|
import csv
import argparse
import itertools
|
import csv
import argparse
import itertools
import string
numPoints=int(input('Number of points in analysis:'))
num2alpha = dict(zip(range(1, 27), string.ascii_uppercase))
outRow=['']
outRows=[]
paramsPoints=['mdot','h','T','P','s']
parser = argparse.ArgumentParser()
parser.add_argument('-o','--output',required=True)
args = parser.parse_args()
for point in range(1,numPoints+1):
outRow.append(num2alpha[point])
outRows.append(outRow)
for param in paramsPoints:
outRows.append([param])
with open(args.output, 'wt') as csvfile:
fileEqns = csv.writer(csvfile, delimiter=',',
quotechar="'", quoting=csv.QUOTE_MINIMAL)
for row in outRows:
fileEqns.writerow(row)
print("Output file: %s" % args.output)
|
Develop program for points file generation
|
Develop program for points file generation
|
Python
|
mit
|
ndebuhr/thermo-state-solver,ndebuhr/thermo-state-solver
|
import csv
import argparse
import itertools
+ import string
+ numPoints=int(input('Number of points in analysis:'))
+ num2alpha = dict(zip(range(1, 27), string.ascii_uppercase))
+ outRow=['']
+ outRows=[]
+ paramsPoints=['mdot','h','T','P','s']
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-o','--output',required=True)
+ args = parser.parse_args()
+
+ for point in range(1,numPoints+1):
+ outRow.append(num2alpha[point])
+ outRows.append(outRow)
+ for param in paramsPoints:
+ outRows.append([param])
+
+ with open(args.output, 'wt') as csvfile:
+ fileEqns = csv.writer(csvfile, delimiter=',',
+ quotechar="'", quoting=csv.QUOTE_MINIMAL)
+ for row in outRows:
+ fileEqns.writerow(row)
+
+ print("Output file: %s" % args.output)
+
|
Develop program for points file generation
|
## Code Before:
import csv
import argparse
import itertools
## Instruction:
Develop program for points file generation
## Code After:
import csv
import argparse
import itertools
import string
numPoints=int(input('Number of points in analysis:'))
num2alpha = dict(zip(range(1, 27), string.ascii_uppercase))
outRow=['']
outRows=[]
paramsPoints=['mdot','h','T','P','s']
parser = argparse.ArgumentParser()
parser.add_argument('-o','--output',required=True)
args = parser.parse_args()
for point in range(1,numPoints+1):
outRow.append(num2alpha[point])
outRows.append(outRow)
for param in paramsPoints:
outRows.append([param])
with open(args.output, 'wt') as csvfile:
fileEqns = csv.writer(csvfile, delimiter=',',
quotechar="'", quoting=csv.QUOTE_MINIMAL)
for row in outRows:
fileEqns.writerow(row)
print("Output file: %s" % args.output)
|
...
import itertools
import string
numPoints=int(input('Number of points in analysis:'))
num2alpha = dict(zip(range(1, 27), string.ascii_uppercase))
outRow=['']
outRows=[]
paramsPoints=['mdot','h','T','P','s']
parser = argparse.ArgumentParser()
parser.add_argument('-o','--output',required=True)
args = parser.parse_args()
for point in range(1,numPoints+1):
outRow.append(num2alpha[point])
outRows.append(outRow)
for param in paramsPoints:
outRows.append([param])
with open(args.output, 'wt') as csvfile:
fileEqns = csv.writer(csvfile, delimiter=',',
quotechar="'", quoting=csv.QUOTE_MINIMAL)
for row in outRows:
fileEqns.writerow(row)
print("Output file: %s" % args.output)
...
|
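For a three-point run, the script above produces a CSV shaped like this (filename taken from -o; the session shown is hypothetical):

    # $ python statePointsGen.py -o points.csv
    # Number of points in analysis:3
    # Output file: points.csv
    #
    # points.csv:
    # ,A,B,C
    # mdot
    # h
    # T
    # P
    # s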
453497b0755d8bc2d6bd6ccc3830394e50ed9a07
|
pywikibot/families/outreach_family.py
|
pywikibot/families/outreach_family.py
|
__version__ = '$Id$'
from pywikibot import family
# Outreach wiki custom family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = u'outreach'
self.langs = {
'outreach': 'outreach.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
def version(self, code):
return "1.24wmf6"
|
__version__ = '$Id$'
from pywikibot import family
# Outreach wiki custom family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = u'outreach'
self.langs = {
'outreach': 'outreach.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
|
Update MW version to 1.24wmf11, derived from the super class
|
Update MW version to 1.24wmf11, derived from the super class
Change-Id: If142c57a88179f80e2e652e844c7aadbc2468f7c
|
Python
|
mit
|
trishnaguha/pywikibot-core,Darkdadaah/pywikibot-core,VcamX/pywikibot-core,magul/pywikibot-core,PersianWikipedia/pywikibot-core,magul/pywikibot-core,icyflame/batman,Darkdadaah/pywikibot-core,wikimedia/pywikibot-core,happy5214/pywikibot-core,TridevGuha/pywikibot-core,wikimedia/pywikibot-core,hasteur/g13bot_tools_new,valhallasw/pywikibot-core,jayvdb/pywikibot-core,jayvdb/pywikibot-core,smalyshev/pywikibot-core,npdoty/pywikibot,hasteur/g13bot_tools_new,xZise/pywikibot-core,h4ck3rm1k3/pywikibot-core,darthbhyrava/pywikibot-local,h4ck3rm1k3/pywikibot-core,hasteur/g13bot_tools_new,happy5214/pywikibot-core,npdoty/pywikibot,emijrp/pywikibot-core
|
__version__ = '$Id$'
from pywikibot import family
# Outreach wiki custom family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = u'outreach'
self.langs = {
'outreach': 'outreach.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
- def version(self, code):
- return "1.24wmf6"
-
|
Update MW version to 1.24wmf11, derived from the super class
|
## Code Before:
__version__ = '$Id$'
from pywikibot import family
# Outreach wiki custom family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = u'outreach'
self.langs = {
'outreach': 'outreach.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
def version(self, code):
return "1.24wmf6"
## Instruction:
Update MW version to 1.24wmf11, derived from the super class
## Code After:
__version__ = '$Id$'
from pywikibot import family
# Outreach wiki custom family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = u'outreach'
self.langs = {
'outreach': 'outreach.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
|
...
self.interwiki_forward = 'wikipedia'
...
|
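With the override gone, version() resolves through family.WikimediaFamily; a sketch (the exact string depends on the installed pywikibot, though the commit message points at 1.24wmf11):

    fam = Family()
    print(fam.version('outreach'))  # e.g. '1.24wmf11', per the superclass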
3a27568211c07cf614aa9865a2f08d2a9b9bfb71
|
dinosaurs/views.py
|
dinosaurs/views.py
|
import os
import json
import httplib as http
import tornado.web
import tornado.ioloop
from dinosaurs import api
from dinosaurs import settings
class SingleStatic(tornado.web.StaticFileHandler):
def initialize(self, path):
self.dirname, self.filename = os.path.split(path)
super(SingleStatic, self).initialize(self.dirname)
def get(self, path=None, include_body=True):
super(SingleStatic, self).get(self.filename, include_body)
class DomainAPIHandler(tornado.web.RequestHandler):
def get(self):
self.write({
'availableDomains': settings.DOMAINS.keys()
})
class EmailAPIHandler(tornado.web.RequestHandler):
def post(self):
try:
req_json = json.loads(self.request.body)
except ValueError:
raise tornado.web.HTTPError(http.BAD_REQUEST)
email = req_json.get('email')
domain = req_json.get('domain')
connection = api.get_connection(domain)
if not email or not domain or not connection:
raise tornado.web.HTTPError(http.BAD_REQUEST)
ret, passwd = api.create_email(connection, email)
self.write({
'password': passwd,
'email': ret['login'],
'domain': ret['domain']
})
self.set_status(http.CREATED)
|
import os
import json
import httplib as http
import tornado.web
import tornado.ioloop
from dinosaurs import api
from dinosaurs import settings
class SingleStatic(tornado.web.StaticFileHandler):
def initialize(self, path):
self.dirname, self.filename = os.path.split(path)
super(SingleStatic, self).initialize(self.dirname)
def get(self, path=None, include_body=True):
super(SingleStatic, self).get(self.filename, include_body)
class DomainAPIHandler(tornado.web.RequestHandler):
def get(self):
self.write({
'availableDomains': settings.DOMAINS.keys()
})
class EmailAPIHandler(tornado.web.RequestHandler):
def write_error(self, status_code, **kwargs):
self.finish({
"code": status_code,
"message": self._reason,
})
def post(self):
try:
req_json = json.loads(self.request.body)
except ValueError:
raise tornado.web.HTTPError(http.BAD_REQUEST)
email = req_json.get('email')
domain = req_json.get('domain')
connection = api.get_connection(domain)
if not email or not domain or not connection:
raise tornado.web.HTTPError(http.BAD_REQUEST)
try:
ret, passwd = api.create_email(connection, email)
except api.YandexException as e:
if e.message != 'occupied':
raise
self.write({})
raise tornado.web.HTTPError(http.FORBIDDEN)
self.write({
'password': passwd,
'email': ret['login'],
'domain': ret['domain']
})
self.set_status(http.CREATED)
|
Return errors in JSON only
|
Return errors in JSON only
|
Python
|
mit
|
chrisseto/dinosaurs.sexy,chrisseto/dinosaurs.sexy
|
import os
import json
import httplib as http
import tornado.web
import tornado.ioloop
from dinosaurs import api
from dinosaurs import settings
class SingleStatic(tornado.web.StaticFileHandler):
def initialize(self, path):
self.dirname, self.filename = os.path.split(path)
super(SingleStatic, self).initialize(self.dirname)
def get(self, path=None, include_body=True):
super(SingleStatic, self).get(self.filename, include_body)
class DomainAPIHandler(tornado.web.RequestHandler):
def get(self):
self.write({
'availableDomains': settings.DOMAINS.keys()
})
class EmailAPIHandler(tornado.web.RequestHandler):
+ def write_error(self, status_code, **kwargs):
+ self.finish({
+ "code": status_code,
+ "message": self._reason,
+ })
+
def post(self):
try:
req_json = json.loads(self.request.body)
except ValueError:
raise tornado.web.HTTPError(http.BAD_REQUEST)
email = req_json.get('email')
domain = req_json.get('domain')
connection = api.get_connection(domain)
if not email or not domain or not connection:
raise tornado.web.HTTPError(http.BAD_REQUEST)
+ try:
- ret, passwd = api.create_email(connection, email)
+ ret, passwd = api.create_email(connection, email)
+ except api.YandexException as e:
+ if e.message != 'occupied':
+ raise
+ self.write({})
+ raise tornado.web.HTTPError(http.FORBIDDEN)
self.write({
'password': passwd,
'email': ret['login'],
'domain': ret['domain']
})
self.set_status(http.CREATED)
|
Return errors in JSON only
|
## Code Before:
import os
import json
import httplib as http
import tornado.web
import tornado.ioloop
from dinosaurs import api
from dinosaurs import settings
class SingleStatic(tornado.web.StaticFileHandler):
def initialize(self, path):
self.dirname, self.filename = os.path.split(path)
super(SingleStatic, self).initialize(self.dirname)
def get(self, path=None, include_body=True):
super(SingleStatic, self).get(self.filename, include_body)
class DomainAPIHandler(tornado.web.RequestHandler):
def get(self):
self.write({
'availableDomains': settings.DOMAINS.keys()
})
class EmailAPIHandler(tornado.web.RequestHandler):
def post(self):
try:
req_json = json.loads(self.request.body)
except ValueError:
raise tornado.web.HTTPError(http.BAD_REQUEST)
email = req_json.get('email')
domain = req_json.get('domain')
connection = api.get_connection(domain)
if not email or not domain or not connection:
raise tornado.web.HTTPError(http.BAD_REQUEST)
ret, passwd = api.create_email(connection, email)
self.write({
'password': passwd,
'email': ret['login'],
'domain': ret['domain']
})
self.set_status(http.CREATED)
## Instruction:
Return errors in JSON only
## Code After:
import os
import json
import httplib as http
import tornado.web
import tornado.ioloop
from dinosaurs import api
from dinosaurs import settings
class SingleStatic(tornado.web.StaticFileHandler):
def initialize(self, path):
self.dirname, self.filename = os.path.split(path)
super(SingleStatic, self).initialize(self.dirname)
def get(self, path=None, include_body=True):
super(SingleStatic, self).get(self.filename, include_body)
class DomainAPIHandler(tornado.web.RequestHandler):
def get(self):
self.write({
'availableDomains': settings.DOMAINS.keys()
})
class EmailAPIHandler(tornado.web.RequestHandler):
def write_error(self, status_code, **kwargs):
self.finish({
"code": status_code,
"message": self._reason,
})
def post(self):
try:
req_json = json.loads(self.request.body)
except ValueError:
raise tornado.web.HTTPError(http.BAD_REQUEST)
email = req_json.get('email')
domain = req_json.get('domain')
connection = api.get_connection(domain)
if not email or not domain or not connection:
raise tornado.web.HTTPError(http.BAD_REQUEST)
try:
ret, passwd = api.create_email(connection, email)
except api.YandexException as e:
if e.message != 'occupied':
raise
self.write({})
raise tornado.web.HTTPError(http.FORBIDDEN)
self.write({
'password': passwd,
'email': ret['login'],
'domain': ret['domain']
})
self.set_status(http.CREATED)
|
# ... existing code ...
class EmailAPIHandler(tornado.web.RequestHandler):
def write_error(self, status_code, **kwargs):
self.finish({
"code": status_code,
"message": self._reason,
})
def post(self):
# ... modified code ...
try:
ret, passwd = api.create_email(connection, email)
except api.YandexException as e:
if e.message != 'occupied':
raise
self.write({})
raise tornado.web.HTTPError(http.FORBIDDEN)
# ... rest of the code ...
|
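With write_error in place, error responses become JSON instead of Tornado's default HTML page; a hypothetical exchange against this handler (the route path is made up):

    # POST /api/email  {"email": "taken", "domain": "dinosaurs.sexy"}
    # <- 403 {"code": 403, "message": "Forbidden"}
    #
    # POST /api/email  not-json
    # <- 400 {"code": 400, "message": "Bad Request"}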
d6461896dec112caad81490e1a6d055a3d4c9a95
|
db.py
|
db.py
|
from pymongo import MongoClient
from settings import *
client = MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
|
from pymongo import mongo_client
from pymongo import database
from pymongo import collection
from settings import *
class ModuleMongoClient(mongo_client.MongoClient):
def __getattr__(self, name):
attr = super(ModuleMongoClient, self).__getattr__(name)
if isinstance(attr, database.Database):
            return ModuleDatabase(self, name)
return attr
class ModuleDatabase(database.Database):
def __getattr__(self, name):
attr = super(ModuleDatabase, self).__getattr__(name)
if isinstance(attr, collection.Collection):
return ModuleCollection(self, name)
return attr
class ModuleCollection(collection.Collection):
def __init__(self, database, name, create=False, **kwargs):
_name = 'module_%s_%s' % (self.__class__.__name__, name)
super(ModuleCollection, self).__init__(database=database,
name=_name,
create=create)
client = mongo_client.MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
module_client = ModuleMongoClient(MONGO_HOST, MONGO_PORT)
module_db = client[MONGO_DB]
|
Add custom wrapper code to pymongo for Modules
|
Add custom wrapper code to pymongo for Modules
|
Python
|
mit
|
billyvg/piebot
|
- from pymongo import MongoClient
+ from pymongo import mongo_client
+ from pymongo import database
+ from pymongo import collection
from settings import *
+ class ModuleMongoClient(mongo_client.MongoClient):
+ def __getattr__(self, name):
+ attr = super(ModuleMongoClient, self).__getattr__(name)
+ if isinstance(attr, database.Database):
+ return ModuleDatabase(self, name)
+ return attr
+
+ class ModuleDatabase(database.Database):
+ def __getattr__(self, name):
+ attr = super(ModuleDatabase, self).__getattr__(name)
+ if isinstance(attr, collection.Collection):
+ return ModuleCollection(self, name)
+ return attr
+
+ class ModuleCollection(collection.Collection):
+ def __init__(self, database, name, create=False, **kwargs):
+ _name = 'module_%s_%s' % (self.__class__.__name__, name)
+ super(ModuleCollection, self).__init__(database=database,
+ name=_name,
+ create=create)
+
- client = MongoClient(MONGO_HOST, MONGO_PORT)
+ client = mongo_client.MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
+ module_client = ModuleMongoClient(MONGO_HOST, MONGO_PORT)
+ module_db = client[MONGO_DB]
|
Add custom wrapper code to pymongo for Modules
|
## Code Before:
from pymongo import MongoClient
from settings import *
client = MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
## Instruction:
Add custom wrapper code to pymongo for Modules
## Code After:
from pymongo import mongo_client
from pymongo import database
from pymongo import collection
from settings import *
class ModuleMongoClient(mongo_client.MongoClient):
def __getattr__(self, name):
attr = super(ModuleMongoClient, self).__getattr__(name)
if isinstance(attr, database.Database):
            return ModuleDatabase(self, name)
return attr
class ModuleDatabase(database.Database):
def __getattr__(self, name):
attr = super(ModuleDatabase, self).__getattr__(name)
if isinstance(attr, collection.Collection):
return ModuleCollection(self, name)
return attr
class ModuleCollection(collection.Collection):
def __init__(self, database, name, create=False, **kwargs):
_name = 'module_%s_%s' % (self.__class__.__name__, name)
super(ModuleCollection, self).__init__(database=database,
name=_name,
create=create)
client = mongo_client.MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
module_client = ModuleMongoClient(MONGO_HOST, MONGO_PORT)
module_db = client[MONGO_DB]
|
...
from pymongo import mongo_client
from pymongo import database
from pymongo import collection
...
class ModuleMongoClient(mongo_client.MongoClient):
def __getattr__(self, name):
attr = super(ModuleMongoClient, self).__getattr__(name)
if isinstance(attr, database.Database):
            return ModuleDatabase(self, name)
return attr
class ModuleDatabase(database.Database):
def __getattr__(self, name):
attr = super(ModuleDatabase, self).__getattr__(name)
if isinstance(attr, collection.Collection):
return ModuleCollection(self, name)
return attr
class ModuleCollection(collection.Collection):
def __init__(self, database, name, create=False, **kwargs):
_name = 'module_%s_%s' % (self.__class__.__name__, name)
super(ModuleCollection, self).__init__(database=database,
name=_name,
create=create)
client = mongo_client.MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
module_client = ModuleMongoClient(MONGO_HOST, MONGO_PORT)
module_db = client[MONGO_DB]
...
|
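A sketch of the naming behaviour of ModuleCollection as committed; self.__class__.__name__ is the wrapper class itself, so every collection lands under the module_ModuleCollection_ prefix. Note also that module_db is built from the plain client rather than module_client, which looks unintended, so the wrapper only engages when used explicitly:

    coll = ModuleCollection(module_db, 'users')
    print(coll.name)  # module_ModuleCollection_users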