commit
stringlengths 40
40
| old_file
stringlengths 4
106
| new_file
stringlengths 4
106
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
2.95k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
43k
| ndiff
stringlengths 52
3.31k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| diff
stringlengths 49
3.61k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
437ed5ee5e919186eabd1d71b0c1949adc1cf378
|
src/orca/gnome-terminal.py
|
src/orca/gnome-terminal.py
|
import a11y
import speech
def onTextInserted (e):
if e.source.role != "terminal":
return
speech.say ("default", e.any_data)
def onTextDeleted (event):
"""Called whenever text is deleted from an object.
Arguments:
- event: the Event
"""
# Ignore text deletions from non-focused objects, unless the
# currently focused object is the parent of the object from which
# text was deleted
#
if (event.source != a11y.focusedObject) \
and (event.source.parent != a11y.focusedObject):
pass
else:
brlUpdateText (event.source)
|
import a11y
import speech
import default
def onTextInserted (e):
if e.source.role != "terminal":
return
speech.say ("default", e.any_data)
def onTextDeleted (event):
"""Called whenever text is deleted from an object.
Arguments:
- event: the Event
"""
# Ignore text deletions from non-focused objects, unless the
# currently focused object is the parent of the object from which
# text was deleted
#
if (event.source != a11y.focusedObject) \
and (event.source.parent != a11y.focusedObject):
pass
else:
default.brlUpdateText (event.source)
|
Call default.brlUpdateText instead of brlUpdateText (which was undefined)
|
Call default.brlUpdateText instead of brlUpdateText (which was undefined)
|
Python
|
lgpl-2.1
|
GNOME/orca,h4ck3rm1k3/orca-sonar,pvagner/orca,h4ck3rm1k3/orca-sonar,GNOME/orca,pvagner/orca,h4ck3rm1k3/orca-sonar,chrys87/orca-beep,chrys87/orca-beep,pvagner/orca,pvagner/orca,chrys87/orca-beep,GNOME/orca,chrys87/orca-beep,GNOME/orca
|
import a11y
import speech
+ import default
def onTextInserted (e):
if e.source.role != "terminal":
return
speech.say ("default", e.any_data)
def onTextDeleted (event):
"""Called whenever text is deleted from an object.
Arguments:
- event: the Event
"""
# Ignore text deletions from non-focused objects, unless the
# currently focused object is the parent of the object from which
# text was deleted
#
if (event.source != a11y.focusedObject) \
and (event.source.parent != a11y.focusedObject):
pass
else:
- brlUpdateText (event.source)
+ default.brlUpdateText (event.source)
|
Call default.brlUpdateText instead of brlUpdateText (which was undefined)
|
## Code Before:
import a11y
import speech
def onTextInserted (e):
if e.source.role != "terminal":
return
speech.say ("default", e.any_data)
def onTextDeleted (event):
"""Called whenever text is deleted from an object.
Arguments:
- event: the Event
"""
# Ignore text deletions from non-focused objects, unless the
# currently focused object is the parent of the object from which
# text was deleted
#
if (event.source != a11y.focusedObject) \
and (event.source.parent != a11y.focusedObject):
pass
else:
brlUpdateText (event.source)
## Instruction:
Call default.brlUpdateText instead of brlUpdateText (which was undefined)
## Code After:
import a11y
import speech
import default
def onTextInserted (e):
if e.source.role != "terminal":
return
speech.say ("default", e.any_data)
def onTextDeleted (event):
"""Called whenever text is deleted from an object.
Arguments:
- event: the Event
"""
# Ignore text deletions from non-focused objects, unless the
# currently focused object is the parent of the object from which
# text was deleted
#
if (event.source != a11y.focusedObject) \
and (event.source.parent != a11y.focusedObject):
pass
else:
default.brlUpdateText (event.source)
|
import a11y
import speech
+ import default
def onTextInserted (e):
if e.source.role != "terminal":
return
speech.say ("default", e.any_data)
def onTextDeleted (event):
"""Called whenever text is deleted from an object.
Arguments:
- event: the Event
"""
# Ignore text deletions from non-focused objects, unless the
# currently focused object is the parent of the object from which
# text was deleted
#
if (event.source != a11y.focusedObject) \
and (event.source.parent != a11y.focusedObject):
pass
else:
- brlUpdateText (event.source)
+ default.brlUpdateText (event.source)
? ++++++++
|
9931bd1d5459a983717fb502826f3cca87225b96
|
src/qrl/services/grpcHelper.py
|
src/qrl/services/grpcHelper.py
|
from grpc import StatusCode
from qrl.core.misc import logger
class GrpcExceptionWrapper(object):
def __init__(self, response_type, state_code=StatusCode.UNKNOWN):
self.response_type = response_type
self.state_code = state_code
def _set_context(self, context, exception):
if context is not None:
context.set_code(self.state_code)
context.set_details(str(exception))
def __call__(self, f):
def wrap_f(caller_self, request, context):
try:
return f(caller_self, request, context)
except ValueError as e:
self._set_context(context, e)
logger.info(str(e))
return self.response_type()
except Exception as e:
self._set_context(context, e)
logger.exception(e)
return self.response_type()
return wrap_f
|
from grpc import StatusCode
from qrl.core.misc import logger
class GrpcExceptionWrapper(object):
def __init__(self, response_type, state_code=StatusCode.UNKNOWN):
self.response_type = response_type
self.state_code = state_code
def _set_context(self, context, exception):
if context is not None:
context.set_code(self.state_code)
context.set_details(str(exception))
def __call__(self, f):
def wrap_f(caller_self, request, context):
try:
return f(caller_self, request, context)
except ValueError as e:
context.set_code(StatusCode.INVALID_ARGUMENT)
self._set_context(context, e)
logger.info(str(e))
return self.response_type()
except Exception as e:
self._set_context(context, e)
logger.exception(e)
return self.response_type()
return wrap_f
|
Set code to Invalid argument for ValueErrors
|
Set code to Invalid argument for ValueErrors
|
Python
|
mit
|
jleni/QRL,cyyber/QRL,jleni/QRL,cyyber/QRL,theQRL/QRL,randomshinichi/QRL,theQRL/QRL,randomshinichi/QRL
|
from grpc import StatusCode
from qrl.core.misc import logger
class GrpcExceptionWrapper(object):
def __init__(self, response_type, state_code=StatusCode.UNKNOWN):
self.response_type = response_type
self.state_code = state_code
def _set_context(self, context, exception):
if context is not None:
context.set_code(self.state_code)
context.set_details(str(exception))
def __call__(self, f):
def wrap_f(caller_self, request, context):
try:
return f(caller_self, request, context)
except ValueError as e:
+ context.set_code(StatusCode.INVALID_ARGUMENT)
self._set_context(context, e)
logger.info(str(e))
return self.response_type()
except Exception as e:
self._set_context(context, e)
logger.exception(e)
return self.response_type()
return wrap_f
|
Set code to Invalid argument for ValueErrors
|
## Code Before:
from grpc import StatusCode
from qrl.core.misc import logger
class GrpcExceptionWrapper(object):
def __init__(self, response_type, state_code=StatusCode.UNKNOWN):
self.response_type = response_type
self.state_code = state_code
def _set_context(self, context, exception):
if context is not None:
context.set_code(self.state_code)
context.set_details(str(exception))
def __call__(self, f):
def wrap_f(caller_self, request, context):
try:
return f(caller_self, request, context)
except ValueError as e:
self._set_context(context, e)
logger.info(str(e))
return self.response_type()
except Exception as e:
self._set_context(context, e)
logger.exception(e)
return self.response_type()
return wrap_f
## Instruction:
Set code to Invalid argument for ValueErrors
## Code After:
from grpc import StatusCode
from qrl.core.misc import logger
class GrpcExceptionWrapper(object):
def __init__(self, response_type, state_code=StatusCode.UNKNOWN):
self.response_type = response_type
self.state_code = state_code
def _set_context(self, context, exception):
if context is not None:
context.set_code(self.state_code)
context.set_details(str(exception))
def __call__(self, f):
def wrap_f(caller_self, request, context):
try:
return f(caller_self, request, context)
except ValueError as e:
context.set_code(StatusCode.INVALID_ARGUMENT)
self._set_context(context, e)
logger.info(str(e))
return self.response_type()
except Exception as e:
self._set_context(context, e)
logger.exception(e)
return self.response_type()
return wrap_f
|
from grpc import StatusCode
from qrl.core.misc import logger
class GrpcExceptionWrapper(object):
def __init__(self, response_type, state_code=StatusCode.UNKNOWN):
self.response_type = response_type
self.state_code = state_code
def _set_context(self, context, exception):
if context is not None:
context.set_code(self.state_code)
context.set_details(str(exception))
def __call__(self, f):
def wrap_f(caller_self, request, context):
try:
return f(caller_self, request, context)
except ValueError as e:
+ context.set_code(StatusCode.INVALID_ARGUMENT)
self._set_context(context, e)
logger.info(str(e))
return self.response_type()
except Exception as e:
self._set_context(context, e)
logger.exception(e)
return self.response_type()
return wrap_f
|
94a944b01953ed75bfbefbd11ed62ca438cd9200
|
accounts/tests/test_models.py
|
accounts/tests/test_models.py
|
from django.test import TestCase
from django.contrib.auth import get_user_model
USER = get_user_model()
TEST_EMAIL = '[email protected]'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = USER(email=TEST_EMAIL)
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = USER()
self.assertTrue(user.is_authenticated)
|
from django.test import TestCase
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
USER = get_user_model()
TEST_EMAIL = '[email protected]'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = USER(email=TEST_EMAIL)
user.full_clean()
def test_user_invalid_without_email(self):
"""Should raise if the user model requires an email.
"""
with self.assertRaises(ValidationError):
user = USER()
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = USER()
self.assertTrue(user.is_authenticated)
|
Add test for unsupplied email for user model
|
Add test for unsupplied email for user model
|
Python
|
mit
|
randomic/aniauth-tdd,randomic/aniauth-tdd
|
from django.test import TestCase
from django.contrib.auth import get_user_model
+ from django.core.exceptions import ValidationError
USER = get_user_model()
TEST_EMAIL = '[email protected]'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = USER(email=TEST_EMAIL)
user.full_clean()
+ def test_user_invalid_without_email(self):
+ """Should raise if the user model requires an email.
+
+ """
+ with self.assertRaises(ValidationError):
+ user = USER()
+ user.full_clean()
+
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = USER()
self.assertTrue(user.is_authenticated)
|
Add test for unsupplied email for user model
|
## Code Before:
from django.test import TestCase
from django.contrib.auth import get_user_model
USER = get_user_model()
TEST_EMAIL = '[email protected]'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = USER(email=TEST_EMAIL)
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = USER()
self.assertTrue(user.is_authenticated)
## Instruction:
Add test for unsupplied email for user model
## Code After:
from django.test import TestCase
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
USER = get_user_model()
TEST_EMAIL = '[email protected]'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = USER(email=TEST_EMAIL)
user.full_clean()
def test_user_invalid_without_email(self):
"""Should raise if the user model requires an email.
"""
with self.assertRaises(ValidationError):
user = USER()
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = USER()
self.assertTrue(user.is_authenticated)
|
from django.test import TestCase
from django.contrib.auth import get_user_model
+ from django.core.exceptions import ValidationError
USER = get_user_model()
TEST_EMAIL = '[email protected]'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = USER(email=TEST_EMAIL)
user.full_clean()
+ def test_user_invalid_without_email(self):
+ """Should raise if the user model requires an email.
+
+ """
+ with self.assertRaises(ValidationError):
+ user = USER()
+ user.full_clean()
+
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = USER()
self.assertTrue(user.is_authenticated)
|
d2fb1f22be6c6434873f2bcafb6b8a9b714acde9
|
website/archiver/decorators.py
|
website/archiver/decorators.py
|
import functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
from website.archiver import utils
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
utils.handle_archive_fail(
ARCHIVER_UNCAUGHT_ERROR,
registration.registered_from,
registration,
registration.registered_user,
str(e)
)
return wrapped
|
import functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
from website.archiver import signals
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
signals.send.archive_fail(
registration,
ARCHIVER_UNCAUGHT_ERROR,
[str(e)]
)
return wrapped
|
Use fail signal in fail_archive_on_error decorator
|
Use fail signal in fail_archive_on_error decorator
|
Python
|
apache-2.0
|
amyshi188/osf.io,caneruguz/osf.io,TomHeatwole/osf.io,SSJohns/osf.io,mluke93/osf.io,DanielSBrown/osf.io,Nesiehr/osf.io,jeffreyliu3230/osf.io,chrisseto/osf.io,acshi/osf.io,mattclark/osf.io,billyhunt/osf.io,caneruguz/osf.io,cosenal/osf.io,SSJohns/osf.io,njantrania/osf.io,mattclark/osf.io,alexschiller/osf.io,samchrisinger/osf.io,HarryRybacki/osf.io,MerlinZhang/osf.io,mluo613/osf.io,TomBaxter/osf.io,mattclark/osf.io,kch8qx/osf.io,baylee-d/osf.io,chennan47/osf.io,asanfilippo7/osf.io,asanfilippo7/osf.io,amyshi188/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,brandonPurvis/osf.io,acshi/osf.io,bdyetton/prettychart,danielneis/osf.io,brianjgeiger/osf.io,kch8qx/osf.io,emetsger/osf.io,reinaH/osf.io,ticklemepierce/osf.io,felliott/osf.io,hmoco/osf.io,SSJohns/osf.io,danielneis/osf.io,wearpants/osf.io,HalcyonChimera/osf.io,jmcarp/osf.io,TomHeatwole/osf.io,ZobairAlijan/osf.io,mfraezz/osf.io,cldershem/osf.io,adlius/osf.io,laurenrevere/osf.io,jolene-esposito/osf.io,sloria/osf.io,Johnetordoff/osf.io,doublebits/osf.io,MerlinZhang/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,billyhunt/osf.io,crcresearch/osf.io,njantrania/osf.io,brianjgeiger/osf.io,TomBaxter/osf.io,mfraezz/osf.io,hmoco/osf.io,jinluyuan/osf.io,monikagrabowska/osf.io,danielneis/osf.io,aaxelb/osf.io,Nesiehr/osf.io,caseyrygt/osf.io,kwierman/osf.io,cldershem/osf.io,brandonPurvis/osf.io,cwisecarver/osf.io,fabianvf/osf.io,amyshi188/osf.io,petermalcolm/osf.io,adlius/osf.io,rdhyee/osf.io,brandonPurvis/osf.io,laurenrevere/osf.io,rdhyee/osf.io,samanehsan/osf.io,haoyuchen1992/osf.io,asanfilippo7/osf.io,dplorimer/osf,leb2dg/osf.io,mfraezz/osf.io,abought/osf.io,amyshi188/osf.io,doublebits/osf.io,sbt9uc/osf.io,lyndsysimon/osf.io,dplorimer/osf,caneruguz/osf.io,laurenrevere/osf.io,ticklemepierce/osf.io,lyndsysimon/osf.io,DanielSBrown/osf.io,jmcarp/osf.io,baylee-d/osf.io,GageGaskins/osf.io,chennan47/osf.io,fabianvf/osf.io,cldershem/osf.io,jmcarp/osf.io,jnayak1/osf.io,binoculars/osf.io,zamattiac/osf.io,acshi/osf.io,crcr
esearch/osf.io,jinluyuan/osf.io,jnayak1/osf.io,binoculars/osf.io,Ghalko/osf.io,jinluyuan/osf.io,cosenal/osf.io,RomanZWang/osf.io,wearpants/osf.io,cslzchen/osf.io,ticklemepierce/osf.io,wearpants/osf.io,samchrisinger/osf.io,SSJohns/osf.io,jeffreyliu3230/osf.io,abought/osf.io,zachjanicki/osf.io,rdhyee/osf.io,DanielSBrown/osf.io,bdyetton/prettychart,MerlinZhang/osf.io,pattisdr/osf.io,chennan47/osf.io,bdyetton/prettychart,caseyrygt/osf.io,samanehsan/osf.io,pattisdr/osf.io,reinaH/osf.io,sloria/osf.io,caseyrollins/osf.io,zamattiac/osf.io,bdyetton/prettychart,caseyrollins/osf.io,TomHeatwole/osf.io,jeffreyliu3230/osf.io,cldershem/osf.io,mluo613/osf.io,KAsante95/osf.io,lyndsysimon/osf.io,zamattiac/osf.io,ZobairAlijan/osf.io,petermalcolm/osf.io,billyhunt/osf.io,chrisseto/osf.io,GageGaskins/osf.io,RomanZWang/osf.io,Ghalko/osf.io,petermalcolm/osf.io,zachjanicki/osf.io,TomHeatwole/osf.io,ckc6cz/osf.io,njantrania/osf.io,billyhunt/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,ckc6cz/osf.io,alexschiller/osf.io,DanielSBrown/osf.io,leb2dg/osf.io,cwisecarver/osf.io,billyhunt/osf.io,GageGaskins/osf.io,dplorimer/osf,arpitar/osf.io,dplorimer/osf,baylee-d/osf.io,adlius/osf.io,monikagrabowska/osf.io,HalcyonChimera/osf.io,doublebits/osf.io,kwierman/osf.io,adlius/osf.io,aaxelb/osf.io,jnayak1/osf.io,haoyuchen1992/osf.io,KAsante95/osf.io,cwisecarver/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,MerlinZhang/osf.io,RomanZWang/osf.io,RomanZWang/osf.io,ticklemepierce/osf.io,pattisdr/osf.io,erinspace/osf.io,arpitar/osf.io,icereval/osf.io,felliott/osf.io,KAsante95/osf.io,danielneis/osf.io,leb2dg/osf.io,caseyrygt/osf.io,GageGaskins/osf.io,petermalcolm/osf.io,mluo613/osf.io,KAsante95/osf.io,HalcyonChimera/osf.io,jeffreyliu3230/osf.io,zachjanicki/osf.io,zamattiac/osf.io,HarryRybacki/osf.io,ZobairAlijan/osf.io,cwisecarver/osf.io,njantrania/osf.io,chrisseto/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,emetsger/osf.io,cosenal/osf.io,sbt9uc/osf.io,RomanZWang/osf.io,hmoco/o
sf.io,reinaH/osf.io,Ghalko/osf.io,icereval/osf.io,cslzchen/osf.io,arpitar/osf.io,reinaH/osf.io,zachjanicki/osf.io,jolene-esposito/osf.io,fabianvf/osf.io,alexschiller/osf.io,GageGaskins/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,samchrisinger/osf.io,rdhyee/osf.io,Ghalko/osf.io,Johnetordoff/osf.io,mluo613/osf.io,brandonPurvis/osf.io,haoyuchen1992/osf.io,brianjgeiger/osf.io,samchrisinger/osf.io,caseyrygt/osf.io,erinspace/osf.io,kwierman/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,HarryRybacki/osf.io,KAsante95/osf.io,leb2dg/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,kwierman/osf.io,caseyrollins/osf.io,sbt9uc/osf.io,samanehsan/osf.io,wearpants/osf.io,abought/osf.io,ckc6cz/osf.io,crcresearch/osf.io,chrisseto/osf.io,lyndsysimon/osf.io,jolene-esposito/osf.io,fabianvf/osf.io,binoculars/osf.io,kch8qx/osf.io,icereval/osf.io,mluke93/osf.io,Johnetordoff/osf.io,jmcarp/osf.io,mluo613/osf.io,acshi/osf.io,asanfilippo7/osf.io,saradbowman/osf.io,Nesiehr/osf.io,kch8qx/osf.io,mluke93/osf.io,mfraezz/osf.io,TomBaxter/osf.io,samanehsan/osf.io,mluke93/osf.io,arpitar/osf.io,jolene-esposito/osf.io,alexschiller/osf.io,cslzchen/osf.io,sbt9uc/osf.io,ZobairAlijan/osf.io,haoyuchen1992/osf.io,jinluyuan/osf.io,alexschiller/osf.io,jnayak1/osf.io,cosenal/osf.io,sloria/osf.io,HarryRybacki/osf.io,ckc6cz/osf.io,doublebits/osf.io,saradbowman/osf.io,abought/osf.io,doublebits/osf.io,kch8qx/osf.io,Johnetordoff/osf.io,emetsger/osf.io,emetsger/osf.io,acshi/osf.io,aaxelb/osf.io
|
import functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
- from website.archiver import utils
+ from website.archiver import signals
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
- utils.handle_archive_fail(
+ signals.send.archive_fail(
+ registration,
ARCHIVER_UNCAUGHT_ERROR,
- registration.registered_from,
- registration,
- registration.registered_user,
- str(e)
+ [str(e)]
)
return wrapped
|
Use fail signal in fail_archive_on_error decorator
|
## Code Before:
import functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
from website.archiver import utils
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
utils.handle_archive_fail(
ARCHIVER_UNCAUGHT_ERROR,
registration.registered_from,
registration,
registration.registered_user,
str(e)
)
return wrapped
## Instruction:
Use fail signal in fail_archive_on_error decorator
## Code After:
import functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
from website.archiver import signals
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
signals.send.archive_fail(
registration,
ARCHIVER_UNCAUGHT_ERROR,
[str(e)]
)
return wrapped
|
import functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
- from website.archiver import utils
? ^^
+ from website.archiver import signals
? ^ +++
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
- utils.handle_archive_fail(
? ^^ ^^ ^^^
+ signals.send.archive_fail(
? ^ +++ ^^ ^
+ registration,
ARCHIVER_UNCAUGHT_ERROR,
- registration.registered_from,
- registration,
- registration.registered_user,
- str(e)
+ [str(e)]
? + +
)
return wrapped
|
99c7c87d1b84b70962250e362dcfff75e77cb3fe
|
txircd/modules/cmode_k.py
|
txircd/modules/cmode_k.py
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def checkUnset(self, user, target, param):
if param == target.mode["k"]:
return True
return False
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")
|
Check that the password parameter when unsetting mode k matches the password that is set
|
Check that the password parameter when unsetting mode k matches the password that is set
|
Python
|
bsd-3-clause
|
Heufneutje/txircd,ElementalAlchemist/txircd,DesertBus/txircd
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
+ def checkUnset(self, user, target, param):
+ if param == target.mode["k"]:
+ return True
+ return False
+
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")
|
Check that the password parameter when unsetting mode k matches the password that is set
|
## Code Before:
from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")
## Instruction:
Check that the password parameter when unsetting mode k matches the password that is set
## Code After:
from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def checkUnset(self, user, target, param):
if param == target.mode["k"]:
return True
return False
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
+ def checkUnset(self, user, target, param):
+ if param == target.mode["k"]:
+ return True
+ return False
+
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")
|
3feccc140c0371becccb3f80bef00d30b4bc15bf
|
corehq/sql_accessors/migrations/0056_add_hashlib_functions.py
|
corehq/sql_accessors/migrations/0056_add_hashlib_functions.py
|
from __future__ import absolute_import, unicode_literals
from django.db import migrations
from django.conf import settings
from corehq.sql_db.operations import HqRunSQL, noop_migration
class Migration(migrations.Migration):
dependencies = [
('sql_accessors', '0055_set_form_modified_on'),
]
operations = [
# this originally installed the hashlib extension in production as well
# but commcare-cloud does that where possible already
# and Amazon RDS doesn't allow it
HqRunSQL(
'CREATE EXTENSION IF NOT EXISTS hashlib',
'DROP EXTENSION hashlib'
)
if settings.UNIT_TESTING else noop_migration()
]
|
from __future__ import absolute_import, unicode_literals
from django.db import migrations
from django.conf import settings
from corehq.sql_db.operations import HqRunSQL, noop_migration
class Migration(migrations.Migration):
dependencies = [
('sql_accessors', '0055_set_form_modified_on'),
]
operations = [
# this originally installed the hashlib extension in production as well
# but commcare-cloud does that where possible already
# and Amazon RDS doesn't allow it
# Todo: Move this to testing harness, doesn't really belong here.
# See https://github.com/dimagi/commcare-hq/pull/21627#pullrequestreview-149807976
HqRunSQL(
'CREATE EXTENSION IF NOT EXISTS hashlib',
'DROP EXTENSION hashlib'
)
if settings.UNIT_TESTING else noop_migration()
]
|
Add comment about moving hashlib extention creation to test harness
|
Add comment about moving hashlib extention creation to test harness
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from __future__ import absolute_import, unicode_literals
from django.db import migrations
from django.conf import settings
from corehq.sql_db.operations import HqRunSQL, noop_migration
class Migration(migrations.Migration):
dependencies = [
('sql_accessors', '0055_set_form_modified_on'),
]
operations = [
# this originally installed the hashlib extension in production as well
# but commcare-cloud does that where possible already
# and Amazon RDS doesn't allow it
+ # Todo: Move this to testing harness, doesn't really belong here.
+ # See https://github.com/dimagi/commcare-hq/pull/21627#pullrequestreview-149807976
HqRunSQL(
'CREATE EXTENSION IF NOT EXISTS hashlib',
'DROP EXTENSION hashlib'
)
if settings.UNIT_TESTING else noop_migration()
]
|
Add comment about moving hashlib extention creation to test harness
|
## Code Before:
from __future__ import absolute_import, unicode_literals
from django.db import migrations
from django.conf import settings
from corehq.sql_db.operations import HqRunSQL, noop_migration
class Migration(migrations.Migration):
dependencies = [
('sql_accessors', '0055_set_form_modified_on'),
]
operations = [
# this originally installed the hashlib extension in production as well
# but commcare-cloud does that where possible already
# and Amazon RDS doesn't allow it
HqRunSQL(
'CREATE EXTENSION IF NOT EXISTS hashlib',
'DROP EXTENSION hashlib'
)
if settings.UNIT_TESTING else noop_migration()
]
## Instruction:
Add comment about moving hashlib extention creation to test harness
## Code After:
from __future__ import absolute_import, unicode_literals
from django.db import migrations
from django.conf import settings
from corehq.sql_db.operations import HqRunSQL, noop_migration
class Migration(migrations.Migration):
dependencies = [
('sql_accessors', '0055_set_form_modified_on'),
]
operations = [
# this originally installed the hashlib extension in production as well
# but commcare-cloud does that where possible already
# and Amazon RDS doesn't allow it
# Todo: Move this to testing harness, doesn't really belong here.
# See https://github.com/dimagi/commcare-hq/pull/21627#pullrequestreview-149807976
HqRunSQL(
'CREATE EXTENSION IF NOT EXISTS hashlib',
'DROP EXTENSION hashlib'
)
if settings.UNIT_TESTING else noop_migration()
]
|
from __future__ import absolute_import, unicode_literals
from django.db import migrations
from django.conf import settings
from corehq.sql_db.operations import HqRunSQL, noop_migration
class Migration(migrations.Migration):
dependencies = [
('sql_accessors', '0055_set_form_modified_on'),
]
operations = [
# this originally installed the hashlib extension in production as well
# but commcare-cloud does that where possible already
# and Amazon RDS doesn't allow it
+ # Todo: Move this to testing harness, doesn't really belong here.
+ # See https://github.com/dimagi/commcare-hq/pull/21627#pullrequestreview-149807976
HqRunSQL(
'CREATE EXTENSION IF NOT EXISTS hashlib',
'DROP EXTENSION hashlib'
)
if settings.UNIT_TESTING else noop_migration()
]
|
8c81f606499ebadddaf2a362bc8845eb69a21e8d
|
lds-gen.py
|
lds-gen.py
|
import sys
import re
if __name__ == '__main__':
funcs = list()
last_line = ''
for line in sys.stdin:
m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
if m:
sym = m.group(2)
m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
if not m2:
funcs.append(sym)
last_line = ''
else:
last_line = line
print('# Automatically generated by lds-gen.py - DO NOT EDIT')
print('{\n global:')
if len(funcs) == 0:
print(' *;')
else:
for f in sorted(funcs):
print(' %s;' % f)
print('};')
|
import sys
import re
if __name__ == '__main__':
funcs = list()
last_line = ''
for line in sys.stdin:
m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
if m:
sym = m.group(2)
m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
if not m2:
funcs.append(sym)
last_line = ''
else:
last_line = line
print('# Automatically generated by lds-gen.py - DO NOT EDIT')
print('{\n global:')
if len(funcs) == 0:
print(' *;')
else:
for f in sorted(funcs):
print(' %s;' % f)
print('local:\n *;')
print('};')
|
Stop exporting internal symbols from the shared libraries.
|
Stop exporting internal symbols from the shared libraries.
|
Python
|
bsd-2-clause
|
orthrus/librdkafka,klonikar/librdkafka,klonikar/librdkafka,senior7515/librdkafka,janmejay/librdkafka,senior7515/librdkafka,orthrus/librdkafka,klonikar/librdkafka,janmejay/librdkafka,orthrus/librdkafka,janmejay/librdkafka,senior7515/librdkafka,senior7515/librdkafka,klonikar/librdkafka,orthrus/librdkafka,janmejay/librdkafka
|
import sys
import re
if __name__ == '__main__':
funcs = list()
last_line = ''
for line in sys.stdin:
m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
if m:
sym = m.group(2)
m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
if not m2:
funcs.append(sym)
last_line = ''
else:
last_line = line
print('# Automatically generated by lds-gen.py - DO NOT EDIT')
print('{\n global:')
if len(funcs) == 0:
print(' *;')
else:
for f in sorted(funcs):
print(' %s;' % f)
+ print('local:\n *;')
+
print('};')
|
Stop exporting internal symbols from the shared libraries.
|
## Code Before:
import sys
import re
if __name__ == '__main__':
funcs = list()
last_line = ''
for line in sys.stdin:
m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
if m:
sym = m.group(2)
m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
if not m2:
funcs.append(sym)
last_line = ''
else:
last_line = line
print('# Automatically generated by lds-gen.py - DO NOT EDIT')
print('{\n global:')
if len(funcs) == 0:
print(' *;')
else:
for f in sorted(funcs):
print(' %s;' % f)
print('};')
## Instruction:
Stop exporting internal symbols from the shared libraries.
## Code After:
import sys
import re
if __name__ == '__main__':
funcs = list()
last_line = ''
for line in sys.stdin:
m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
if m:
sym = m.group(2)
m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
if not m2:
funcs.append(sym)
last_line = ''
else:
last_line = line
print('# Automatically generated by lds-gen.py - DO NOT EDIT')
print('{\n global:')
if len(funcs) == 0:
print(' *;')
else:
for f in sorted(funcs):
print(' %s;' % f)
print('local:\n *;')
print('};')
|
import sys
import re
if __name__ == '__main__':
funcs = list()
last_line = ''
for line in sys.stdin:
m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
if m:
sym = m.group(2)
m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
if not m2:
funcs.append(sym)
last_line = ''
else:
last_line = line
print('# Automatically generated by lds-gen.py - DO NOT EDIT')
print('{\n global:')
if len(funcs) == 0:
print(' *;')
else:
for f in sorted(funcs):
print(' %s;' % f)
+ print('local:\n *;')
+
print('};')
|
b2d121a2ee8750afd0f4d527c80371bd501f841c
|
neo/io/nixio_fr.py
|
neo/io/nixio_fr.py
|
from neo.io.basefromrawio import BaseFromRaw
from neo.rawio.nixrawio import NIXRawIO
# This class subjects to limitations when there are multiple asymmetric blocks
class NixIO(NIXRawIO, BaseFromRaw):
name = 'NIX IO'
_prefered_signal_group_mode = 'group-by-same-units'
_prefered_units_group_mode = 'split-all'
def __init__(self, filename):
NIXRawIO.__init__(self, filename)
BaseFromRaw.__init__(self, filename)
def read_block(self, block_index=0, lazy=False, signal_group_mode=None,
units_group_mode=None, load_waveforms=False):
bl = super().read_block(block_index, lazy, signal_group_mode,
units_group_mode, load_waveforms)
for chx in bl.channel_indexes:
if "nix_name" in chx.annotations:
nixname = chx.annotations["nix_name"]
chx.annotations["nix_name"] = nixname[0]
return bl
def __enter__(self):
return self
def __exit__(self, *args):
self.header = None
self.file.close()
|
from neo.io.basefromrawio import BaseFromRaw
from neo.rawio.nixrawio import NIXRawIO
# This class subjects to limitations when there are multiple asymmetric blocks
class NixIO(NIXRawIO, BaseFromRaw):
name = 'NIX IO'
_prefered_signal_group_mode = 'group-by-same-units'
_prefered_units_group_mode = 'split-all'
def __init__(self, filename):
NIXRawIO.__init__(self, filename)
BaseFromRaw.__init__(self, filename)
def read_block(self, block_index=0, lazy=False, signal_group_mode=None,
units_group_mode=None, load_waveforms=False):
bl = super(NixIO, self).read_block(block_index, lazy,
signal_group_mode,
units_group_mode,
load_waveforms)
for chx in bl.channel_indexes:
if "nix_name" in chx.annotations:
nixname = chx.annotations["nix_name"]
chx.annotations["nix_name"] = nixname[0]
return bl
def __enter__(self):
return self
def __exit__(self, *args):
self.header = None
self.file.close()
|
Use Python2 compatible super() call
|
[nixio] Use Python2 compatible super() call
|
Python
|
bsd-3-clause
|
NeuralEnsemble/python-neo,JuliaSprenger/python-neo,samuelgarcia/python-neo,INM-6/python-neo,rgerkin/python-neo,apdavison/python-neo
|
from neo.io.basefromrawio import BaseFromRaw
from neo.rawio.nixrawio import NIXRawIO
# This class subjects to limitations when there are multiple asymmetric blocks
class NixIO(NIXRawIO, BaseFromRaw):
name = 'NIX IO'
_prefered_signal_group_mode = 'group-by-same-units'
_prefered_units_group_mode = 'split-all'
def __init__(self, filename):
NIXRawIO.__init__(self, filename)
BaseFromRaw.__init__(self, filename)
def read_block(self, block_index=0, lazy=False, signal_group_mode=None,
units_group_mode=None, load_waveforms=False):
- bl = super().read_block(block_index, lazy, signal_group_mode,
+ bl = super(NixIO, self).read_block(block_index, lazy,
+ signal_group_mode,
- units_group_mode, load_waveforms)
+ units_group_mode,
+ load_waveforms)
for chx in bl.channel_indexes:
if "nix_name" in chx.annotations:
nixname = chx.annotations["nix_name"]
chx.annotations["nix_name"] = nixname[0]
return bl
def __enter__(self):
return self
def __exit__(self, *args):
self.header = None
self.file.close()
|
Use Python2 compatible super() call
|
## Code Before:
from neo.io.basefromrawio import BaseFromRaw
from neo.rawio.nixrawio import NIXRawIO
# This class subjects to limitations when there are multiple asymmetric blocks
class NixIO(NIXRawIO, BaseFromRaw):
name = 'NIX IO'
_prefered_signal_group_mode = 'group-by-same-units'
_prefered_units_group_mode = 'split-all'
def __init__(self, filename):
NIXRawIO.__init__(self, filename)
BaseFromRaw.__init__(self, filename)
def read_block(self, block_index=0, lazy=False, signal_group_mode=None,
units_group_mode=None, load_waveforms=False):
bl = super().read_block(block_index, lazy, signal_group_mode,
units_group_mode, load_waveforms)
for chx in bl.channel_indexes:
if "nix_name" in chx.annotations:
nixname = chx.annotations["nix_name"]
chx.annotations["nix_name"] = nixname[0]
return bl
def __enter__(self):
return self
def __exit__(self, *args):
self.header = None
self.file.close()
## Instruction:
Use Python2 compatible super() call
## Code After:
from neo.io.basefromrawio import BaseFromRaw
from neo.rawio.nixrawio import NIXRawIO
# This class subjects to limitations when there are multiple asymmetric blocks
class NixIO(NIXRawIO, BaseFromRaw):
name = 'NIX IO'
_prefered_signal_group_mode = 'group-by-same-units'
_prefered_units_group_mode = 'split-all'
def __init__(self, filename):
NIXRawIO.__init__(self, filename)
BaseFromRaw.__init__(self, filename)
def read_block(self, block_index=0, lazy=False, signal_group_mode=None,
units_group_mode=None, load_waveforms=False):
bl = super(NixIO, self).read_block(block_index, lazy,
signal_group_mode,
units_group_mode,
load_waveforms)
for chx in bl.channel_indexes:
if "nix_name" in chx.annotations:
nixname = chx.annotations["nix_name"]
chx.annotations["nix_name"] = nixname[0]
return bl
def __enter__(self):
return self
def __exit__(self, *args):
self.header = None
self.file.close()
|
from neo.io.basefromrawio import BaseFromRaw
from neo.rawio.nixrawio import NIXRawIO
# This class subjects to limitations when there are multiple asymmetric blocks
class NixIO(NIXRawIO, BaseFromRaw):
name = 'NIX IO'
_prefered_signal_group_mode = 'group-by-same-units'
_prefered_units_group_mode = 'split-all'
def __init__(self, filename):
NIXRawIO.__init__(self, filename)
BaseFromRaw.__init__(self, filename)
def read_block(self, block_index=0, lazy=False, signal_group_mode=None,
units_group_mode=None, load_waveforms=False):
- bl = super().read_block(block_index, lazy, signal_group_mode,
? -------------------
+ bl = super(NixIO, self).read_block(block_index, lazy,
? +++++++++++
+ signal_group_mode,
- units_group_mode, load_waveforms)
? ----------------
+ units_group_mode,
? +++++++++++
+ load_waveforms)
for chx in bl.channel_indexes:
if "nix_name" in chx.annotations:
nixname = chx.annotations["nix_name"]
chx.annotations["nix_name"] = nixname[0]
return bl
def __enter__(self):
return self
def __exit__(self, *args):
self.header = None
self.file.close()
|
577da237d219aacd4413cb789fb08c76ca218223
|
ws/plugins/accuweather/__init__.py
|
ws/plugins/accuweather/__init__.py
|
import numpy as np
import pickle
import os
import sys
import ws.bad as bad
mydir = os.path.abspath(os.path.dirname(__file__))
print(mydir)
lookupmatrix = pickle.load(open( \
mydir +'/accuweather_location_codes.dump','rb'))
lookuplist = lookupmatrix.tolist()
def build_url(city):
# check whether input is a string
if type(city) != str:
raise(bad.Type("The input city " +str(city) +" wasn't of type string"))
index = lookuplist[1].index(city)
accuweather_index = lookuplist[0][index]
url = 'http://realtek.accu-weather.com/widget/realtek/weather-data.asp' \
+ '?location=cityId:' \
+ str(accuweather_index)
return url
|
import numpy as np
import pickle
import os
import sys
import ws.bad as bad
mydir = os.path.abspath(os.path.dirname(__file__))
lookupmatrix = pickle.load(open(os.path.join(mydir, 'accuweather_location_codes.dump'), 'rb'))
lookuplist = lookupmatrix.tolist()
def build_url(city):
# check whether input is a string
if type(city) != str:
raise(bad.Type("The input city " +str(city) +" wasn't of type string"))
index = lookuplist[1].index(city)
accuweather_index = lookuplist[0][index]
url = 'http://realtek.accu-weather.com/widget/realtek/weather-data.asp' \
+ '?location=cityId:' \
+ str(accuweather_index)
return url
|
Use os.path.join() to join paths
|
Use os.path.join() to join paths
|
Python
|
bsd-3-clause
|
BCCN-Prog/webscraping
|
-
import numpy as np
import pickle
import os
import sys
import ws.bad as bad
mydir = os.path.abspath(os.path.dirname(__file__))
+ lookupmatrix = pickle.load(open(os.path.join(mydir, 'accuweather_location_codes.dump'), 'rb'))
-
- print(mydir)
-
- lookupmatrix = pickle.load(open( \
- mydir +'/accuweather_location_codes.dump','rb'))
-
lookuplist = lookupmatrix.tolist()
def build_url(city):
# check whether input is a string
if type(city) != str:
raise(bad.Type("The input city " +str(city) +" wasn't of type string"))
+
-
-
index = lookuplist[1].index(city)
accuweather_index = lookuplist[0][index]
-
+
url = 'http://realtek.accu-weather.com/widget/realtek/weather-data.asp' \
+ '?location=cityId:' \
+ str(accuweather_index)
return url
+
|
Use os.path.join() to join paths
|
## Code Before:
import numpy as np
import pickle
import os
import sys
import ws.bad as bad
mydir = os.path.abspath(os.path.dirname(__file__))
print(mydir)
lookupmatrix = pickle.load(open( \
mydir +'/accuweather_location_codes.dump','rb'))
lookuplist = lookupmatrix.tolist()
def build_url(city):
# check whether input is a string
if type(city) != str:
raise(bad.Type("The input city " +str(city) +" wasn't of type string"))
index = lookuplist[1].index(city)
accuweather_index = lookuplist[0][index]
url = 'http://realtek.accu-weather.com/widget/realtek/weather-data.asp' \
+ '?location=cityId:' \
+ str(accuweather_index)
return url
## Instruction:
Use os.path.join() to join paths
## Code After:
import numpy as np
import pickle
import os
import sys
import ws.bad as bad
mydir = os.path.abspath(os.path.dirname(__file__))
lookupmatrix = pickle.load(open(os.path.join(mydir, 'accuweather_location_codes.dump'), 'rb'))
lookuplist = lookupmatrix.tolist()
def build_url(city):
# check whether input is a string
if type(city) != str:
raise(bad.Type("The input city " +str(city) +" wasn't of type string"))
index = lookuplist[1].index(city)
accuweather_index = lookuplist[0][index]
url = 'http://realtek.accu-weather.com/widget/realtek/weather-data.asp' \
+ '?location=cityId:' \
+ str(accuweather_index)
return url
|
-
import numpy as np
import pickle
import os
import sys
import ws.bad as bad
mydir = os.path.abspath(os.path.dirname(__file__))
+ lookupmatrix = pickle.load(open(os.path.join(mydir, 'accuweather_location_codes.dump'), 'rb'))
-
- print(mydir)
-
- lookupmatrix = pickle.load(open( \
- mydir +'/accuweather_location_codes.dump','rb'))
-
lookuplist = lookupmatrix.tolist()
def build_url(city):
# check whether input is a string
if type(city) != str:
raise(bad.Type("The input city " +str(city) +" wasn't of type string"))
+
-
-
index = lookuplist[1].index(city)
accuweather_index = lookuplist[0][index]
-
+
url = 'http://realtek.accu-weather.com/widget/realtek/weather-data.asp' \
+ '?location=cityId:' \
+ str(accuweather_index)
return url
|
7698a548102fae9d801cf2a5cb94163d8e4a39f2
|
kolla_mesos/tests/base.py
|
kolla_mesos/tests/base.py
|
import contextlib
from oslotest import base
import six
import testscenarios
# Python 3, thank you for dropping contextlib.nested
if six.PY3:
@contextlib.contextmanager
def nested(*contexts):
with contextlib.ExitStack() as stack:
yield [stack.enter_context(c) for c in contexts]
else:
nested = contextlib.nested
class BaseTestCase(testscenarios.WithScenarios,
base.BaseTestCase):
"""Test case base class for all unit tests."""
|
import contextlib
from oslo_config import cfg
from oslotest import base
import six
import testscenarios
# Python 3, thank you for dropping contextlib.nested
if six.PY3:
@contextlib.contextmanager
def nested(*contexts):
with contextlib.ExitStack() as stack:
yield [stack.enter_context(c) for c in contexts]
else:
nested = contextlib.nested
class BaseTestCase(testscenarios.WithScenarios,
base.BaseTestCase):
"""Test case base class for all unit tests."""
def setUp(self):
super(BaseTestCase, self).setUp()
self.addCleanup(cfg.CONF.reset)
|
Make sure any config overrides are reset
|
Make sure any config overrides are reset
The new deploy unit tests have conf.set_override() but
don't reset these after each test, so depending on the
test order there are failures.
This just addes a generic cleanup in the base class to catch
all of them.
Change-Id: Idb1b4ef158808b08657f34bfe1546fa2ec0dae9f
|
Python
|
apache-2.0
|
asalkeld/kolla-mesos,openstack/kolla-mesos,openstack/kolla-mesos,openstack/kolla-mesos
|
import contextlib
+ from oslo_config import cfg
from oslotest import base
import six
import testscenarios
# Python 3, thank you for dropping contextlib.nested
if six.PY3:
@contextlib.contextmanager
def nested(*contexts):
with contextlib.ExitStack() as stack:
yield [stack.enter_context(c) for c in contexts]
else:
nested = contextlib.nested
class BaseTestCase(testscenarios.WithScenarios,
base.BaseTestCase):
-
"""Test case base class for all unit tests."""
+ def setUp(self):
+ super(BaseTestCase, self).setUp()
+ self.addCleanup(cfg.CONF.reset)
+
|
Make sure any config overrides are reset
|
## Code Before:
import contextlib
from oslotest import base
import six
import testscenarios
# Python 3, thank you for dropping contextlib.nested
if six.PY3:
@contextlib.contextmanager
def nested(*contexts):
with contextlib.ExitStack() as stack:
yield [stack.enter_context(c) for c in contexts]
else:
nested = contextlib.nested
class BaseTestCase(testscenarios.WithScenarios,
base.BaseTestCase):
"""Test case base class for all unit tests."""
## Instruction:
Make sure any config overrides are reset
## Code After:
import contextlib
from oslo_config import cfg
from oslotest import base
import six
import testscenarios
# Python 3, thank you for dropping contextlib.nested
if six.PY3:
@contextlib.contextmanager
def nested(*contexts):
with contextlib.ExitStack() as stack:
yield [stack.enter_context(c) for c in contexts]
else:
nested = contextlib.nested
class BaseTestCase(testscenarios.WithScenarios,
base.BaseTestCase):
"""Test case base class for all unit tests."""
def setUp(self):
super(BaseTestCase, self).setUp()
self.addCleanup(cfg.CONF.reset)
|
import contextlib
+ from oslo_config import cfg
from oslotest import base
import six
import testscenarios
# Python 3, thank you for dropping contextlib.nested
if six.PY3:
@contextlib.contextmanager
def nested(*contexts):
with contextlib.ExitStack() as stack:
yield [stack.enter_context(c) for c in contexts]
else:
nested = contextlib.nested
class BaseTestCase(testscenarios.WithScenarios,
base.BaseTestCase):
+ """Test case base class for all unit tests."""
- """Test case base class for all unit tests."""
+ def setUp(self):
+ super(BaseTestCase, self).setUp()
+ self.addCleanup(cfg.CONF.reset)
|
2f0819fa6bea3e6f034516358563086d5ab9aa67
|
dasem/app/__init__.py
|
dasem/app/__init__.py
|
"""Dasem app."""
from __future__ import absolute_import, division, print_function
from flask import Flask
from flask_bootstrap import Bootstrap
from ..dannet import Dannet
from ..semantic import Semantic
app = Flask(__name__)
Bootstrap(app)
app.dasem_dannet = Dannet()
app.dasem_semantic = Semantic()
from . import views
|
"""Dasem app."""
from __future__ import absolute_import, division, print_function
from flask import Flask
from flask_bootstrap import Bootstrap
from ..dannet import Dannet
from ..wikipedia import ExplicitSemanticAnalysis
app = Flask(__name__)
Bootstrap(app)
app.dasem_dannet = Dannet()
app.dasem_wikipedia_esa = ExplicitSemanticAnalysis(display=True)
from . import views
|
Change to use ESA class in other module
|
Change to use ESA class in other module
|
Python
|
apache-2.0
|
fnielsen/dasem,fnielsen/dasem
|
"""Dasem app."""
from __future__ import absolute_import, division, print_function
from flask import Flask
from flask_bootstrap import Bootstrap
from ..dannet import Dannet
- from ..semantic import Semantic
+ from ..wikipedia import ExplicitSemanticAnalysis
app = Flask(__name__)
Bootstrap(app)
app.dasem_dannet = Dannet()
- app.dasem_semantic = Semantic()
+ app.dasem_wikipedia_esa = ExplicitSemanticAnalysis(display=True)
from . import views
|
Change to use ESA class in other module
|
## Code Before:
"""Dasem app."""
from __future__ import absolute_import, division, print_function
from flask import Flask
from flask_bootstrap import Bootstrap
from ..dannet import Dannet
from ..semantic import Semantic
app = Flask(__name__)
Bootstrap(app)
app.dasem_dannet = Dannet()
app.dasem_semantic = Semantic()
from . import views
## Instruction:
Change to use ESA class in other module
## Code After:
"""Dasem app."""
from __future__ import absolute_import, division, print_function
from flask import Flask
from flask_bootstrap import Bootstrap
from ..dannet import Dannet
from ..wikipedia import ExplicitSemanticAnalysis
app = Flask(__name__)
Bootstrap(app)
app.dasem_dannet = Dannet()
app.dasem_wikipedia_esa = ExplicitSemanticAnalysis(display=True)
from . import views
|
"""Dasem app."""
from __future__ import absolute_import, division, print_function
from flask import Flask
from flask_bootstrap import Bootstrap
from ..dannet import Dannet
- from ..semantic import Semantic
+ from ..wikipedia import ExplicitSemanticAnalysis
app = Flask(__name__)
Bootstrap(app)
app.dasem_dannet = Dannet()
- app.dasem_semantic = Semantic()
+ app.dasem_wikipedia_esa = ExplicitSemanticAnalysis(display=True)
from . import views
|
40f140682a902957d5875c8afc88e16bc327367f
|
tests/test_cat2cohort.py
|
tests/test_cat2cohort.py
|
"""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
values = [
(('Toto', 'fr'), 'Toto, frwiki'),
(('Titi', 'en'), 'Titi, enwiki'),
]
for value, expected in values:
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
|
"""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
def setUp(self):
"""Set up the tests."""
self.userlist = [('Toto', 'fr'), ('Titi', 'en')]
self.csvlines = ['Toto, frwiki', 'Titi, enwiki']
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
for value, expected in zip(self.userlist, self.csvlines):
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
|
Move unit tests data in setUp
|
Move unit tests data in setUp
When unit testing the various methods of cat2cohort,
we need some example data (input and expected output).
It makes sense to share it among testing methods through
the setUp method mechanism.
|
Python
|
mit
|
danmichaelo/wm_metrics,Commonists/wm_metrics,Commonists/wm_metrics,danmichaelo/wm_metrics,Commonists/wm_metrics,danmichaelo/wm_metrics,Commonists/wm_metrics,danmichaelo/wm_metrics
|
"""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
+
+ def setUp(self):
+ """Set up the tests."""
+ self.userlist = [('Toto', 'fr'), ('Titi', 'en')]
+ self.csvlines = ['Toto, frwiki', 'Titi, enwiki']
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
+ for value, expected in zip(self.userlist, self.csvlines):
- values = [
- (('Toto', 'fr'), 'Toto, frwiki'),
- (('Titi', 'en'), 'Titi, enwiki'),
- ]
- for value, expected in values:
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
|
Move unit tests data in setUp
|
## Code Before:
"""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
values = [
(('Toto', 'fr'), 'Toto, frwiki'),
(('Titi', 'en'), 'Titi, enwiki'),
]
for value, expected in values:
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
## Instruction:
Move unit tests data in setUp
## Code After:
"""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
def setUp(self):
"""Set up the tests."""
self.userlist = [('Toto', 'fr'), ('Titi', 'en')]
self.csvlines = ['Toto, frwiki', 'Titi, enwiki']
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
for value, expected in zip(self.userlist, self.csvlines):
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
|
"""Unit tests for cat2cohort."""
import unittest
from wm_metrics.cat2cohort import api_url, _make_CSV_line, _userlist_to_CSV_cohort
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
+
+ def setUp(self):
+ """Set up the tests."""
+ self.userlist = [('Toto', 'fr'), ('Titi', 'en')]
+ self.csvlines = ['Toto, frwiki', 'Titi, enwiki']
def test_api_url(self):
"""Test api_url."""
values = [
('fr', 'https://fr.wikipedia.org/w/api.php'),
('en', 'https://en.wikipedia.org/w/api.php'),
]
for value, expected in values:
self.assertEqual(api_url(value), expected)
def test_make_CSV_line(self):
"""Test _make_CSV_line."""
+ for value, expected in zip(self.userlist, self.csvlines):
- values = [
- (('Toto', 'fr'), 'Toto, frwiki'),
- (('Titi', 'en'), 'Titi, enwiki'),
- ]
- for value, expected in values:
self.assertEqual(_make_CSV_line(*value), expected)
def test_userlist_to_CSV_cohort(self):
"""Test _userlist_to_CSV_cohort."""
expected = '\n'.join(self.csvlines)
self.assertEqual(_userlist_to_CSV_cohort(self.userlist),
expected)
|
d5a5e46b2fbc9284213aef3ec45f0605b002b7b1
|
axes/management/commands/axes_reset.py
|
axes/management/commands/axes_reset.py
|
from django.core.management.base import BaseCommand
from axes.utils import reset
class Command(BaseCommand):
help = ("resets any lockouts or failed login records. If called with an "
"IP, resets only for that IP")
def add_arguments(self, parser):
parser.add_argument('ip', nargs='*')
def handle(self, *args, **kwargs):
count = 0
if kwargs:
for ip in kwargs['ip']:
count += reset(ip=ip)
else:
count = reset()
if count:
print('{0} attempts removed.'.format(count))
else:
print('No attempts found.')
|
from django.core.management.base import BaseCommand
from axes.utils import reset
class Command(BaseCommand):
help = ("resets any lockouts or failed login records. If called with an "
"IP, resets only for that IP")
def add_arguments(self, parser):
parser.add_argument('ip', nargs='*')
def handle(self, *args, **kwargs):
count = 0
if kwargs and kwargs.get('ip'):
for ip in kwargs['ip']:
count += reset(ip=ip)
else:
count = reset()
if count:
print('{0} attempts removed.'.format(count))
else:
print('No attempts found.')
|
Reset all attempts when ip not specified
|
Reset all attempts when ip not specified
When no ip address positional arguments are specified, reset all attempts, as with reset() and per documentation.
|
Python
|
mit
|
svenhertle/django-axes,django-pci/django-axes,jazzband/django-axes
|
from django.core.management.base import BaseCommand
from axes.utils import reset
class Command(BaseCommand):
help = ("resets any lockouts or failed login records. If called with an "
"IP, resets only for that IP")
def add_arguments(self, parser):
parser.add_argument('ip', nargs='*')
def handle(self, *args, **kwargs):
count = 0
- if kwargs:
+ if kwargs and kwargs.get('ip'):
for ip in kwargs['ip']:
count += reset(ip=ip)
else:
count = reset()
if count:
print('{0} attempts removed.'.format(count))
else:
print('No attempts found.')
|
Reset all attempts when ip not specified
|
## Code Before:
from django.core.management.base import BaseCommand
from axes.utils import reset
class Command(BaseCommand):
help = ("resets any lockouts or failed login records. If called with an "
"IP, resets only for that IP")
def add_arguments(self, parser):
parser.add_argument('ip', nargs='*')
def handle(self, *args, **kwargs):
count = 0
if kwargs:
for ip in kwargs['ip']:
count += reset(ip=ip)
else:
count = reset()
if count:
print('{0} attempts removed.'.format(count))
else:
print('No attempts found.')
## Instruction:
Reset all attempts when ip not specified
## Code After:
from django.core.management.base import BaseCommand
from axes.utils import reset
class Command(BaseCommand):
help = ("resets any lockouts or failed login records. If called with an "
"IP, resets only for that IP")
def add_arguments(self, parser):
parser.add_argument('ip', nargs='*')
def handle(self, *args, **kwargs):
count = 0
if kwargs and kwargs.get('ip'):
for ip in kwargs['ip']:
count += reset(ip=ip)
else:
count = reset()
if count:
print('{0} attempts removed.'.format(count))
else:
print('No attempts found.')
|
from django.core.management.base import BaseCommand
from axes.utils import reset
class Command(BaseCommand):
help = ("resets any lockouts or failed login records. If called with an "
"IP, resets only for that IP")
def add_arguments(self, parser):
parser.add_argument('ip', nargs='*')
def handle(self, *args, **kwargs):
count = 0
- if kwargs:
+ if kwargs and kwargs.get('ip'):
for ip in kwargs['ip']:
count += reset(ip=ip)
else:
count = reset()
if count:
print('{0} attempts removed.'.format(count))
else:
print('No attempts found.')
|
82b7e46ebdeb154963520fec1d41cc624ceb806d
|
tests/test_vendcrawler.py
|
tests/test_vendcrawler.py
|
import unittest
from vendcrawler.scripts.vendcrawler import VendCrawler
class TestVendCrawlerMethods(unittest.TestCase):
def test_get_links(self):
links = VendCrawler().get_links(2)
self.assertEqual(links,
['https://sarahserver.net/?module=vendor&p=1',
'https://sarahserver.net/?module=vendor&p=2'])
def test_get_page_count(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
page_count = VendCrawler().get_page_count(str(data))
self.assertEqual(int(page_count), 84)
if __name__ == '__main__':
unittest.main()
|
import unittest
from vendcrawler.scripts.vendcrawler import VendCrawler
class TestVendCrawlerMethods(unittest.TestCase):
def test_get_links(self):
links = VendCrawler('a', 'b', 'c').get_links(2)
self.assertEqual(links,
['https://sarahserver.net/?module=vendor&p=1',
'https://sarahserver.net/?module=vendor&p=2'])
def test_get_page_count(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
page_count = VendCrawler('a', 'b', 'c').get_page_count(str(data))
self.assertEqual(int(page_count), 84)
if __name__ == '__main__':
unittest.main()
|
Fix test by passing placeholder variables.
|
Fix test by passing placeholder variables.
|
Python
|
mit
|
josetaas/vendcrawler,josetaas/vendcrawler,josetaas/vendcrawler
|
import unittest
from vendcrawler.scripts.vendcrawler import VendCrawler
class TestVendCrawlerMethods(unittest.TestCase):
def test_get_links(self):
- links = VendCrawler().get_links(2)
+ links = VendCrawler('a', 'b', 'c').get_links(2)
self.assertEqual(links,
['https://sarahserver.net/?module=vendor&p=1',
'https://sarahserver.net/?module=vendor&p=2'])
def test_get_page_count(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
- page_count = VendCrawler().get_page_count(str(data))
+ page_count = VendCrawler('a', 'b', 'c').get_page_count(str(data))
self.assertEqual(int(page_count), 84)
if __name__ == '__main__':
unittest.main()
|
Fix test by passing placeholder variables.
|
## Code Before:
import unittest
from vendcrawler.scripts.vendcrawler import VendCrawler
class TestVendCrawlerMethods(unittest.TestCase):
def test_get_links(self):
links = VendCrawler().get_links(2)
self.assertEqual(links,
['https://sarahserver.net/?module=vendor&p=1',
'https://sarahserver.net/?module=vendor&p=2'])
def test_get_page_count(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
page_count = VendCrawler().get_page_count(str(data))
self.assertEqual(int(page_count), 84)
if __name__ == '__main__':
unittest.main()
## Instruction:
Fix test by passing placeholder variables.
## Code After:
import unittest
from vendcrawler.scripts.vendcrawler import VendCrawler
class TestVendCrawlerMethods(unittest.TestCase):
def test_get_links(self):
links = VendCrawler('a', 'b', 'c').get_links(2)
self.assertEqual(links,
['https://sarahserver.net/?module=vendor&p=1',
'https://sarahserver.net/?module=vendor&p=2'])
def test_get_page_count(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
page_count = VendCrawler('a', 'b', 'c').get_page_count(str(data))
self.assertEqual(int(page_count), 84)
if __name__ == '__main__':
unittest.main()
|
import unittest
from vendcrawler.scripts.vendcrawler import VendCrawler
class TestVendCrawlerMethods(unittest.TestCase):
def test_get_links(self):
- links = VendCrawler().get_links(2)
+ links = VendCrawler('a', 'b', 'c').get_links(2)
? +++++++++++++
self.assertEqual(links,
['https://sarahserver.net/?module=vendor&p=1',
'https://sarahserver.net/?module=vendor&p=2'])
def test_get_page_count(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
- page_count = VendCrawler().get_page_count(str(data))
+ page_count = VendCrawler('a', 'b', 'c').get_page_count(str(data))
? +++++++++++++
self.assertEqual(int(page_count), 84)
if __name__ == '__main__':
unittest.main()
|
c7ccfd82298c2c8c90c230f846ca9319bcf40441
|
lib/tagnews/__init__.py
|
lib/tagnews/__init__.py
|
from . import utils
from . import crimetype
from .crimetype.tag import CrimeTags
from .geoloc.tag import GeoCoder, get_lat_longs_from_geostrings
from .utils.load_data import load_data
from .utils.load_data import load_ner_data
from .utils.load_vectorizer import load_glove
__version__ = '1.0.2'
def test(verbosity=None, **kwargs):
"""run the test suite"""
import pytest
args = kwargs.pop('argv', [])
if verbosity:
args += ['-' + 'v' * verbosity]
return pytest.main(args, **kwargs)
test.__test__ = False # pytest: this function is not a test
|
from . import utils
from . import crimetype
from .crimetype.tag import CrimeTags
from .geoloc.tag import GeoCoder, get_lat_longs_from_geostrings
from .utils.load_data import load_data
from .utils.load_data import load_ner_data
from .utils.load_vectorizer import load_glove
__version__ = '1.0.2'
|
Remove unused test function at top level.
|
Remove unused test function at top level.
|
Python
|
mit
|
kbrose/article-tagging,kbrose/article-tagging,chicago-justice-project/article-tagging,chicago-justice-project/article-tagging
|
from . import utils
from . import crimetype
from .crimetype.tag import CrimeTags
from .geoloc.tag import GeoCoder, get_lat_longs_from_geostrings
from .utils.load_data import load_data
from .utils.load_data import load_ner_data
from .utils.load_vectorizer import load_glove
__version__ = '1.0.2'
- def test(verbosity=None, **kwargs):
- """run the test suite"""
-
- import pytest
-
- args = kwargs.pop('argv', [])
-
- if verbosity:
- args += ['-' + 'v' * verbosity]
-
- return pytest.main(args, **kwargs)
-
-
- test.__test__ = False # pytest: this function is not a test
-
|
Remove unused test function at top level.
|
## Code Before:
from . import utils
from . import crimetype
from .crimetype.tag import CrimeTags
from .geoloc.tag import GeoCoder, get_lat_longs_from_geostrings
from .utils.load_data import load_data
from .utils.load_data import load_ner_data
from .utils.load_vectorizer import load_glove
__version__ = '1.0.2'
def test(verbosity=None, **kwargs):
"""run the test suite"""
import pytest
args = kwargs.pop('argv', [])
if verbosity:
args += ['-' + 'v' * verbosity]
return pytest.main(args, **kwargs)
test.__test__ = False # pytest: this function is not a test
## Instruction:
Remove unused test function at top level.
## Code After:
from . import utils
from . import crimetype
from .crimetype.tag import CrimeTags
from .geoloc.tag import GeoCoder, get_lat_longs_from_geostrings
from .utils.load_data import load_data
from .utils.load_data import load_ner_data
from .utils.load_vectorizer import load_glove
__version__ = '1.0.2'
|
from . import utils
from . import crimetype
from .crimetype.tag import CrimeTags
from .geoloc.tag import GeoCoder, get_lat_longs_from_geostrings
from .utils.load_data import load_data
from .utils.load_data import load_ner_data
from .utils.load_vectorizer import load_glove
__version__ = '1.0.2'
-
- def test(verbosity=None, **kwargs):
- """run the test suite"""
-
- import pytest
-
- args = kwargs.pop('argv', [])
-
- if verbosity:
- args += ['-' + 'v' * verbosity]
-
- return pytest.main(args, **kwargs)
-
-
- test.__test__ = False # pytest: this function is not a test
|
16d6dd0ba2b5218d211c25e3e197d65fe163b09a
|
helusers/providers/helsinki_oidc/views.py
|
helusers/providers/helsinki_oidc/views.py
|
import requests
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter, OAuth2LoginView, OAuth2CallbackView
)
from .provider import HelsinkiOIDCProvider
class HelsinkiOIDCOAuth2Adapter(OAuth2Adapter):
provider_id = HelsinkiOIDCProvider.id
access_token_url = 'https://api.hel.fi/sso-test/openid/token/'
authorize_url = 'https://api.hel.fi/sso-test/openid/authorize/'
profile_url = 'https://api.hel.fi/sso-test/openid/userinfo/'
def complete_login(self, request, app, token, **kwargs):
headers = {'Authorization': 'Bearer {0}'.format(token.token)}
resp = requests.get(self.profile_url, headers=headers)
assert resp.status_code == 200
extra_data = resp.json()
return self.get_provider().sociallogin_from_response(request,
extra_data)
oauth2_login = OAuth2LoginView.adapter_view(HelsinkiOIDCOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(HelsinkiOIDCOAuth2Adapter)
|
import requests
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter, OAuth2LoginView, OAuth2CallbackView
)
from .provider import HelsinkiOIDCProvider
class HelsinkiOIDCOAuth2Adapter(OAuth2Adapter):
provider_id = HelsinkiOIDCProvider.id
access_token_url = 'https://api.hel.fi/sso/openid/token/'
authorize_url = 'https://api.hel.fi/sso/openid/authorize/'
profile_url = 'https://api.hel.fi/sso/openid/userinfo/'
def complete_login(self, request, app, token, **kwargs):
headers = {'Authorization': 'Bearer {0}'.format(token.token)}
resp = requests.get(self.profile_url, headers=headers)
assert resp.status_code == 200
extra_data = resp.json()
return self.get_provider().sociallogin_from_response(request,
extra_data)
oauth2_login = OAuth2LoginView.adapter_view(HelsinkiOIDCOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(HelsinkiOIDCOAuth2Adapter)
|
Fix broken Helsinki OIDC provider links
|
Fix broken Helsinki OIDC provider links
|
Python
|
bsd-2-clause
|
City-of-Helsinki/django-helusers,City-of-Helsinki/django-helusers
|
import requests
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter, OAuth2LoginView, OAuth2CallbackView
)
from .provider import HelsinkiOIDCProvider
class HelsinkiOIDCOAuth2Adapter(OAuth2Adapter):
provider_id = HelsinkiOIDCProvider.id
- access_token_url = 'https://api.hel.fi/sso-test/openid/token/'
+ access_token_url = 'https://api.hel.fi/sso/openid/token/'
- authorize_url = 'https://api.hel.fi/sso-test/openid/authorize/'
+ authorize_url = 'https://api.hel.fi/sso/openid/authorize/'
- profile_url = 'https://api.hel.fi/sso-test/openid/userinfo/'
+ profile_url = 'https://api.hel.fi/sso/openid/userinfo/'
def complete_login(self, request, app, token, **kwargs):
headers = {'Authorization': 'Bearer {0}'.format(token.token)}
resp = requests.get(self.profile_url, headers=headers)
assert resp.status_code == 200
extra_data = resp.json()
return self.get_provider().sociallogin_from_response(request,
extra_data)
oauth2_login = OAuth2LoginView.adapter_view(HelsinkiOIDCOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(HelsinkiOIDCOAuth2Adapter)
|
Fix broken Helsinki OIDC provider links
|
## Code Before:
import requests
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter, OAuth2LoginView, OAuth2CallbackView
)
from .provider import HelsinkiOIDCProvider
class HelsinkiOIDCOAuth2Adapter(OAuth2Adapter):
provider_id = HelsinkiOIDCProvider.id
access_token_url = 'https://api.hel.fi/sso-test/openid/token/'
authorize_url = 'https://api.hel.fi/sso-test/openid/authorize/'
profile_url = 'https://api.hel.fi/sso-test/openid/userinfo/'
def complete_login(self, request, app, token, **kwargs):
headers = {'Authorization': 'Bearer {0}'.format(token.token)}
resp = requests.get(self.profile_url, headers=headers)
assert resp.status_code == 200
extra_data = resp.json()
return self.get_provider().sociallogin_from_response(request,
extra_data)
oauth2_login = OAuth2LoginView.adapter_view(HelsinkiOIDCOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(HelsinkiOIDCOAuth2Adapter)
## Instruction:
Fix broken Helsinki OIDC provider links
## Code After:
import requests
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter, OAuth2LoginView, OAuth2CallbackView
)
from .provider import HelsinkiOIDCProvider
class HelsinkiOIDCOAuth2Adapter(OAuth2Adapter):
provider_id = HelsinkiOIDCProvider.id
access_token_url = 'https://api.hel.fi/sso/openid/token/'
authorize_url = 'https://api.hel.fi/sso/openid/authorize/'
profile_url = 'https://api.hel.fi/sso/openid/userinfo/'
def complete_login(self, request, app, token, **kwargs):
headers = {'Authorization': 'Bearer {0}'.format(token.token)}
resp = requests.get(self.profile_url, headers=headers)
assert resp.status_code == 200
extra_data = resp.json()
return self.get_provider().sociallogin_from_response(request,
extra_data)
oauth2_login = OAuth2LoginView.adapter_view(HelsinkiOIDCOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(HelsinkiOIDCOAuth2Adapter)
|
import requests
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter, OAuth2LoginView, OAuth2CallbackView
)
from .provider import HelsinkiOIDCProvider
class HelsinkiOIDCOAuth2Adapter(OAuth2Adapter):
provider_id = HelsinkiOIDCProvider.id
- access_token_url = 'https://api.hel.fi/sso-test/openid/token/'
? -----
+ access_token_url = 'https://api.hel.fi/sso/openid/token/'
- authorize_url = 'https://api.hel.fi/sso-test/openid/authorize/'
? -----
+ authorize_url = 'https://api.hel.fi/sso/openid/authorize/'
- profile_url = 'https://api.hel.fi/sso-test/openid/userinfo/'
? -----
+ profile_url = 'https://api.hel.fi/sso/openid/userinfo/'
def complete_login(self, request, app, token, **kwargs):
headers = {'Authorization': 'Bearer {0}'.format(token.token)}
resp = requests.get(self.profile_url, headers=headers)
assert resp.status_code == 200
extra_data = resp.json()
return self.get_provider().sociallogin_from_response(request,
extra_data)
oauth2_login = OAuth2LoginView.adapter_view(HelsinkiOIDCOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(HelsinkiOIDCOAuth2Adapter)
|
3607309193c5d8b2b5ce0fd98d976b6e6aa49644
|
test/test_client.py
|
test/test_client.py
|
import pytest
from numpy import random, ceil
from lightning import Lightning, Visualization
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
|
import pytest
from numpy import random, ceil
from lightning import Lightning, Visualization, VisualizationLocal
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_local_mode(self, lgn):
lgn.local = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, VisualizationLocal)
assert hasattr(viz, 'id')
|
Add test for local visualization
|
Add test for local visualization
|
Python
|
mit
|
garretstuber/lightning-python,garretstuber/lightning-python,peterkshultz/lightning-python,lightning-viz/lightning-python,garretstuber/lightning-python,lightning-viz/lightning-python,peterkshultz/lightning-python,peterkshultz/lightning-python
|
import pytest
from numpy import random, ceil
- from lightning import Lightning, Visualization
+ from lightning import Lightning, Visualization, VisualizationLocal
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
-
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
+ def test_local_mode(self, lgn):
+ lgn.local = True
+ x = random.randn(100)
+ viz = lgn.line(x)
+
+ assert isinstance(viz, VisualizationLocal)
+ assert hasattr(viz, 'id')
+
+
|
Add test for local visualization
|
## Code Before:
import pytest
from numpy import random, ceil
from lightning import Lightning, Visualization
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
## Instruction:
Add test for local visualization
## Code After:
import pytest
from numpy import random, ceil
from lightning import Lightning, Visualization, VisualizationLocal
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_local_mode(self, lgn):
lgn.local = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, VisualizationLocal)
assert hasattr(viz, 'id')
|
import pytest
from numpy import random, ceil
- from lightning import Lightning, Visualization
+ from lightning import Lightning, Visualization, VisualizationLocal
? ++++++++++++++++++++
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
-
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
+ def test_local_mode(self, lgn):
+
+ lgn.local = True
+ x = random.randn(100)
+ viz = lgn.line(x)
+
+ assert isinstance(viz, VisualizationLocal)
+ assert hasattr(viz, 'id')
+
|
d8d9b16e7264a6b2936b4920ca97f4dd923f29a3
|
crankycoin/services/queue.py
|
crankycoin/services/queue.py
|
import zmq
from crankycoin import config, logger
class Queue(object):
QUEUE_BIND_IN = config['user']['queue_bind_in']
QUEUE_BIND_OUT = config['user']['queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
logger.error("could not start queue: %s", e.message)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
|
import sys
import zmq
from crankycoin import config, logger
WIN32 = 'win32' in sys.platform
class Queue(object):
QUEUE_BIND_IN = config['user']['queue_bind_in'] if not WIN32 else config['user']['win_queue_bind_in']
QUEUE_BIND_OUT = config['user']['queue_bind_out'] if not WIN32 else config['user']['win_queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
logger.error("could not start queue: %s", e)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
|
Fix `protocol not supported` on Windows
|
Fix `protocol not supported` on Windows
|
Python
|
mit
|
cranklin/crankycoin
|
+ import sys
import zmq
from crankycoin import config, logger
+ WIN32 = 'win32' in sys.platform
class Queue(object):
+ QUEUE_BIND_IN = config['user']['queue_bind_in'] if not WIN32 else config['user']['win_queue_bind_in']
+ QUEUE_BIND_OUT = config['user']['queue_bind_out'] if not WIN32 else config['user']['win_queue_bind_out']
-
- QUEUE_BIND_IN = config['user']['queue_bind_in']
- QUEUE_BIND_OUT = config['user']['queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
- logger.error("could not start queue: %s", e.message)
+ logger.error("could not start queue: %s", e)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
|
Fix `protocol not supported` on Windows
|
## Code Before:
import zmq
from crankycoin import config, logger
class Queue(object):
QUEUE_BIND_IN = config['user']['queue_bind_in']
QUEUE_BIND_OUT = config['user']['queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
logger.error("could not start queue: %s", e.message)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
## Instruction:
Fix `protocol not supported` on Windows
## Code After:
import sys
import zmq
from crankycoin import config, logger
WIN32 = 'win32' in sys.platform
class Queue(object):
QUEUE_BIND_IN = config['user']['queue_bind_in'] if not WIN32 else config['user']['win_queue_bind_in']
QUEUE_BIND_OUT = config['user']['queue_bind_out'] if not WIN32 else config['user']['win_queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
logger.error("could not start queue: %s", e)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
|
+ import sys
import zmq
from crankycoin import config, logger
+ WIN32 = 'win32' in sys.platform
class Queue(object):
+ QUEUE_BIND_IN = config['user']['queue_bind_in'] if not WIN32 else config['user']['win_queue_bind_in']
+ QUEUE_BIND_OUT = config['user']['queue_bind_out'] if not WIN32 else config['user']['win_queue_bind_out']
-
- QUEUE_BIND_IN = config['user']['queue_bind_in']
- QUEUE_BIND_OUT = config['user']['queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
- logger.error("could not start queue: %s", e.message)
? --------
+ logger.error("could not start queue: %s", e)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
|
71671f30589464c4d714110a6f00ca6ab327c5c6
|
blogs/middleware.py
|
blogs/middleware.py
|
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
if host == pro_user.profile.blog_domain:
request.blog_user = pro_user
|
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
if host in pro_user.profile.blog_domain.split(' '):
request.blog_user = pro_user
|
Allow Pro users to specify multiple domains to serve their blog. Specifically for www/non-www setups.
|
Allow Pro users to specify multiple domains to serve their blog. Specifically for www/non-www setups.
|
Python
|
mit
|
nicksergeant/snipt,nicksergeant/snipt,nicksergeant/snipt
|
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
- if host == pro_user.profile.blog_domain:
+ if host in pro_user.profile.blog_domain.split(' '):
request.blog_user = pro_user
|
Allow Pro users to specify multiple domains to serve their blog. Specifically for www/non-www setups.
|
## Code Before:
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
if host == pro_user.profile.blog_domain:
request.blog_user = pro_user
## Instruction:
Allow Pro users to specify multiple domains to serve their blog. Specifically for www/non-www setups.
## Code After:
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
if host in pro_user.profile.blog_domain.split(' '):
request.blog_user = pro_user
|
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
- if host == pro_user.profile.blog_domain:
? ^^
+ if host in pro_user.profile.blog_domain.split(' '):
? ^^ +++++++++++
request.blog_user = pro_user
|
6171b8111359cc54a4af2c3444ce0e0e2db5ba80
|
froide/helper/context_processors.py
|
froide/helper/context_processors.py
|
from django.conf import settings
def froide(request):
return {"froide": settings.FROIDE_CONFIG}
def site_settings(request):
return {"SITE_NAME": settings.SITE_NAME,
"SITE_URL": settings.SITE_URL,
"FROIDE_DRYRUN": settings.FROIDE_DRYRUN,
"FROIDE_DRYRUN_DOMAIN": settings.FROIDE_DRYRUN_DOMAIN}
|
from django.conf import settings
def froide(request):
return {"froide": settings.FROIDE_CONFIG}
def site_settings(request):
return {"SITE_NAME": settings.SITE_NAME,
"SITE_URL": settings.SITE_URL,
"FROIDE_DRYRUN": settings.FROIDE_DRYRUN,
"FROIDE_DRYRUN_DOMAIN": settings.FROIDE_DRYRUN_DOMAIN,
"LANGUAGE_CODE": settings.LANGUAGE_CODE}
|
Add Froide Dry Run Domain and Language Code to context_processor
|
Add Froide Dry Run Domain and Language Code to context_processor
|
Python
|
mit
|
okfse/froide,ryankanno/froide,fin/froide,LilithWittmann/froide,okfse/froide,fin/froide,ryankanno/froide,LilithWittmann/froide,catcosmo/froide,CodeforHawaii/froide,CodeforHawaii/froide,stefanw/froide,catcosmo/froide,ryankanno/froide,catcosmo/froide,catcosmo/froide,okfse/froide,fin/froide,ryankanno/froide,stefanw/froide,ryankanno/froide,CodeforHawaii/froide,LilithWittmann/froide,okfse/froide,stefanw/froide,fin/froide,stefanw/froide,catcosmo/froide,stefanw/froide,LilithWittmann/froide,okfse/froide,CodeforHawaii/froide,LilithWittmann/froide,CodeforHawaii/froide
|
from django.conf import settings
def froide(request):
return {"froide": settings.FROIDE_CONFIG}
def site_settings(request):
return {"SITE_NAME": settings.SITE_NAME,
"SITE_URL": settings.SITE_URL,
"FROIDE_DRYRUN": settings.FROIDE_DRYRUN,
- "FROIDE_DRYRUN_DOMAIN": settings.FROIDE_DRYRUN_DOMAIN}
+ "FROIDE_DRYRUN_DOMAIN": settings.FROIDE_DRYRUN_DOMAIN,
+ "LANGUAGE_CODE": settings.LANGUAGE_CODE}
|
Add Froide Dry Run Domain and Language Code to context_processor
|
## Code Before:
from django.conf import settings
def froide(request):
return {"froide": settings.FROIDE_CONFIG}
def site_settings(request):
return {"SITE_NAME": settings.SITE_NAME,
"SITE_URL": settings.SITE_URL,
"FROIDE_DRYRUN": settings.FROIDE_DRYRUN,
"FROIDE_DRYRUN_DOMAIN": settings.FROIDE_DRYRUN_DOMAIN}
## Instruction:
Add Froide Dry Run Domain and Language Code to context_processor
## Code After:
from django.conf import settings
def froide(request):
return {"froide": settings.FROIDE_CONFIG}
def site_settings(request):
return {"SITE_NAME": settings.SITE_NAME,
"SITE_URL": settings.SITE_URL,
"FROIDE_DRYRUN": settings.FROIDE_DRYRUN,
"FROIDE_DRYRUN_DOMAIN": settings.FROIDE_DRYRUN_DOMAIN,
"LANGUAGE_CODE": settings.LANGUAGE_CODE}
|
from django.conf import settings
def froide(request):
return {"froide": settings.FROIDE_CONFIG}
def site_settings(request):
return {"SITE_NAME": settings.SITE_NAME,
"SITE_URL": settings.SITE_URL,
"FROIDE_DRYRUN": settings.FROIDE_DRYRUN,
- "FROIDE_DRYRUN_DOMAIN": settings.FROIDE_DRYRUN_DOMAIN}
? ^
+ "FROIDE_DRYRUN_DOMAIN": settings.FROIDE_DRYRUN_DOMAIN,
? ^
+ "LANGUAGE_CODE": settings.LANGUAGE_CODE}
|
ab20fb46cf1afb4b59d40a7bd8aba6a29cdebb64
|
eris/pydoc_color.py
|
eris/pydoc_color.py
|
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
if __name__ == "__main__":
main()
|
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
try:
print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
except pydoc.ErrorDuringImport as e:
print(e)
return 1
if __name__ == "__main__":
main()
|
Make pydoc quieter on error.
|
tools: Make pydoc quieter on error.
|
Python
|
artistic-2.0
|
ahamilton/vigil,ahamilton/vigil,ahamilton/vigil,ahamilton/vigil,ahamilton/vigil,ahamilton/vigil,ahamilton/vigil,ahamilton/vigil,ahamilton/vigil
|
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
+ try:
- print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
+ print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
+ except pydoc.ErrorDuringImport as e:
+ print(e)
+ return 1
if __name__ == "__main__":
main()
|
Make pydoc quieter on error.
|
## Code Before:
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
if __name__ == "__main__":
main()
## Instruction:
Make pydoc quieter on error.
## Code After:
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
try:
print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
except pydoc.ErrorDuringImport as e:
print(e)
return 1
if __name__ == "__main__":
main()
|
import pydoc
import sys
import eris.termstr
class TermDoc(pydoc.TextDoc):
def bold(self, text):
return str(eris.termstr.TermStr(text).bold())
def main():
path = sys.argv[1]
+ try:
- print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
+ print(pydoc.render_doc(pydoc.importfile(path), renderer=TermDoc()))
? ++++
+ except pydoc.ErrorDuringImport as e:
+ print(e)
+ return 1
if __name__ == "__main__":
main()
|
c06ab929e1f7a55ddc0ed978939ea604cad003cb
|
hamper/plugins/roulette.py
|
hamper/plugins/roulette.py
|
import random, datetime
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
dice = random.randint(1,6)
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
|
import random
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
dice = random.randint(1, 6)
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
|
Revert "This should break the flakes8 check on Travis"
|
Revert "This should break the flakes8 check on Travis"
This reverts commit 91c3d6c30d75ce66228d52c74bf8a4d8e7628670.
|
Python
|
mit
|
hamperbot/hamper,maxking/hamper,iankronquist/hamper
|
- import random, datetime
+ import random
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
- dice = random.randint(1,6)
+ dice = random.randint(1, 6)
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
|
Revert "This should break the flakes8 check on Travis"
|
## Code Before:
import random, datetime
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
dice = random.randint(1,6)
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
## Instruction:
Revert "This should break the flakes8 check on Travis"
## Code After:
import random
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
dice = random.randint(1, 6)
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
|
- import random, datetime
+ import random
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
- dice = random.randint(1,6)
+ dice = random.randint(1, 6)
? +
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
|
11443eda1a192c0f3a4aa8225263b4e312fa5a55
|
spam_lists/exceptions.py
|
spam_lists/exceptions.py
|
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class UnknownCodeError(SpamListsError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsError):
'''The API key used to query the service was not authorized'''
|
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsError):
'''The API key used to query the service was not authorized'''
|
Make UnknownCodeError additionally extend KeyError
|
Make UnknownCodeError additionally extend KeyError
|
Python
|
mit
|
piotr-rusin/spam-lists
|
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
- class UnknownCodeError(SpamListsError):
+ class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsError):
'''The API key used to query the service was not authorized'''
|
Make UnknownCodeError additionally extend KeyError
|
## Code Before:
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class UnknownCodeError(SpamListsError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsError):
'''The API key used to query the service was not authorized'''
## Instruction:
Make UnknownCodeError additionally extend KeyError
## Code After:
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsError):
'''The API key used to query the service was not authorized'''
|
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
- class UnknownCodeError(SpamListsError):
+ class UnknownCodeError(SpamListsError, KeyError):
? ++++++++++
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsError):
'''The API key used to query the service was not authorized'''
|
d1826b00f4b4944161c66e737978bdc87bb57b52
|
polyaxon/libs/decorators.py
|
polyaxon/libs/decorators.py
|
class IgnoreRawDecorator(object):
"""The `IgnoreRawDecorator` is a decorator to ignore raw/fixture data during signals handling.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if kwargs.get('raw'):
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
ignore_raw = IgnoreRawDecorator
|
from django.conf import settings
class IgnoreRawDecorator(object):
"""The `IgnoreRawDecorator` is a decorator to ignore raw/fixture data during signals handling.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if kwargs.get('raw'):
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
class RunnerSignalDecorator(object):
"""The `RunnerSignalDecorator` is a decorator to ignore signals related to runner.
This is useful to ignore any signal that is runner specific.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@runner_signal
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if not settings.DEPLOY_RUNNER:
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
ignore_raw = IgnoreRawDecorator
runner_signal = RunnerSignalDecorator
|
Add decorator for runner signals
|
Add decorator for runner signals
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
+ from django.conf import settings
+
+
class IgnoreRawDecorator(object):
"""The `IgnoreRawDecorator` is a decorator to ignore raw/fixture data during signals handling.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if kwargs.get('raw'):
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
+ class RunnerSignalDecorator(object):
+ """The `RunnerSignalDecorator` is a decorator to ignore signals related to runner.
+
+ This is useful to ignore any signal that is runner specific.
+
+ usage example:
+ @receiver(post_save, sender=settings.AUTH_USER_MODEL)
+ @runner_signal
+ @ignore_raw
+ def my_signal_handler(sender, instance=None, created=False, **kwargs):
+ ...
+ return ...
+ """
+
+ def __init__(self, f):
+ self.f = f
+
+ def __call__(self, *args, **kwargs):
+ if not settings.DEPLOY_RUNNER:
+ # Ignore signal handling for fixture loading
+ return
+
+ return self.f(*args, **kwargs)
+
+
ignore_raw = IgnoreRawDecorator
+ runner_signal = RunnerSignalDecorator
|
Add decorator for runner signals
|
## Code Before:
class IgnoreRawDecorator(object):
"""The `IgnoreRawDecorator` is a decorator to ignore raw/fixture data during signals handling.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if kwargs.get('raw'):
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
ignore_raw = IgnoreRawDecorator
## Instruction:
Add decorator for runner signals
## Code After:
from django.conf import settings
class IgnoreRawDecorator(object):
"""The `IgnoreRawDecorator` is a decorator to ignore raw/fixture data during signals handling.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if kwargs.get('raw'):
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
class RunnerSignalDecorator(object):
"""The `RunnerSignalDecorator` is a decorator to ignore signals related to runner.
This is useful to ignore any signal that is runner specific.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@runner_signal
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if not settings.DEPLOY_RUNNER:
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
ignore_raw = IgnoreRawDecorator
runner_signal = RunnerSignalDecorator
|
+ from django.conf import settings
+
+
class IgnoreRawDecorator(object):
"""The `IgnoreRawDecorator` is a decorator to ignore raw/fixture data during signals handling.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if kwargs.get('raw'):
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
+ class RunnerSignalDecorator(object):
+ """The `RunnerSignalDecorator` is a decorator to ignore signals related to runner.
+
+ This is useful to ignore any signal that is runner specific.
+
+ usage example:
+ @receiver(post_save, sender=settings.AUTH_USER_MODEL)
+ @runner_signal
+ @ignore_raw
+ def my_signal_handler(sender, instance=None, created=False, **kwargs):
+ ...
+ return ...
+ """
+
+ def __init__(self, f):
+ self.f = f
+
+ def __call__(self, *args, **kwargs):
+ if not settings.DEPLOY_RUNNER:
+ # Ignore signal handling for fixture loading
+ return
+
+ return self.f(*args, **kwargs)
+
+
ignore_raw = IgnoreRawDecorator
+ runner_signal = RunnerSignalDecorator
|
e7d271c41dd713750a8224f0e8f65e2d3b119623
|
polyaxon/auditor/service.py
|
polyaxon/auditor/service.py
|
import activitylogs
import tracker
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
|
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def __init__(self):
self.tracker = None
self.activitylogs = None
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
self.tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
self.activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
import activitylogs
import tracker
self.tracker = tracker
self.activitylogs = activitylogs
|
Move event managers imports to setup in auditor
|
Move event managers imports to setup in auditor
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
- import activitylogs
- import tracker
-
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
+ def __init__(self):
+ self.tracker = None
+ self.activitylogs = None
+
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
- tracker.record(event_type=event['event_type'],
+ self.tracker.record(event_type=event['event_type'],
- instance=event['instance'],
- **event['kwargs'])
- activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
+ self.activitylogs.record(event_type=event['event_type'],
+ instance=event['instance'],
+ **event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
+ import activitylogs
+ import tracker
+
+ self.tracker = tracker
+ self.activitylogs = activitylogs
+
|
Move event managers imports to setup in auditor
|
## Code Before:
import activitylogs
import tracker
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
## Instruction:
Move event managers imports to setup in auditor
## Code After:
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def __init__(self):
self.tracker = None
self.activitylogs = None
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
self.tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
self.activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
import activitylogs
import tracker
self.tracker = tracker
self.activitylogs = activitylogs
|
- import activitylogs
- import tracker
-
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
+ def __init__(self):
+ self.tracker = None
+ self.activitylogs = None
+
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
- tracker.record(event_type=event['event_type'],
+ self.tracker.record(event_type=event['event_type'],
? +++++
- instance=event['instance'],
- **event['kwargs'])
- activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
+ self.activitylogs.record(event_type=event['event_type'],
+ instance=event['instance'],
+ **event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
+
+ import activitylogs
+ import tracker
+
+ self.tracker = tracker
+ self.activitylogs = activitylogs
|
f959e9213f27cee5ed5739655d4f85c7d0d442aa
|
tests/functional/customer/test_notification.py
|
tests/functional/customer/test_notification.py
|
from http import client as http_client
from oscar.test.testcases import WebTestCase
from oscar.apps.customer.notifications import services
from oscar.test.factories import UserFactory
from django.urls import reverse
from oscar.apps.customer.models import Notification
class TestAUserWithUnreadNotifications(WebTestCase):
def setUp(self):
self.user = UserFactory()
services.notify_user(self.user, "Test message")
def test_can_see_them_in_page_header(self):
homepage = self.app.get('/', user=self.user)
self.assertEqual(1, homepage.context['num_unread_notifications'])
def test_notification_list_view_shows_user_notifications(self):
response = self.app.get(reverse('customer:notifications-inbox'), user=self.user)
self.assertEqual(1, len(response.context['notifications']))
self.assertEqual(False, response.context['notifications'][0].is_read)
def test_notification_marked_as_read(self):
n = Notification.objects.first()
path = reverse('customer:notifications-detail', kwargs={'pk': n.id})
response = self.app.get(path, user=self.user)
# notification should be marked as read
self.assertEqual(http_client.OK, response.status_code)
n.refresh_from_db()
self.assertTrue(n.is_read)
|
from http import client as http_client
from oscar.test.testcases import WebTestCase
from oscar.apps.customer.notifications import services
from oscar.test.factories import UserFactory
from django.urls import reverse
from oscar.apps.customer.models import Notification
class TestAUserWithUnreadNotifications(WebTestCase):
def setUp(self):
self.user = UserFactory()
services.notify_user(self.user, "Test message")
def test_can_see_them_in_page_header(self):
homepage = self.app.get('/', user=self.user)
if homepage.status_code == 302:
homepage = homepage.follow()
self.assertEqual(1, homepage.context['num_unread_notifications'])
def test_notification_list_view_shows_user_notifications(self):
response = self.app.get(reverse('customer:notifications-inbox'), user=self.user)
self.assertEqual(1, len(response.context['notifications']))
self.assertEqual(False, response.context['notifications'][0].is_read)
def test_notification_marked_as_read(self):
n = Notification.objects.first()
path = reverse('customer:notifications-detail', kwargs={'pk': n.id})
response = self.app.get(path, user=self.user)
# notification should be marked as read
self.assertEqual(http_client.OK, response.status_code)
n.refresh_from_db()
self.assertTrue(n.is_read)
|
Update test in case if home page has redirection.
|
Update test in case if home page has redirection.
|
Python
|
bsd-3-clause
|
solarissmoke/django-oscar,django-oscar/django-oscar,django-oscar/django-oscar,solarissmoke/django-oscar,sasha0/django-oscar,sasha0/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,solarissmoke/django-oscar,sasha0/django-oscar,sasha0/django-oscar,django-oscar/django-oscar
|
from http import client as http_client
from oscar.test.testcases import WebTestCase
from oscar.apps.customer.notifications import services
from oscar.test.factories import UserFactory
from django.urls import reverse
from oscar.apps.customer.models import Notification
class TestAUserWithUnreadNotifications(WebTestCase):
def setUp(self):
self.user = UserFactory()
services.notify_user(self.user, "Test message")
def test_can_see_them_in_page_header(self):
homepage = self.app.get('/', user=self.user)
+ if homepage.status_code == 302:
+ homepage = homepage.follow()
self.assertEqual(1, homepage.context['num_unread_notifications'])
def test_notification_list_view_shows_user_notifications(self):
response = self.app.get(reverse('customer:notifications-inbox'), user=self.user)
self.assertEqual(1, len(response.context['notifications']))
self.assertEqual(False, response.context['notifications'][0].is_read)
def test_notification_marked_as_read(self):
n = Notification.objects.first()
path = reverse('customer:notifications-detail', kwargs={'pk': n.id})
response = self.app.get(path, user=self.user)
# notification should be marked as read
self.assertEqual(http_client.OK, response.status_code)
n.refresh_from_db()
self.assertTrue(n.is_read)
|
Update test in case if home page has redirection.
|
## Code Before:
from http import client as http_client
from oscar.test.testcases import WebTestCase
from oscar.apps.customer.notifications import services
from oscar.test.factories import UserFactory
from django.urls import reverse
from oscar.apps.customer.models import Notification
class TestAUserWithUnreadNotifications(WebTestCase):
def setUp(self):
self.user = UserFactory()
services.notify_user(self.user, "Test message")
def test_can_see_them_in_page_header(self):
homepage = self.app.get('/', user=self.user)
self.assertEqual(1, homepage.context['num_unread_notifications'])
def test_notification_list_view_shows_user_notifications(self):
response = self.app.get(reverse('customer:notifications-inbox'), user=self.user)
self.assertEqual(1, len(response.context['notifications']))
self.assertEqual(False, response.context['notifications'][0].is_read)
def test_notification_marked_as_read(self):
n = Notification.objects.first()
path = reverse('customer:notifications-detail', kwargs={'pk': n.id})
response = self.app.get(path, user=self.user)
# notification should be marked as read
self.assertEqual(http_client.OK, response.status_code)
n.refresh_from_db()
self.assertTrue(n.is_read)
## Instruction:
Update test in case if home page has redirection.
## Code After:
from http import client as http_client
from oscar.test.testcases import WebTestCase
from oscar.apps.customer.notifications import services
from oscar.test.factories import UserFactory
from django.urls import reverse
from oscar.apps.customer.models import Notification
class TestAUserWithUnreadNotifications(WebTestCase):
def setUp(self):
self.user = UserFactory()
services.notify_user(self.user, "Test message")
def test_can_see_them_in_page_header(self):
homepage = self.app.get('/', user=self.user)
if homepage.status_code == 302:
homepage = homepage.follow()
self.assertEqual(1, homepage.context['num_unread_notifications'])
def test_notification_list_view_shows_user_notifications(self):
response = self.app.get(reverse('customer:notifications-inbox'), user=self.user)
self.assertEqual(1, len(response.context['notifications']))
self.assertEqual(False, response.context['notifications'][0].is_read)
def test_notification_marked_as_read(self):
n = Notification.objects.first()
path = reverse('customer:notifications-detail', kwargs={'pk': n.id})
response = self.app.get(path, user=self.user)
# notification should be marked as read
self.assertEqual(http_client.OK, response.status_code)
n.refresh_from_db()
self.assertTrue(n.is_read)
|
from http import client as http_client
from oscar.test.testcases import WebTestCase
from oscar.apps.customer.notifications import services
from oscar.test.factories import UserFactory
from django.urls import reverse
from oscar.apps.customer.models import Notification
class TestAUserWithUnreadNotifications(WebTestCase):
def setUp(self):
self.user = UserFactory()
services.notify_user(self.user, "Test message")
def test_can_see_them_in_page_header(self):
homepage = self.app.get('/', user=self.user)
+ if homepage.status_code == 302:
+ homepage = homepage.follow()
self.assertEqual(1, homepage.context['num_unread_notifications'])
def test_notification_list_view_shows_user_notifications(self):
response = self.app.get(reverse('customer:notifications-inbox'), user=self.user)
self.assertEqual(1, len(response.context['notifications']))
self.assertEqual(False, response.context['notifications'][0].is_read)
def test_notification_marked_as_read(self):
n = Notification.objects.first()
path = reverse('customer:notifications-detail', kwargs={'pk': n.id})
response = self.app.get(path, user=self.user)
# notification should be marked as read
self.assertEqual(http_client.OK, response.status_code)
n.refresh_from_db()
self.assertTrue(n.is_read)
|
fd819ff0ff1a7d73dd58f152d2c4be8aea18e2d3
|
rebulk/processors.py
|
rebulk/processors.py
|
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
to_remove_matches = set()
for match in matches:
if match.private:
to_remove_matches.add(match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
|
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
for match in list(matches):
if match.private:
matches.remove(match)
return matches
|
Fix issue when a private match is found multiple times
|
Fix issue when a private match is found multiple times
|
Python
|
mit
|
Toilal/rebulk
|
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
- to_remove_matches = set()
- for match in matches:
+ for match in list(matches):
if match.private:
- to_remove_matches.add(match)
-
- for match in list(to_remove_matches):
- matches.remove(match)
+ matches.remove(match)
return matches
|
Fix issue when a private match is found multiple times
|
## Code Before:
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
to_remove_matches = set()
for match in matches:
if match.private:
to_remove_matches.add(match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
## Instruction:
Fix issue when a private match is found multiple times
## Code After:
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
for match in list(matches):
if match.private:
matches.remove(match)
return matches
|
def conflict_prefer_longer(matches):
"""
Remove shorter matches if they conflicts with longer ones
:param matches:
:type matches: rebulk.match.Matches
:param context:
:type context:
:return:
:rtype: list[rebulk.match.Match]
"""
to_remove_matches = set()
for match in filter(lambda match: not match.private, matches):
conflicting_matches = set()
for i in range(*match.span):
conflicting_matches.update(matches.starting(i))
conflicting_matches.update(matches.ending(i))
if conflicting_matches:
# keep the match only if it's the longest
for conflicting_match in filter(lambda match: not match.private, conflicting_matches):
if len(conflicting_match) < len(match):
to_remove_matches.add(conflicting_match)
for match in list(to_remove_matches):
matches.remove(match)
return matches
def remove_private(matches):
"""
Removes private matches.
:param matches:
:type matches:
:return:
:rtype:
"""
- to_remove_matches = set()
- for match in matches:
+ for match in list(matches):
? +++++ +
if match.private:
- to_remove_matches.add(match)
-
- for match in list(to_remove_matches):
- matches.remove(match)
+ matches.remove(match)
? ++++
return matches
|
3b8a54f2ce220de26741aa329ebb45ceeb3b99c5
|
external_file_location/__manifest__.py
|
external_file_location/__manifest__.py
|
{
'name': 'External File Location',
'version': '10.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA),ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
|
{
'name': 'External File Location',
'version': '10.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA),'
'ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
|
Fix line lenght in manifest
|
Fix line lenght in manifest
|
Python
|
agpl-3.0
|
thinkopensolutions/server-tools,thinkopensolutions/server-tools
|
{
'name': 'External File Location',
'version': '10.0.1.0.0',
- 'author': 'Akretion,Odoo Community Association (OCA),ThinkOpen Solutions Brasil',
+ 'author': 'Akretion,Odoo Community Association (OCA),'
+ 'ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
|
Fix line lenght in manifest
|
## Code Before:
{
'name': 'External File Location',
'version': '10.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA),ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
## Instruction:
Fix line lenght in manifest
## Code After:
{
'name': 'External File Location',
'version': '10.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA),'
'ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
|
{
'name': 'External File Location',
'version': '10.0.1.0.0',
- 'author': 'Akretion,Odoo Community Association (OCA),ThinkOpen Solutions Brasil',
? -------------------------- -
+ 'author': 'Akretion,Odoo Community Association (OCA),'
+ 'ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
|
57015bec555ca2a3f2e5893158d00f2dd2ca441c
|
errs.py
|
errs.py
|
import sys
class ConfigError(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
class ParseError(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
|
import sys
class GenericException(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
class ConfigError(GenericException):
pass
class ParseError(GenericException):
pass
|
Make errors a bit easier to copy
|
Make errors a bit easier to copy
|
Python
|
agpl-3.0
|
OpenTechStrategies/anvil
|
import sys
- class ConfigError(Exception):
+ class GenericException(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
+ class ConfigError(GenericException):
+ pass
+
- class ParseError(Exception):
+ class ParseError(GenericException):
+ pass
- def __init__(self, message):
- self.message = message
-
- sys.stdout.write("\nERROR: " + str(message) + "\n\n")
|
Make errors a bit easier to copy
|
## Code Before:
import sys
class ConfigError(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
class ParseError(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
## Instruction:
Make errors a bit easier to copy
## Code After:
import sys
class GenericException(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
class ConfigError(GenericException):
pass
class ParseError(GenericException):
pass
|
import sys
- class ConfigError(Exception):
+ class GenericException(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
+ class ConfigError(GenericException):
+ pass
- class ParseError(Exception):
- def __init__(self, message):
- self.message = message
-
- sys.stdout.write("\nERROR: " + str(message) + "\n\n")
+ class ParseError(GenericException):
+ pass
+
|
c7143cd725fc829c33ad9f9150e5975deb7be93a
|
irctest/optional_extensions.py
|
irctest/optional_extensions.py
|
import unittest
import operator
import itertools
class OptionalExtensionNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported extension: {}'.format(self.args[0])
class OptionalSaslMechanismNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported SASL mechanism: {}'.format(self.args[0])
class OptionalityReportingTextTestRunner(unittest.TextTestRunner):
def run(self, test):
result = super().run(test)
if result.skipped:
print()
print('Some tests were skipped because the following optional'
'specifications/mechanisms are not supported:')
msg_to_tests = itertools.groupby(result.skipped,
key=operator.itemgetter(1))
for (msg, tests) in msg_to_tests:
print('\t{} ({} test(s))'.format(msg, sum(1 for x in tests)))
return result
|
import unittest
import operator
import itertools
class NotImplementedByController(unittest.SkipTest):
def __str__(self):
return 'Not implemented by controller: {}'.format(self.args[0])
class OptionalExtensionNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported extension: {}'.format(self.args[0])
class OptionalSaslMechanismNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported SASL mechanism: {}'.format(self.args[0])
class OptionalityReportingTextTestRunner(unittest.TextTestRunner):
def run(self, test):
result = super().run(test)
if result.skipped:
print()
print('Some tests were skipped because the following optional'
'specifications/mechanisms are not supported:')
msg_to_tests = itertools.groupby(result.skipped,
key=operator.itemgetter(1))
for (msg, tests) in sorted(msg_to_tests):
print('\t{} ({} test(s))'.format(msg, sum(1 for x in tests)))
return result
|
Add an exception to tell a controller does not implement something.
|
Add an exception to tell a controller does not implement something.
|
Python
|
mit
|
ProgVal/irctest
|
import unittest
import operator
import itertools
+
+ class NotImplementedByController(unittest.SkipTest):
+ def __str__(self):
+ return 'Not implemented by controller: {}'.format(self.args[0])
class OptionalExtensionNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported extension: {}'.format(self.args[0])
class OptionalSaslMechanismNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported SASL mechanism: {}'.format(self.args[0])
class OptionalityReportingTextTestRunner(unittest.TextTestRunner):
def run(self, test):
result = super().run(test)
if result.skipped:
print()
print('Some tests were skipped because the following optional'
'specifications/mechanisms are not supported:')
msg_to_tests = itertools.groupby(result.skipped,
key=operator.itemgetter(1))
- for (msg, tests) in msg_to_tests:
+ for (msg, tests) in sorted(msg_to_tests):
print('\t{} ({} test(s))'.format(msg, sum(1 for x in tests)))
return result
|
Add an exception to tell a controller does not implement something.
|
## Code Before:
import unittest
import operator
import itertools
class OptionalExtensionNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported extension: {}'.format(self.args[0])
class OptionalSaslMechanismNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported SASL mechanism: {}'.format(self.args[0])
class OptionalityReportingTextTestRunner(unittest.TextTestRunner):
def run(self, test):
result = super().run(test)
if result.skipped:
print()
print('Some tests were skipped because the following optional'
'specifications/mechanisms are not supported:')
msg_to_tests = itertools.groupby(result.skipped,
key=operator.itemgetter(1))
for (msg, tests) in msg_to_tests:
print('\t{} ({} test(s))'.format(msg, sum(1 for x in tests)))
return result
## Instruction:
Add an exception to tell a controller does not implement something.
## Code After:
import unittest
import operator
import itertools
class NotImplementedByController(unittest.SkipTest):
def __str__(self):
return 'Not implemented by controller: {}'.format(self.args[0])
class OptionalExtensionNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported extension: {}'.format(self.args[0])
class OptionalSaslMechanismNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported SASL mechanism: {}'.format(self.args[0])
class OptionalityReportingTextTestRunner(unittest.TextTestRunner):
def run(self, test):
result = super().run(test)
if result.skipped:
print()
print('Some tests were skipped because the following optional'
'specifications/mechanisms are not supported:')
msg_to_tests = itertools.groupby(result.skipped,
key=operator.itemgetter(1))
for (msg, tests) in sorted(msg_to_tests):
print('\t{} ({} test(s))'.format(msg, sum(1 for x in tests)))
return result
|
import unittest
import operator
import itertools
+
+ class NotImplementedByController(unittest.SkipTest):
+ def __str__(self):
+ return 'Not implemented by controller: {}'.format(self.args[0])
class OptionalExtensionNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported extension: {}'.format(self.args[0])
class OptionalSaslMechanismNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported SASL mechanism: {}'.format(self.args[0])
class OptionalityReportingTextTestRunner(unittest.TextTestRunner):
def run(self, test):
result = super().run(test)
if result.skipped:
print()
print('Some tests were skipped because the following optional'
'specifications/mechanisms are not supported:')
msg_to_tests = itertools.groupby(result.skipped,
key=operator.itemgetter(1))
- for (msg, tests) in msg_to_tests:
+ for (msg, tests) in sorted(msg_to_tests):
? +++++++ +
print('\t{} ({} test(s))'.format(msg, sum(1 for x in tests)))
return result
|
511abf77f16a7a92dde93a9f1318967b1d237635
|
go_doc_get.py
|
go_doc_get.py
|
import sublime
import sublime_plugin
import webbrowser
def cleanPackage(pkgURI):
pkg = pkgURI.split('.com/')[1]
return pkg
class GoDocGetCommand(sublime_plugin.TextCommand):
def run(self, edit):
view = self.view
for region in view.sel():
selected = view.substr(region)
if "github.corp" in selected:
# if corporate go to page
pkg = cleanPackage(selected)
webbrowser.open('https://github.corp.dyndns.com/' + pkg)
elif "github" in selected:
# if public package go to doc
pkg = cleanPackage(selected)
webbrowser.open('https://godoc.org/github.com/' + pkg)
else:
# default to golang proper
webbrowser.open('https://golang.org/pkg/' + selected)
|
import sublime
import sublime_plugin
import webbrowser
def cleanPackage(pkgURI):
pkg = pkgURI.split('.com/')[1]
return pkg
class GoDocGetCommand(sublime_plugin.TextCommand):
def run(self, edit):
view = self.view
for region in view.sel():
selected = view.substr(region)
if "github.corp" in selected:
# if corporate go to page on master branch
pkg = cleanPackage(selected)
res = pkg.split('/')
res.insert(2, 'tree/master')
pkg = '/'.join(res)
webbrowser.open('https://github.corp.dyndns.com/' + pkg)
elif "github" in selected:
# if public package go to doc
pkg = cleanPackage(selected)
webbrowser.open('https://godoc.org/github.com/' + pkg)
else:
# default to golang proper
webbrowser.open('https://golang.org/pkg/' + selected)
|
Set specific branch to go to in GitHub
|
Set specific branch to go to in GitHub
|
Python
|
mit
|
lowellmower/go_doc_get
|
import sublime
import sublime_plugin
import webbrowser
def cleanPackage(pkgURI):
pkg = pkgURI.split('.com/')[1]
return pkg
class GoDocGetCommand(sublime_plugin.TextCommand):
def run(self, edit):
view = self.view
for region in view.sel():
selected = view.substr(region)
if "github.corp" in selected:
- # if corporate go to page
+ # if corporate go to page on master branch
pkg = cleanPackage(selected)
+ res = pkg.split('/')
+ res.insert(2, 'tree/master')
+ pkg = '/'.join(res)
+
webbrowser.open('https://github.corp.dyndns.com/' + pkg)
elif "github" in selected:
# if public package go to doc
pkg = cleanPackage(selected)
webbrowser.open('https://godoc.org/github.com/' + pkg)
else:
# default to golang proper
webbrowser.open('https://golang.org/pkg/' + selected)
|
Set specific branch to go to in GitHub
|
## Code Before:
import sublime
import sublime_plugin
import webbrowser
def cleanPackage(pkgURI):
pkg = pkgURI.split('.com/')[1]
return pkg
class GoDocGetCommand(sublime_plugin.TextCommand):
def run(self, edit):
view = self.view
for region in view.sel():
selected = view.substr(region)
if "github.corp" in selected:
# if corporate go to page
pkg = cleanPackage(selected)
webbrowser.open('https://github.corp.dyndns.com/' + pkg)
elif "github" in selected:
# if public package go to doc
pkg = cleanPackage(selected)
webbrowser.open('https://godoc.org/github.com/' + pkg)
else:
# default to golang proper
webbrowser.open('https://golang.org/pkg/' + selected)
## Instruction:
Set specific branch to go to in GitHub
## Code After:
import sublime
import sublime_plugin
import webbrowser
def cleanPackage(pkgURI):
pkg = pkgURI.split('.com/')[1]
return pkg
class GoDocGetCommand(sublime_plugin.TextCommand):
def run(self, edit):
view = self.view
for region in view.sel():
selected = view.substr(region)
if "github.corp" in selected:
# if corporate go to page on master branch
pkg = cleanPackage(selected)
res = pkg.split('/')
res.insert(2, 'tree/master')
pkg = '/'.join(res)
webbrowser.open('https://github.corp.dyndns.com/' + pkg)
elif "github" in selected:
# if public package go to doc
pkg = cleanPackage(selected)
webbrowser.open('https://godoc.org/github.com/' + pkg)
else:
# default to golang proper
webbrowser.open('https://golang.org/pkg/' + selected)
|
import sublime
import sublime_plugin
import webbrowser
def cleanPackage(pkgURI):
pkg = pkgURI.split('.com/')[1]
return pkg
class GoDocGetCommand(sublime_plugin.TextCommand):
def run(self, edit):
view = self.view
for region in view.sel():
selected = view.substr(region)
if "github.corp" in selected:
- # if corporate go to page
+ # if corporate go to page on master branch
? +++++++++++++++++
pkg = cleanPackage(selected)
+ res = pkg.split('/')
+ res.insert(2, 'tree/master')
+ pkg = '/'.join(res)
+
webbrowser.open('https://github.corp.dyndns.com/' + pkg)
elif "github" in selected:
# if public package go to doc
pkg = cleanPackage(selected)
webbrowser.open('https://godoc.org/github.com/' + pkg)
else:
# default to golang proper
webbrowser.open('https://golang.org/pkg/' + selected)
|
12b6814e558402032e0c12170c678657f1455d08
|
kpi/deployment_backends/mock_backend.py
|
kpi/deployment_backends/mock_backend.py
|
from base_backend import BaseDeploymentBackend
class MockDeploymentBackend(BaseDeploymentBackend):
'''
only used for unit testing and interface testing.
defines the interface for a deployment backend.
'''
def connect(self, identifier=None, active=False):
if not identifier:
identifier = '/assets/%s/deployment/' % self.asset.uid
self.store_data({
'backend': 'mock',
'identifier': identifier,
'active': active,
})
def set_active(self, active):
self.store_data({
'active': bool(active),
})
|
from base_backend import BaseDeploymentBackend
class MockDeploymentBackend(BaseDeploymentBackend):
'''
only used for unit testing and interface testing.
defines the interface for a deployment backend.
'''
def connect(self, identifier=None, active=False):
if not identifier:
identifier = '/assets/%s/deployment/' % self.asset.uid
self.store_data({
'backend': 'mock',
'identifier': identifier,
'active': active,
})
def set_active(self, active):
self.store_data({
'active': bool(active),
})
def get_enketo_survey_links(self):
# `self` is a demo Enketo form, but there's no guarantee it'll be
# around forever.
return {
'url': 'https://enke.to/::self',
'iframe_url': 'https://enke.to/i/::self',
'offline_url': 'https://enke.to/_/#self',
'preview_url': 'https://enke.to/preview/::self',
'preview_iframe_url': 'https://enke.to/preview/i/::self',
}
|
Add `get_enketo_survey_links` to mock backend
|
Add `get_enketo_survey_links` to mock backend
|
Python
|
agpl-3.0
|
onaio/kpi,onaio/kpi,kobotoolbox/kpi,onaio/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,onaio/kpi
|
from base_backend import BaseDeploymentBackend
class MockDeploymentBackend(BaseDeploymentBackend):
'''
only used for unit testing and interface testing.
defines the interface for a deployment backend.
'''
def connect(self, identifier=None, active=False):
if not identifier:
identifier = '/assets/%s/deployment/' % self.asset.uid
self.store_data({
'backend': 'mock',
'identifier': identifier,
'active': active,
})
def set_active(self, active):
self.store_data({
'active': bool(active),
})
+ def get_enketo_survey_links(self):
+ # `self` is a demo Enketo form, but there's no guarantee it'll be
+ # around forever.
+ return {
+ 'url': 'https://enke.to/::self',
+ 'iframe_url': 'https://enke.to/i/::self',
+ 'offline_url': 'https://enke.to/_/#self',
+ 'preview_url': 'https://enke.to/preview/::self',
+ 'preview_iframe_url': 'https://enke.to/preview/i/::self',
+ }
+
|
Add `get_enketo_survey_links` to mock backend
|
## Code Before:
from base_backend import BaseDeploymentBackend
class MockDeploymentBackend(BaseDeploymentBackend):
'''
only used for unit testing and interface testing.
defines the interface for a deployment backend.
'''
def connect(self, identifier=None, active=False):
if not identifier:
identifier = '/assets/%s/deployment/' % self.asset.uid
self.store_data({
'backend': 'mock',
'identifier': identifier,
'active': active,
})
def set_active(self, active):
self.store_data({
'active': bool(active),
})
## Instruction:
Add `get_enketo_survey_links` to mock backend
## Code After:
from base_backend import BaseDeploymentBackend
class MockDeploymentBackend(BaseDeploymentBackend):
'''
only used for unit testing and interface testing.
defines the interface for a deployment backend.
'''
def connect(self, identifier=None, active=False):
if not identifier:
identifier = '/assets/%s/deployment/' % self.asset.uid
self.store_data({
'backend': 'mock',
'identifier': identifier,
'active': active,
})
def set_active(self, active):
self.store_data({
'active': bool(active),
})
def get_enketo_survey_links(self):
# `self` is a demo Enketo form, but there's no guarantee it'll be
# around forever.
return {
'url': 'https://enke.to/::self',
'iframe_url': 'https://enke.to/i/::self',
'offline_url': 'https://enke.to/_/#self',
'preview_url': 'https://enke.to/preview/::self',
'preview_iframe_url': 'https://enke.to/preview/i/::self',
}
|
from base_backend import BaseDeploymentBackend
class MockDeploymentBackend(BaseDeploymentBackend):
'''
only used for unit testing and interface testing.
defines the interface for a deployment backend.
'''
def connect(self, identifier=None, active=False):
if not identifier:
identifier = '/assets/%s/deployment/' % self.asset.uid
self.store_data({
'backend': 'mock',
'identifier': identifier,
'active': active,
})
def set_active(self, active):
self.store_data({
'active': bool(active),
})
+
+ def get_enketo_survey_links(self):
+ # `self` is a demo Enketo form, but there's no guarantee it'll be
+ # around forever.
+ return {
+ 'url': 'https://enke.to/::self',
+ 'iframe_url': 'https://enke.to/i/::self',
+ 'offline_url': 'https://enke.to/_/#self',
+ 'preview_url': 'https://enke.to/preview/::self',
+ 'preview_iframe_url': 'https://enke.to/preview/i/::self',
+ }
|
3224ea27a23e1c254bb93a110be1bd481585cb99
|
mosecom_air/api/models.py
|
mosecom_air/api/models.py
|
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
|
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
class Meta:
index_together = [
['station', 'substance', 'unit', 'performed']
]
|
Add index for Measurement model
|
Add index for Measurement model
|
Python
|
mit
|
elsid/mosecom-air,elsid/mosecom-air,elsid/mosecom-air
|
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
+ class Meta:
+ index_together = [
+ ['station', 'substance', 'unit', 'performed']
+ ]
+
|
Add index for Measurement model
|
## Code Before:
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
## Instruction:
Add index for Measurement model
## Code After:
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
class Meta:
index_together = [
['station', 'substance', 'unit', 'performed']
]
|
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
+
+ class Meta:
+ index_together = [
+ ['station', 'substance', 'unit', 'performed']
+ ]
|
b3407617c723d5bac579074262166ac6790be9d6
|
gcloud/dns/__init__.py
|
gcloud/dns/__init__.py
|
from gcloud.dns.client import Client
from gcloud.dns.connection import Connection
|
from gcloud.dns.client import Client
from gcloud.dns.connection import Connection
SCOPE = Connection.SCOPE
|
Add top-level 'SCOPE' alias for DNS.
|
Add top-level 'SCOPE' alias for DNS.
|
Python
|
apache-2.0
|
tartavull/google-cloud-python,dhermes/gcloud-python,jonparrott/gcloud-python,tswast/google-cloud-python,googleapis/google-cloud-python,Fkawala/gcloud-python,waprin/google-cloud-python,tseaver/google-cloud-python,daspecster/google-cloud-python,tswast/google-cloud-python,GoogleCloudPlatform/gcloud-python,calpeyser/google-cloud-python,quom/google-cloud-python,jonparrott/google-cloud-python,tartavull/google-cloud-python,jgeewax/gcloud-python,dhermes/gcloud-python,dhermes/google-cloud-python,tseaver/google-cloud-python,tswast/google-cloud-python,jonparrott/gcloud-python,waprin/gcloud-python,Fkawala/gcloud-python,calpeyser/google-cloud-python,quom/google-cloud-python,tseaver/gcloud-python,waprin/google-cloud-python,daspecster/google-cloud-python,VitalLabs/gcloud-python,googleapis/google-cloud-python,VitalLabs/gcloud-python,tseaver/gcloud-python,dhermes/google-cloud-python,elibixby/gcloud-python,waprin/gcloud-python,GoogleCloudPlatform/gcloud-python,tseaver/google-cloud-python,elibixby/gcloud-python,jgeewax/gcloud-python,dhermes/google-cloud-python,jonparrott/google-cloud-python
|
from gcloud.dns.client import Client
from gcloud.dns.connection import Connection
+
+ SCOPE = Connection.SCOPE
+
|
Add top-level 'SCOPE' alias for DNS.
|
## Code Before:
from gcloud.dns.client import Client
from gcloud.dns.connection import Connection
## Instruction:
Add top-level 'SCOPE' alias for DNS.
## Code After:
from gcloud.dns.client import Client
from gcloud.dns.connection import Connection
SCOPE = Connection.SCOPE
|
from gcloud.dns.client import Client
from gcloud.dns.connection import Connection
+
+
+ SCOPE = Connection.SCOPE
|
dd27eea0ea43447dad321b4b9ec88f24e5ada268
|
asv/__init__.py
|
asv/__init__.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import os
import sys
if sys.version_info >= (3, 3):
# OS X framework builds of Python 3.3 can not call other 3.3
# virtualenvs as a subprocess because `__PYENV_LAUNCHER__` is
# inherited.
if os.environ.get('__PYVENV_LAUNCHER__'):
os.unsetenv('__PYVENV_LAUNCHER__')
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import os
import sys
if sys.version_info >= (3, 3):
# OS X framework builds of Python 3.3 can not call other 3.3
# virtualenvs as a subprocess because `__PYENV_LAUNCHER__` is
# inherited.
if os.environ.get('__PYVENV_LAUNCHER__'):
os.unsetenv('__PYVENV_LAUNCHER__')
def check_version_compatibility():
"""
Performs a number of compatibility checks with third-party
libraries.
"""
from distutils.version import LooseVersion
if sys.version_info[0] == 3:
import virtualenv
if LooseVersion(virtualenv.__version__) == LooseVersion('1.11'):
raise RuntimeError("asv is not compatible with Python 3.x and virtualenv 1.11")
check_version_compatibility()
|
Add version check for incompatible Python 3.x/virtualenv 1.11 combination
|
Add version check for incompatible Python 3.x/virtualenv 1.11 combination
|
Python
|
bsd-3-clause
|
pv/asv,mdboom/asv,airspeed-velocity/asv,waylonflinn/asv,airspeed-velocity/asv,giltis/asv,cpcloud/asv,spacetelescope/asv,mdboom/asv,giltis/asv,edisongustavo/asv,qwhelan/asv,airspeed-velocity/asv,pv/asv,cpcloud/asv,airspeed-velocity/asv,spacetelescope/asv,mdboom/asv,ericdill/asv,ericdill/asv,pv/asv,cpcloud/asv,mdboom/asv,edisongustavo/asv,qwhelan/asv,pv/asv,qwhelan/asv,spacetelescope/asv,edisongustavo/asv,ericdill/asv,spacetelescope/asv,ericdill/asv,qwhelan/asv,waylonflinn/asv,giltis/asv,waylonflinn/asv
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import os
import sys
if sys.version_info >= (3, 3):
# OS X framework builds of Python 3.3 can not call other 3.3
# virtualenvs as a subprocess because `__PYENV_LAUNCHER__` is
# inherited.
if os.environ.get('__PYVENV_LAUNCHER__'):
os.unsetenv('__PYVENV_LAUNCHER__')
+
+ def check_version_compatibility():
+ """
+ Performs a number of compatibility checks with third-party
+ libraries.
+ """
+ from distutils.version import LooseVersion
+
+ if sys.version_info[0] == 3:
+ import virtualenv
+ if LooseVersion(virtualenv.__version__) == LooseVersion('1.11'):
+ raise RuntimeError("asv is not compatible with Python 3.x and virtualenv 1.11")
+
+
+ check_version_compatibility()
+
|
Add version check for incompatible Python 3.x/virtualenv 1.11 combination
|
## Code Before:
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import os
import sys
if sys.version_info >= (3, 3):
# OS X framework builds of Python 3.3 can not call other 3.3
# virtualenvs as a subprocess because `__PYENV_LAUNCHER__` is
# inherited.
if os.environ.get('__PYVENV_LAUNCHER__'):
os.unsetenv('__PYVENV_LAUNCHER__')
## Instruction:
Add version check for incompatible Python 3.x/virtualenv 1.11 combination
## Code After:
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import os
import sys
if sys.version_info >= (3, 3):
# OS X framework builds of Python 3.3 can not call other 3.3
# virtualenvs as a subprocess because `__PYENV_LAUNCHER__` is
# inherited.
if os.environ.get('__PYVENV_LAUNCHER__'):
os.unsetenv('__PYVENV_LAUNCHER__')
def check_version_compatibility():
"""
Performs a number of compatibility checks with third-party
libraries.
"""
from distutils.version import LooseVersion
if sys.version_info[0] == 3:
import virtualenv
if LooseVersion(virtualenv.__version__) == LooseVersion('1.11'):
raise RuntimeError("asv is not compatible with Python 3.x and virtualenv 1.11")
check_version_compatibility()
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import os
import sys
if sys.version_info >= (3, 3):
# OS X framework builds of Python 3.3 can not call other 3.3
# virtualenvs as a subprocess because `__PYENV_LAUNCHER__` is
# inherited.
if os.environ.get('__PYVENV_LAUNCHER__'):
os.unsetenv('__PYVENV_LAUNCHER__')
+
+
+ def check_version_compatibility():
+ """
+ Performs a number of compatibility checks with third-party
+ libraries.
+ """
+ from distutils.version import LooseVersion
+
+ if sys.version_info[0] == 3:
+ import virtualenv
+ if LooseVersion(virtualenv.__version__) == LooseVersion('1.11'):
+ raise RuntimeError("asv is not compatible with Python 3.x and virtualenv 1.11")
+
+
+ check_version_compatibility()
|
d4e5af537be36bd50405e60fdb46f31b88537916
|
src/commoner_i/views.py
|
src/commoner_i/views.py
|
from django.core.files.storage import default_storage
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from django.http import HttpResponse
def badge(request, username, size=''):
# serve the inactive badge by default
filename = 'images/badge/%sinactive.png' % size
# get a handle for the user profile
profile = get_object_or_404(User, username=username)
profile = profile.get_profile()
if profile.active:
# serve the active badge
filename = 'images/badge%s/active.png' % size
# set the content type appropriately
return HttpResponse(default_storage.open(filename).read(),
content_type='image/png')
|
from django.core.files.storage import default_storage
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from django.http import HttpResponse, Http404
def badge(request, username, size=''):
# serve the inactive badge by default
filename = 'images/badge/%sinactive.png' % size
# get a handle for the user profile
profile = get_object_or_404(User, username=username)
profile = profile.get_profile()
if profile.free:
# return a 404 for FREE profiles
raise Http404
if profile.active:
# serve the active badge
filename = 'images/badge%s/active.png' % size
# set the content type appropriately
return HttpResponse(default_storage.open(filename).read(),
content_type='image/png')
|
Raise a 404 when for FREE profile badge requests
|
Raise a 404 when for FREE profile badge requests
|
Python
|
agpl-3.0
|
cc-archive/commoner,cc-archive/commoner
|
from django.core.files.storage import default_storage
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
- from django.http import HttpResponse
+ from django.http import HttpResponse, Http404
def badge(request, username, size=''):
# serve the inactive badge by default
filename = 'images/badge/%sinactive.png' % size
# get a handle for the user profile
profile = get_object_or_404(User, username=username)
profile = profile.get_profile()
+ if profile.free:
+ # return a 404 for FREE profiles
+ raise Http404
+
if profile.active:
- # serve the active badge
+ # serve the active badge
- filename = 'images/badge%s/active.png' % size
+ filename = 'images/badge%s/active.png' % size
# set the content type appropriately
return HttpResponse(default_storage.open(filename).read(),
content_type='image/png')
|
Raise a 404 when for FREE profile badge requests
|
## Code Before:
from django.core.files.storage import default_storage
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from django.http import HttpResponse
def badge(request, username, size=''):
# serve the inactive badge by default
filename = 'images/badge/%sinactive.png' % size
# get a handle for the user profile
profile = get_object_or_404(User, username=username)
profile = profile.get_profile()
if profile.active:
# serve the active badge
filename = 'images/badge%s/active.png' % size
# set the content type appropriately
return HttpResponse(default_storage.open(filename).read(),
content_type='image/png')
## Instruction:
Raise a 404 when for FREE profile badge requests
## Code After:
from django.core.files.storage import default_storage
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from django.http import HttpResponse, Http404
def badge(request, username, size=''):
# serve the inactive badge by default
filename = 'images/badge/%sinactive.png' % size
# get a handle for the user profile
profile = get_object_or_404(User, username=username)
profile = profile.get_profile()
if profile.free:
# return a 404 for FREE profiles
raise Http404
if profile.active:
# serve the active badge
filename = 'images/badge%s/active.png' % size
# set the content type appropriately
return HttpResponse(default_storage.open(filename).read(),
content_type='image/png')
|
from django.core.files.storage import default_storage
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
- from django.http import HttpResponse
+ from django.http import HttpResponse, Http404
? +++++++++
def badge(request, username, size=''):
# serve the inactive badge by default
filename = 'images/badge/%sinactive.png' % size
# get a handle for the user profile
profile = get_object_or_404(User, username=username)
profile = profile.get_profile()
+ if profile.free:
+ # return a 404 for FREE profiles
+ raise Http404
+
if profile.active:
- # serve the active badge
+ # serve the active badge
? +
- filename = 'images/badge%s/active.png' % size
+ filename = 'images/badge%s/active.png' % size
? +
# set the content type appropriately
return HttpResponse(default_storage.open(filename).read(),
content_type='image/png')
|
b207cd8005a0d3a56dc87cc1194458128f94a675
|
awacs/helpers/trust.py
|
awacs/helpers/trust.py
|
from awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
Statement=[
Statement(
Principal=Principal('Service', [service]),
Effect=Allow,
Action=[sts.AssumeRole]
)
]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
Statement=[
Statement(
Principal=Principal('Service', [service]),
Effect=Allow,
Action=[sts.AssumeRole]
)
]
)
return policy
|
from awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
def make_simple_assume_statement(principal):
return Statement(
Principal=Principal('Service', [principal]),
Effect=Allow,
Action=[sts.AssumeRole])
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
Statement=[make_simple_assume_statement(service)]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
Statement=[make_simple_assume_statement(service)]
)
return policy
|
Simplify the code a little
|
Simplify the code a little
|
Python
|
bsd-2-clause
|
craigbruce/awacs,cloudtools/awacs
|
from awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
+
+
+ def make_simple_assume_statement(principal):
+ return Statement(
+ Principal=Principal('Service', [principal]),
+ Effect=Allow,
+ Action=[sts.AssumeRole])
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
+ Statement=[make_simple_assume_statement(service)]
- Statement=[
- Statement(
- Principal=Principal('Service', [service]),
- Effect=Allow,
- Action=[sts.AssumeRole]
- )
- ]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
+ Statement=[make_simple_assume_statement(service)]
- Statement=[
- Statement(
- Principal=Principal('Service', [service]),
- Effect=Allow,
- Action=[sts.AssumeRole]
- )
- ]
)
return policy
|
Simplify the code a little
|
## Code Before:
from awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
Statement=[
Statement(
Principal=Principal('Service', [service]),
Effect=Allow,
Action=[sts.AssumeRole]
)
]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
Statement=[
Statement(
Principal=Principal('Service', [service]),
Effect=Allow,
Action=[sts.AssumeRole]
)
]
)
return policy
## Instruction:
Simplify the code a little
## Code After:
from awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
def make_simple_assume_statement(principal):
return Statement(
Principal=Principal('Service', [principal]),
Effect=Allow,
Action=[sts.AssumeRole])
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
Statement=[make_simple_assume_statement(service)]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
Statement=[make_simple_assume_statement(service)]
)
return policy
|
from awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
+
+
+ def make_simple_assume_statement(principal):
+ return Statement(
+ Principal=Principal('Service', [principal]),
+ Effect=Allow,
+ Action=[sts.AssumeRole])
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
+ Statement=[make_simple_assume_statement(service)]
- Statement=[
- Statement(
- Principal=Principal('Service', [service]),
- Effect=Allow,
- Action=[sts.AssumeRole]
- )
- ]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
+ Statement=[make_simple_assume_statement(service)]
- Statement=[
- Statement(
- Principal=Principal('Service', [service]),
- Effect=Allow,
- Action=[sts.AssumeRole]
- )
- ]
)
return policy
|
d8b477083866a105947281ca34cb6e215417f44d
|
packs/salt/actions/lib/utils.py
|
packs/salt/actions/lib/utils.py
|
import yaml
action_meta = {
"name": "",
"parameters": {
"action": {
"type": "string",
"immutable": True,
"default": ""
},
"kwargs": {
"type": "object",
"required": False
}
},
"runner_type": "run-python",
"description": "Run Salt Runner functions through Salt API",
"enabled": True,
"entry_point": "runner.py"}
def generate_action(module_type, action):
manifest = action_meta
manifest['name'] = "{0}_{1}".format(module_type, action)
manifest['parameters']['action']['default'] = action
fh = open('{0}_{1}.yaml'.format(module_type, action), 'w')
fh.write('---\n')
fh.write(yaml.dump(manifest, default_flow_style=False))
fh.close()
def sanitize_payload(keys_to_sanitize, payload):
data = payload.copy()
map(lambda k: data.update({k: "*" * len(payload[k])}), keys_to_sanitize)
return data
|
import yaml
from .meta import actions
runner_action_meta = {
"name": "",
"parameters": {
"action": {
"type": "string",
"immutable": True,
"default": ""
},
"kwargs": {
"type": "object",
"required": False
}
},
"runner_type": "run-python",
"description": "Run Salt Runner functions through Salt API",
"enabled": True,
"entry_point": "runner.py"}
local_action_meta = {
"name": "",
"parameters": {
"action": {
"type": "string",
"immutable": True,
"default": ""
},
"args": {
"type": "array",
"required": False
},
"kwargs": {
"type": "object",
"required": False
}
},
"runner_type": "run-python",
"description": "Run Salt Execution modules through Salt API",
"enabled": True,
"entry_point": "local.py"}
def generate_actions():
def create_file(mt, m, a):
manifest = local_action_meta
manifest['name'] = "{0}_{1}.{2}".format(mt, m, a)
manifest['parameters']['action']['default'] = "{0}.{1}".format(m, a)
fh = open('{0}_{1}.{2}.yaml'.format(mt, m, a), 'w')
fh.write('---\n')
fh.write(yaml.dump(manifest, default_flow_style=False))
fh.close()
for key in actions:
map(lambda l: create_file('local', key, l), actions[key])
def sanitize_payload(keys_to_sanitize, payload):
'''
Removes sensitive data from payloads before
publishing to the logs
'''
data = payload.copy()
map(lambda k: data.update({k: "*" * len(payload[k])}), keys_to_sanitize)
return data
|
Make distinction between local and runner action payload templates. Added small description for sanitizing the NetAPI payload for logging.
|
Make distinction between local and runner action payload templates.
Added small description for sanitizing the NetAPI payload for logging.
|
Python
|
apache-2.0
|
pidah/st2contrib,StackStorm/st2contrib,psychopenguin/st2contrib,lmEshoo/st2contrib,armab/st2contrib,StackStorm/st2contrib,pearsontechnology/st2contrib,digideskio/st2contrib,digideskio/st2contrib,armab/st2contrib,tonybaloney/st2contrib,pearsontechnology/st2contrib,lmEshoo/st2contrib,tonybaloney/st2contrib,psychopenguin/st2contrib,pearsontechnology/st2contrib,pidah/st2contrib,armab/st2contrib,tonybaloney/st2contrib,pearsontechnology/st2contrib,StackStorm/st2contrib,pidah/st2contrib
|
+
import yaml
+ from .meta import actions
- action_meta = {
+ runner_action_meta = {
"name": "",
"parameters": {
"action": {
"type": "string",
"immutable": True,
"default": ""
},
"kwargs": {
"type": "object",
"required": False
}
},
"runner_type": "run-python",
"description": "Run Salt Runner functions through Salt API",
"enabled": True,
"entry_point": "runner.py"}
+ local_action_meta = {
+ "name": "",
+ "parameters": {
+ "action": {
+ "type": "string",
+ "immutable": True,
+ "default": ""
+ },
+ "args": {
+ "type": "array",
+ "required": False
+ },
+ "kwargs": {
+ "type": "object",
+ "required": False
+ }
+ },
+ "runner_type": "run-python",
+ "description": "Run Salt Execution modules through Salt API",
+ "enabled": True,
+ "entry_point": "local.py"}
- def generate_action(module_type, action):
- manifest = action_meta
- manifest['name'] = "{0}_{1}".format(module_type, action)
- manifest['parameters']['action']['default'] = action
+ def generate_actions():
+ def create_file(mt, m, a):
+ manifest = local_action_meta
+ manifest['name'] = "{0}_{1}.{2}".format(mt, m, a)
+ manifest['parameters']['action']['default'] = "{0}.{1}".format(m, a)
+
- fh = open('{0}_{1}.yaml'.format(module_type, action), 'w')
+ fh = open('{0}_{1}.{2}.yaml'.format(mt, m, a), 'w')
- fh.write('---\n')
+ fh.write('---\n')
- fh.write(yaml.dump(manifest, default_flow_style=False))
+ fh.write(yaml.dump(manifest, default_flow_style=False))
- fh.close()
+ fh.close()
+ for key in actions:
+ map(lambda l: create_file('local', key, l), actions[key])
def sanitize_payload(keys_to_sanitize, payload):
+ '''
+ Removes sensitive data from payloads before
+ publishing to the logs
+ '''
data = payload.copy()
map(lambda k: data.update({k: "*" * len(payload[k])}), keys_to_sanitize)
return data
|
Make distinction between local and runner action payload templates. Added small description for sanitizing the NetAPI payload for logging.
|
## Code Before:
import yaml
action_meta = {
"name": "",
"parameters": {
"action": {
"type": "string",
"immutable": True,
"default": ""
},
"kwargs": {
"type": "object",
"required": False
}
},
"runner_type": "run-python",
"description": "Run Salt Runner functions through Salt API",
"enabled": True,
"entry_point": "runner.py"}
def generate_action(module_type, action):
manifest = action_meta
manifest['name'] = "{0}_{1}".format(module_type, action)
manifest['parameters']['action']['default'] = action
fh = open('{0}_{1}.yaml'.format(module_type, action), 'w')
fh.write('---\n')
fh.write(yaml.dump(manifest, default_flow_style=False))
fh.close()
def sanitize_payload(keys_to_sanitize, payload):
data = payload.copy()
map(lambda k: data.update({k: "*" * len(payload[k])}), keys_to_sanitize)
return data
## Instruction:
Make distinction between local and runner action payload templates. Added small description for sanitizing the NetAPI payload for logging.
## Code After:
import yaml
from .meta import actions
runner_action_meta = {
"name": "",
"parameters": {
"action": {
"type": "string",
"immutable": True,
"default": ""
},
"kwargs": {
"type": "object",
"required": False
}
},
"runner_type": "run-python",
"description": "Run Salt Runner functions through Salt API",
"enabled": True,
"entry_point": "runner.py"}
local_action_meta = {
"name": "",
"parameters": {
"action": {
"type": "string",
"immutable": True,
"default": ""
},
"args": {
"type": "array",
"required": False
},
"kwargs": {
"type": "object",
"required": False
}
},
"runner_type": "run-python",
"description": "Run Salt Execution modules through Salt API",
"enabled": True,
"entry_point": "local.py"}
def generate_actions():
def create_file(mt, m, a):
manifest = local_action_meta
manifest['name'] = "{0}_{1}.{2}".format(mt, m, a)
manifest['parameters']['action']['default'] = "{0}.{1}".format(m, a)
fh = open('{0}_{1}.{2}.yaml'.format(mt, m, a), 'w')
fh.write('---\n')
fh.write(yaml.dump(manifest, default_flow_style=False))
fh.close()
for key in actions:
map(lambda l: create_file('local', key, l), actions[key])
def sanitize_payload(keys_to_sanitize, payload):
'''
Removes sensitive data from payloads before
publishing to the logs
'''
data = payload.copy()
map(lambda k: data.update({k: "*" * len(payload[k])}), keys_to_sanitize)
return data
|
+
import yaml
+ from .meta import actions
- action_meta = {
+ runner_action_meta = {
? +++++++
"name": "",
"parameters": {
"action": {
"type": "string",
"immutable": True,
"default": ""
},
"kwargs": {
"type": "object",
"required": False
}
},
"runner_type": "run-python",
"description": "Run Salt Runner functions through Salt API",
"enabled": True,
"entry_point": "runner.py"}
+ local_action_meta = {
+ "name": "",
+ "parameters": {
+ "action": {
+ "type": "string",
+ "immutable": True,
+ "default": ""
+ },
+ "args": {
+ "type": "array",
+ "required": False
+ },
+ "kwargs": {
+ "type": "object",
+ "required": False
+ }
+ },
+ "runner_type": "run-python",
+ "description": "Run Salt Execution modules through Salt API",
+ "enabled": True,
+ "entry_point": "local.py"}
- def generate_action(module_type, action):
- manifest = action_meta
- manifest['name'] = "{0}_{1}".format(module_type, action)
- manifest['parameters']['action']['default'] = action
+ def generate_actions():
+ def create_file(mt, m, a):
+ manifest = local_action_meta
+ manifest['name'] = "{0}_{1}.{2}".format(mt, m, a)
+ manifest['parameters']['action']['default'] = "{0}.{1}".format(m, a)
+
- fh = open('{0}_{1}.yaml'.format(module_type, action), 'w')
? ------ ^^^ -----
+ fh = open('{0}_{1}.{2}.yaml'.format(mt, m, a), 'w')
? ++++ ++++ ^^^
- fh.write('---\n')
+ fh.write('---\n')
? ++++
- fh.write(yaml.dump(manifest, default_flow_style=False))
+ fh.write(yaml.dump(manifest, default_flow_style=False))
? ++++
- fh.close()
+ fh.close()
? ++++
+ for key in actions:
+ map(lambda l: create_file('local', key, l), actions[key])
def sanitize_payload(keys_to_sanitize, payload):
+ '''
+ Removes sensitive data from payloads before
+ publishing to the logs
+ '''
data = payload.copy()
map(lambda k: data.update({k: "*" * len(payload[k])}), keys_to_sanitize)
return data
|
1902ed44f41eabf1c8207e47d5c31dd58471146f
|
pymunk/transform.py
|
pymunk/transform.py
|
from typing import NamedTuple
class Transform(NamedTuple):
"""Type used for 2x3 affine transforms.
See wikipedia for details:
http://en.wikipedia.org/wiki/Affine_transformation
The properties map to the matrix in this way:
= = ==
= = ==
a c tx
b d ty
= = ==
An instance can be created in this way::
>>> Transform(1,2,3,4,5,6)
Transform(a=1, b=2, c=3, d=4, tx=5, ty=6)
Or using the default identity in this way::
>>> Transform.identity()
Transform(a=1, b=0, c=0, d=1, tx=0, ty=0)
Or overriding only some of the values (on a identity matrix):
>>> Transform(b=3,ty=5)
Transform(a=1, b=3, c=0, d=1, tx=0, ty=5)
"""
a: float = 1
b: float = 0
c: float = 0
d: float = 1
tx: float = 0
ty: float = 0
@staticmethod
def identity() -> "Transform":
"""The identity transform"""
return Transform(1, 0, 0, 1, 0, 0)
|
import math
from typing import NamedTuple
class Transform(NamedTuple):
"""Type used for 2x3 affine transforms.
See wikipedia for details:
http://en.wikipedia.org/wiki/Affine_transformation
The properties map to the matrix in this way:
= = ==
= = ==
a c tx
b d ty
= = ==
An instance can be created in this way::
>>> Transform(1,2,3,4,5,6)
Transform(a=1, b=2, c=3, d=4, tx=5, ty=6)
Or using the default identity in this way::
>>> Transform.identity()
Transform(a=1, b=0, c=0, d=1, tx=0, ty=0)
Or overriding only some of the values (on a identity matrix):
>>> Transform(b=3,ty=5)
Transform(a=1, b=3, c=0, d=1, tx=0, ty=5)
"""
a: float = 1
b: float = 0
c: float = 0
d: float = 1
tx: float = 0
ty: float = 0
@staticmethod
def identity() -> "Transform":
"""The identity transform"""
return Transform(1, 0, 0, 1, 0, 0)
@staticmethod
def translation(x, y) -> "Transform":
return Transform(tx=x, ty=y)
# split into scale and scale_non-uniform
@staticmethod
def scaling(v) -> "Transform":
return Transform(a=v, d=v)
@staticmethod
def rotation(t) -> "Transform":
c = math.cos(t)
s = math.sin(t)
return Transform(a=c, b=s, c=-s, d=c)
|
Add some helper methods to create translate, scale and rotate Transforms.
|
Add some helper methods to create translate, scale and rotate Transforms.
|
Python
|
mit
|
viblo/pymunk,viblo/pymunk
|
+ import math
from typing import NamedTuple
class Transform(NamedTuple):
"""Type used for 2x3 affine transforms.
See wikipedia for details:
http://en.wikipedia.org/wiki/Affine_transformation
The properties map to the matrix in this way:
= = ==
= = ==
a c tx
b d ty
= = ==
An instance can be created in this way::
>>> Transform(1,2,3,4,5,6)
Transform(a=1, b=2, c=3, d=4, tx=5, ty=6)
Or using the default identity in this way::
>>> Transform.identity()
Transform(a=1, b=0, c=0, d=1, tx=0, ty=0)
Or overriding only some of the values (on a identity matrix):
>>> Transform(b=3,ty=5)
Transform(a=1, b=3, c=0, d=1, tx=0, ty=5)
"""
a: float = 1
b: float = 0
c: float = 0
d: float = 1
tx: float = 0
ty: float = 0
@staticmethod
def identity() -> "Transform":
"""The identity transform"""
return Transform(1, 0, 0, 1, 0, 0)
+ @staticmethod
+ def translation(x, y) -> "Transform":
+ return Transform(tx=x, ty=y)
+
+ # split into scale and scale_non-uniform
+ @staticmethod
+ def scaling(v) -> "Transform":
+ return Transform(a=v, d=v)
+
+ @staticmethod
+ def rotation(t) -> "Transform":
+ c = math.cos(t)
+ s = math.sin(t)
+ return Transform(a=c, b=s, c=-s, d=c)
+
|
Add some helper methods to create translate, scale and rotate Transforms.
|
## Code Before:
from typing import NamedTuple
class Transform(NamedTuple):
"""Type used for 2x3 affine transforms.
See wikipedia for details:
http://en.wikipedia.org/wiki/Affine_transformation
The properties map to the matrix in this way:
= = ==
= = ==
a c tx
b d ty
= = ==
An instance can be created in this way::
>>> Transform(1,2,3,4,5,6)
Transform(a=1, b=2, c=3, d=4, tx=5, ty=6)
Or using the default identity in this way::
>>> Transform.identity()
Transform(a=1, b=0, c=0, d=1, tx=0, ty=0)
Or overriding only some of the values (on a identity matrix):
>>> Transform(b=3,ty=5)
Transform(a=1, b=3, c=0, d=1, tx=0, ty=5)
"""
a: float = 1
b: float = 0
c: float = 0
d: float = 1
tx: float = 0
ty: float = 0
@staticmethod
def identity() -> "Transform":
"""The identity transform"""
return Transform(1, 0, 0, 1, 0, 0)
## Instruction:
Add some helper methods to create translate, scale and rotate Transforms.
## Code After:
import math
from typing import NamedTuple
class Transform(NamedTuple):
"""Type used for 2x3 affine transforms.
See wikipedia for details:
http://en.wikipedia.org/wiki/Affine_transformation
The properties map to the matrix in this way:
= = ==
= = ==
a c tx
b d ty
= = ==
An instance can be created in this way::
>>> Transform(1,2,3,4,5,6)
Transform(a=1, b=2, c=3, d=4, tx=5, ty=6)
Or using the default identity in this way::
>>> Transform.identity()
Transform(a=1, b=0, c=0, d=1, tx=0, ty=0)
Or overriding only some of the values (on a identity matrix):
>>> Transform(b=3,ty=5)
Transform(a=1, b=3, c=0, d=1, tx=0, ty=5)
"""
a: float = 1
b: float = 0
c: float = 0
d: float = 1
tx: float = 0
ty: float = 0
@staticmethod
def identity() -> "Transform":
"""The identity transform"""
return Transform(1, 0, 0, 1, 0, 0)
@staticmethod
def translation(x, y) -> "Transform":
return Transform(tx=x, ty=y)
# split into scale and scale_non-uniform
@staticmethod
def scaling(v) -> "Transform":
return Transform(a=v, d=v)
@staticmethod
def rotation(t) -> "Transform":
c = math.cos(t)
s = math.sin(t)
return Transform(a=c, b=s, c=-s, d=c)
|
+ import math
from typing import NamedTuple
class Transform(NamedTuple):
"""Type used for 2x3 affine transforms.
See wikipedia for details:
http://en.wikipedia.org/wiki/Affine_transformation
The properties map to the matrix in this way:
= = ==
= = ==
a c tx
b d ty
= = ==
An instance can be created in this way::
>>> Transform(1,2,3,4,5,6)
Transform(a=1, b=2, c=3, d=4, tx=5, ty=6)
Or using the default identity in this way::
>>> Transform.identity()
Transform(a=1, b=0, c=0, d=1, tx=0, ty=0)
Or overriding only some of the values (on a identity matrix):
>>> Transform(b=3,ty=5)
Transform(a=1, b=3, c=0, d=1, tx=0, ty=5)
"""
a: float = 1
b: float = 0
c: float = 0
d: float = 1
tx: float = 0
ty: float = 0
@staticmethod
def identity() -> "Transform":
"""The identity transform"""
return Transform(1, 0, 0, 1, 0, 0)
+
+ @staticmethod
+ def translation(x, y) -> "Transform":
+ return Transform(tx=x, ty=y)
+
+ # split into scale and scale_non-uniform
+ @staticmethod
+ def scaling(v) -> "Transform":
+ return Transform(a=v, d=v)
+
+ @staticmethod
+ def rotation(t) -> "Transform":
+ c = math.cos(t)
+ s = math.sin(t)
+ return Transform(a=c, b=s, c=-s, d=c)
|
0ba616bbd037a1c84f20221a95a623d853da9db9
|
garfield/sims/tests.py
|
garfield/sims/tests.py
|
from mock import patch
from sms.tests.test_sms import GarfieldTwilioTestCase
from sms.tests.test_sms import GarfieldTwilioTestClient
class GarfieldTestSimSmsCaseNewJohn(GarfieldTwilioTestCase):
@patch('sms.tasks.save_sms_message.apply_async')
def test_sim_receive_sms(self, mock_save_sms_message):
response = self.client.sms("Test.",
path="/sims/sms/receive/")
self.assert_twiml(response)
self.assertTrue(mock_save_sms_message.called)
@patch('sms.tasks.save_sms_message.apply_async')
def test_sim_send_sms(self, mock_save_sms_message):
response = self.client.sms("Test.",
path="/sims/sms/send/")
self.assert_twiml(response)
self.assertTrue(mock_save_sms_message.called)
class GarfieldTestSimVoiceCase(GarfieldTwilioTestCase):
def test_sims_receive_call(self):
response = self.client.call("Test.",
path="/sims/voice/receive/")
self.assert_twiml(response)
def test_sims_send_call(self):
response = self.client.call("Test.",
path="/sims/voice/send/")
self.assert_twiml(response)
|
from django.test import override_settings
from mock import patch
from sms.tests.test_sms import GarfieldTwilioTestCase
from sms.tests.test_sms import GarfieldTwilioTestClient
@override_settings(TWILIO_PHONE_NUMBER="+15558675309")
class GarfieldTestSimSmsCaseNewJohn(GarfieldTwilioTestCase):
@patch('sms.tasks.save_sms_message.apply_async')
def test_sim_receive_sms(self, mock_save_sms_message):
response = self.client.sms("Test.",
path="/sims/sms/receive/")
self.assert_twiml(response)
self.assertTrue(mock_save_sms_message.called)
@patch('sms.tasks.save_sms_message.apply_async')
def test_sim_send_sms(self, mock_save_sms_message):
response = self.client.sms("Test.",
path="/sims/sms/send/")
self.assert_twiml(response)
self.assertTrue(mock_save_sms_message.called)
@override_settings(TWILIO_PHONE_NUMBER="+15558675309")
class GarfieldTestSimVoiceCase(GarfieldTwilioTestCase):
def test_sims_receive_call(self):
response = self.client.call("Test.",
path="/sims/voice/receive/")
self.assert_twiml(response)
def test_sims_send_call(self):
response = self.client.call("Test.",
path="/sims/voice/send/")
self.assert_twiml(response)
|
Add override settings for CI without local settings.
|
Add override settings for CI without local settings.
|
Python
|
mit
|
RobSpectre/garfield,RobSpectre/garfield
|
+ from django.test import override_settings
+
from mock import patch
from sms.tests.test_sms import GarfieldTwilioTestCase
from sms.tests.test_sms import GarfieldTwilioTestClient
+ @override_settings(TWILIO_PHONE_NUMBER="+15558675309")
class GarfieldTestSimSmsCaseNewJohn(GarfieldTwilioTestCase):
@patch('sms.tasks.save_sms_message.apply_async')
def test_sim_receive_sms(self, mock_save_sms_message):
response = self.client.sms("Test.",
path="/sims/sms/receive/")
self.assert_twiml(response)
self.assertTrue(mock_save_sms_message.called)
@patch('sms.tasks.save_sms_message.apply_async')
def test_sim_send_sms(self, mock_save_sms_message):
response = self.client.sms("Test.",
path="/sims/sms/send/")
self.assert_twiml(response)
self.assertTrue(mock_save_sms_message.called)
+ @override_settings(TWILIO_PHONE_NUMBER="+15558675309")
class GarfieldTestSimVoiceCase(GarfieldTwilioTestCase):
def test_sims_receive_call(self):
response = self.client.call("Test.",
path="/sims/voice/receive/")
self.assert_twiml(response)
def test_sims_send_call(self):
response = self.client.call("Test.",
path="/sims/voice/send/")
self.assert_twiml(response)
|
Add override settings for CI without local settings.
|
## Code Before:
from mock import patch
from sms.tests.test_sms import GarfieldTwilioTestCase
from sms.tests.test_sms import GarfieldTwilioTestClient
class GarfieldTestSimSmsCaseNewJohn(GarfieldTwilioTestCase):
@patch('sms.tasks.save_sms_message.apply_async')
def test_sim_receive_sms(self, mock_save_sms_message):
response = self.client.sms("Test.",
path="/sims/sms/receive/")
self.assert_twiml(response)
self.assertTrue(mock_save_sms_message.called)
@patch('sms.tasks.save_sms_message.apply_async')
def test_sim_send_sms(self, mock_save_sms_message):
response = self.client.sms("Test.",
path="/sims/sms/send/")
self.assert_twiml(response)
self.assertTrue(mock_save_sms_message.called)
class GarfieldTestSimVoiceCase(GarfieldTwilioTestCase):
def test_sims_receive_call(self):
response = self.client.call("Test.",
path="/sims/voice/receive/")
self.assert_twiml(response)
def test_sims_send_call(self):
response = self.client.call("Test.",
path="/sims/voice/send/")
self.assert_twiml(response)
## Instruction:
Add override settings for CI without local settings.
## Code After:
from django.test import override_settings
from mock import patch
from sms.tests.test_sms import GarfieldTwilioTestCase
from sms.tests.test_sms import GarfieldTwilioTestClient
@override_settings(TWILIO_PHONE_NUMBER="+15558675309")
class GarfieldTestSimSmsCaseNewJohn(GarfieldTwilioTestCase):
@patch('sms.tasks.save_sms_message.apply_async')
def test_sim_receive_sms(self, mock_save_sms_message):
response = self.client.sms("Test.",
path="/sims/sms/receive/")
self.assert_twiml(response)
self.assertTrue(mock_save_sms_message.called)
@patch('sms.tasks.save_sms_message.apply_async')
def test_sim_send_sms(self, mock_save_sms_message):
response = self.client.sms("Test.",
path="/sims/sms/send/")
self.assert_twiml(response)
self.assertTrue(mock_save_sms_message.called)
@override_settings(TWILIO_PHONE_NUMBER="+15558675309")
class GarfieldTestSimVoiceCase(GarfieldTwilioTestCase):
def test_sims_receive_call(self):
response = self.client.call("Test.",
path="/sims/voice/receive/")
self.assert_twiml(response)
def test_sims_send_call(self):
response = self.client.call("Test.",
path="/sims/voice/send/")
self.assert_twiml(response)
|
+ from django.test import override_settings
+
from mock import patch
from sms.tests.test_sms import GarfieldTwilioTestCase
from sms.tests.test_sms import GarfieldTwilioTestClient
+ @override_settings(TWILIO_PHONE_NUMBER="+15558675309")
class GarfieldTestSimSmsCaseNewJohn(GarfieldTwilioTestCase):
@patch('sms.tasks.save_sms_message.apply_async')
def test_sim_receive_sms(self, mock_save_sms_message):
response = self.client.sms("Test.",
path="/sims/sms/receive/")
self.assert_twiml(response)
self.assertTrue(mock_save_sms_message.called)
@patch('sms.tasks.save_sms_message.apply_async')
def test_sim_send_sms(self, mock_save_sms_message):
response = self.client.sms("Test.",
path="/sims/sms/send/")
self.assert_twiml(response)
self.assertTrue(mock_save_sms_message.called)
+ @override_settings(TWILIO_PHONE_NUMBER="+15558675309")
class GarfieldTestSimVoiceCase(GarfieldTwilioTestCase):
def test_sims_receive_call(self):
response = self.client.call("Test.",
path="/sims/voice/receive/")
self.assert_twiml(response)
def test_sims_send_call(self):
response = self.client.call("Test.",
path="/sims/voice/send/")
self.assert_twiml(response)
|
bc3e31838fd1b5eec3c4ca17f5fab4588ac87904
|
tests/client/test_TelnetClient.py
|
tests/client/test_TelnetClient.py
|
import unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
def test_connect(self, socket_mock):
def callback(raw_message):
pass
client = TelnetClient()
client.run(callback=callback)
|
import unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
def test_connect_disconnect(self, socket_mock):
client = TelnetClient()
client.connect()
client.sock.connect.assert_called_once()
client.disconnect()
client.sock.shutdown.assert_called_once()
client.sock.close.assert_called_once()
@mock.patch('ogn.client.client.socket')
def test_run(self, socket_mock):
def callback(raw_message):
raise ConnectionRefusedError
client = TelnetClient()
client.connect()
client.run(callback=callback)
|
Update to receiver version 0.2.6
|
Update to receiver version 0.2.6
Update to receiver version 0.2.6
Better testing
|
Python
|
agpl-3.0
|
glidernet/python-ogn-client
|
import unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
+ def test_connect_disconnect(self, socket_mock):
+ client = TelnetClient()
+ client.connect()
+ client.sock.connect.assert_called_once()
+
+ client.disconnect()
+ client.sock.shutdown.assert_called_once()
+ client.sock.close.assert_called_once()
+
+ @mock.patch('ogn.client.client.socket')
- def test_connect(self, socket_mock):
+ def test_run(self, socket_mock):
def callback(raw_message):
- pass
+ raise ConnectionRefusedError
client = TelnetClient()
+ client.connect()
+
client.run(callback=callback)
|
Update to receiver version 0.2.6
|
## Code Before:
import unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
def test_connect(self, socket_mock):
def callback(raw_message):
pass
client = TelnetClient()
client.run(callback=callback)
## Instruction:
Update to receiver version 0.2.6
## Code After:
import unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
def test_connect_disconnect(self, socket_mock):
client = TelnetClient()
client.connect()
client.sock.connect.assert_called_once()
client.disconnect()
client.sock.shutdown.assert_called_once()
client.sock.close.assert_called_once()
@mock.patch('ogn.client.client.socket')
def test_run(self, socket_mock):
def callback(raw_message):
raise ConnectionRefusedError
client = TelnetClient()
client.connect()
client.run(callback=callback)
|
import unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
+ def test_connect_disconnect(self, socket_mock):
+ client = TelnetClient()
+ client.connect()
+ client.sock.connect.assert_called_once()
+
+ client.disconnect()
+ client.sock.shutdown.assert_called_once()
+ client.sock.close.assert_called_once()
+
+ @mock.patch('ogn.client.client.socket')
- def test_connect(self, socket_mock):
? ^^ ----
+ def test_run(self, socket_mock):
? ^^
def callback(raw_message):
- pass
+ raise ConnectionRefusedError
client = TelnetClient()
+ client.connect()
+
client.run(callback=callback)
|
153ed6a519d6836adb02b934cff44974a7132b6d
|
flake8/parseDocTest.py
|
flake8/parseDocTest.py
|
def parseFailDetails(failDetails):
""" Parse the line number of the doctest failure"""
import re
failDetails = failDetails.split(',')
lineNo = -1
if len(failDetails) == 3:
match = re.search("line.*?(\d+)", failDetails[1])
if match is None:
return lineNo
lineNo = int(match.group(1))
return lineNo
def parseDocTestResult(docTestResStr):
""" Extract the line number and filename of the doctest failure"""
lines = docTestResStr.split("\n")
for lineNo, line in enumerate(lines):
failure = line.find("Failed example:")
if failure != -1:
failDetails = lines[lineNo - 1]
yield parseFailDetails(failDetails)
|
def parseFailDetails(failDetails):
""" Parse the line number of the doctest failure
>>> parseFailDetails("blah")
-1
"""
import re
failDetails = failDetails.split(',')
lineNo = -1
if len(failDetails) == 3:
match = re.search("line.*?(\d+)", failDetails[1])
if match is None:
return lineNo
lineNo = int(match.group(1))
return lineNo
def parseDocTestResult(docTestResStr):
""" Extract the line number and filename of the doctest failure"""
lines = docTestResStr.split("\n")
for lineNo, line in enumerate(lines):
failure = line.find("Failed example:")
if failure != -1:
failDetails = lines[lineNo - 1]
yield parseFailDetails(failDetails)
if __name__ == "__main__":
from doctest import testmod
testmod()
|
Fix doc test failure parsing
|
Fix doc test failure parsing
|
Python
|
mit
|
softwaredoug/flake8_doctest
|
def parseFailDetails(failDetails):
- """ Parse the line number of the doctest failure"""
+ """ Parse the line number of the doctest failure
+ >>> parseFailDetails("blah")
+ -1
+ """
import re
failDetails = failDetails.split(',')
lineNo = -1
if len(failDetails) == 3:
match = re.search("line.*?(\d+)", failDetails[1])
if match is None:
return lineNo
lineNo = int(match.group(1))
return lineNo
def parseDocTestResult(docTestResStr):
""" Extract the line number and filename of the doctest failure"""
lines = docTestResStr.split("\n")
for lineNo, line in enumerate(lines):
failure = line.find("Failed example:")
if failure != -1:
failDetails = lines[lineNo - 1]
yield parseFailDetails(failDetails)
+ if __name__ == "__main__":
+ from doctest import testmod
+ testmod()
+
|
Fix doc test failure parsing
|
## Code Before:
def parseFailDetails(failDetails):
""" Parse the line number of the doctest failure"""
import re
failDetails = failDetails.split(',')
lineNo = -1
if len(failDetails) == 3:
match = re.search("line.*?(\d+)", failDetails[1])
if match is None:
return lineNo
lineNo = int(match.group(1))
return lineNo
def parseDocTestResult(docTestResStr):
""" Extract the line number and filename of the doctest failure"""
lines = docTestResStr.split("\n")
for lineNo, line in enumerate(lines):
failure = line.find("Failed example:")
if failure != -1:
failDetails = lines[lineNo - 1]
yield parseFailDetails(failDetails)
## Instruction:
Fix doc test failure parsing
## Code After:
def parseFailDetails(failDetails):
""" Parse the line number of the doctest failure
>>> parseFailDetails("blah")
-1
"""
import re
failDetails = failDetails.split(',')
lineNo = -1
if len(failDetails) == 3:
match = re.search("line.*?(\d+)", failDetails[1])
if match is None:
return lineNo
lineNo = int(match.group(1))
return lineNo
def parseDocTestResult(docTestResStr):
""" Extract the line number and filename of the doctest failure"""
lines = docTestResStr.split("\n")
for lineNo, line in enumerate(lines):
failure = line.find("Failed example:")
if failure != -1:
failDetails = lines[lineNo - 1]
yield parseFailDetails(failDetails)
if __name__ == "__main__":
from doctest import testmod
testmod()
|
def parseFailDetails(failDetails):
- """ Parse the line number of the doctest failure"""
? ---
+ """ Parse the line number of the doctest failure
+ >>> parseFailDetails("blah")
+ -1
+ """
import re
failDetails = failDetails.split(',')
lineNo = -1
if len(failDetails) == 3:
match = re.search("line.*?(\d+)", failDetails[1])
if match is None:
return lineNo
lineNo = int(match.group(1))
return lineNo
def parseDocTestResult(docTestResStr):
""" Extract the line number and filename of the doctest failure"""
lines = docTestResStr.split("\n")
for lineNo, line in enumerate(lines):
failure = line.find("Failed example:")
if failure != -1:
failDetails = lines[lineNo - 1]
yield parseFailDetails(failDetails)
+
+ if __name__ == "__main__":
+ from doctest import testmod
+ testmod()
|
14a085f787f5fe80a0737d97515b71adaf05d1cd
|
checker/checker/contest.py
|
checker/checker/contest.py
|
from checker.abstract import AbstractChecker
import base64
import sys
import codecs
class ContestChecker(AbstractChecker):
def __init__(self, tick, team, service, ip):
AbstractChecker.__init__(self, tick, team, service, ip)
def _rpc(self, function, *args):
sys.stdout.write("%s %s\n" % (function, " ".join(args)))
sys.stdout.flush()
return sys.stdin.readline().strip()
def get_flag(self, tick, payload=None):
if payload is None:
return self._rpc("FLAG", str(tick))
else:
payload = codecs.encode(payload, 'hex').decode('latin-1')
return self._rpc("FLAG", str(tick), payload)
def store_blob(self, ident, blob):
data = base64.b64encode(blob)
return self._rpc("STORE", ident, base64.b64encode(data).decode('latin-1'))
def retrieve_blob(self, ident):
data = self._rpc("RETRIEVE", ident)
return base64.b64decode(data)
|
from checker.abstract import AbstractChecker
import base64
import sys
import codecs
class ContestChecker(AbstractChecker):
def __init__(self, tick, team, service, ip):
AbstractChecker.__init__(self, tick, team, service, ip)
def _rpc(self, function, *args):
sys.stdout.write("%s %s\n" % (function, " ".join(args)))
sys.stdout.flush()
return sys.stdin.readline().strip()
def get_flag(self, tick, payload=None):
if payload is None:
return self._rpc("FLAG", str(tick))
else:
payload = codecs.encode(payload, 'hex').decode('latin-1')
return self._rpc("FLAG", str(tick), payload)
def store_blob(self, ident, blob):
data = base64.b64encode(blob)
return self._rpc("STORE", ident, data.decode('latin-1'))
def retrieve_blob(self, ident):
data = self._rpc("RETRIEVE", ident)
return base64.b64decode(data)
|
Fix double-encoding of binary blobs
|
Fix double-encoding of binary blobs
|
Python
|
isc
|
fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver
|
from checker.abstract import AbstractChecker
import base64
import sys
import codecs
class ContestChecker(AbstractChecker):
def __init__(self, tick, team, service, ip):
AbstractChecker.__init__(self, tick, team, service, ip)
def _rpc(self, function, *args):
sys.stdout.write("%s %s\n" % (function, " ".join(args)))
sys.stdout.flush()
return sys.stdin.readline().strip()
def get_flag(self, tick, payload=None):
if payload is None:
return self._rpc("FLAG", str(tick))
else:
payload = codecs.encode(payload, 'hex').decode('latin-1')
return self._rpc("FLAG", str(tick), payload)
def store_blob(self, ident, blob):
data = base64.b64encode(blob)
- return self._rpc("STORE", ident, base64.b64encode(data).decode('latin-1'))
+ return self._rpc("STORE", ident, data.decode('latin-1'))
def retrieve_blob(self, ident):
data = self._rpc("RETRIEVE", ident)
return base64.b64decode(data)
|
Fix double-encoding of binary blobs
|
## Code Before:
from checker.abstract import AbstractChecker
import base64
import sys
import codecs
class ContestChecker(AbstractChecker):
def __init__(self, tick, team, service, ip):
AbstractChecker.__init__(self, tick, team, service, ip)
def _rpc(self, function, *args):
sys.stdout.write("%s %s\n" % (function, " ".join(args)))
sys.stdout.flush()
return sys.stdin.readline().strip()
def get_flag(self, tick, payload=None):
if payload is None:
return self._rpc("FLAG", str(tick))
else:
payload = codecs.encode(payload, 'hex').decode('latin-1')
return self._rpc("FLAG", str(tick), payload)
def store_blob(self, ident, blob):
data = base64.b64encode(blob)
return self._rpc("STORE", ident, base64.b64encode(data).decode('latin-1'))
def retrieve_blob(self, ident):
data = self._rpc("RETRIEVE", ident)
return base64.b64decode(data)
## Instruction:
Fix double-encoding of binary blobs
## Code After:
from checker.abstract import AbstractChecker
import base64
import sys
import codecs
class ContestChecker(AbstractChecker):
def __init__(self, tick, team, service, ip):
AbstractChecker.__init__(self, tick, team, service, ip)
def _rpc(self, function, *args):
sys.stdout.write("%s %s\n" % (function, " ".join(args)))
sys.stdout.flush()
return sys.stdin.readline().strip()
def get_flag(self, tick, payload=None):
if payload is None:
return self._rpc("FLAG", str(tick))
else:
payload = codecs.encode(payload, 'hex').decode('latin-1')
return self._rpc("FLAG", str(tick), payload)
def store_blob(self, ident, blob):
data = base64.b64encode(blob)
return self._rpc("STORE", ident, data.decode('latin-1'))
def retrieve_blob(self, ident):
data = self._rpc("RETRIEVE", ident)
return base64.b64decode(data)
|
from checker.abstract import AbstractChecker
import base64
import sys
import codecs
class ContestChecker(AbstractChecker):
def __init__(self, tick, team, service, ip):
AbstractChecker.__init__(self, tick, team, service, ip)
def _rpc(self, function, *args):
sys.stdout.write("%s %s\n" % (function, " ".join(args)))
sys.stdout.flush()
return sys.stdin.readline().strip()
def get_flag(self, tick, payload=None):
if payload is None:
return self._rpc("FLAG", str(tick))
else:
payload = codecs.encode(payload, 'hex').decode('latin-1')
return self._rpc("FLAG", str(tick), payload)
def store_blob(self, ident, blob):
data = base64.b64encode(blob)
- return self._rpc("STORE", ident, base64.b64encode(data).decode('latin-1'))
? ----------------- -
+ return self._rpc("STORE", ident, data.decode('latin-1'))
def retrieve_blob(self, ident):
data = self._rpc("RETRIEVE", ident)
return base64.b64decode(data)
|
55072134b8053ac126213e580fcc59977cfb7a02
|
scikits/image/setup.py
|
scikits/image/setup.py
|
import os
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('image', parent_package, top_path)
config.add_subpackage('opencv')
config.add_subpackage('graph')
config.add_subpackage('io')
config.add_subpackage('morphology')
config.add_subpackage('filter')
config.add_subpackage('transform')
config.add_subpackage('data')
config.add_subpackage('util')
config.add_subpackage('color')
def add_test_directories(arg, dirname, fnames):
if dirname.split(os.path.sep)[-1] == 'tests':
config.add_data_dir(dirname)
# Add test directories
from os.path import isdir, dirname, join, abspath
rel_isdir = lambda d: isdir(join(curpath, d))
curpath = join(dirname(__file__), './')
subdirs = [join(d, 'tests') for d in os.listdir(curpath) if rel_isdir(d)]
subdirs = [d for d in subdirs if rel_isdir(d)]
for test_dir in subdirs:
config.add_data_dir(test_dir)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = Configuration(top_path='').todict()
setup(**config)
|
import os
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('image', parent_package, top_path)
config.add_subpackage('opencv')
config.add_subpackage('graph')
config.add_subpackage('io')
config.add_subpackage('morphology')
config.add_subpackage('filter')
config.add_subpackage('transform')
config.add_subpackage('data')
config.add_subpackage('util')
config.add_subpackage('color')
config.add_subpackage('draw')
config.add_subpackage('feature')
def add_test_directories(arg, dirname, fnames):
if dirname.split(os.path.sep)[-1] == 'tests':
config.add_data_dir(dirname)
# Add test directories
from os.path import isdir, dirname, join, abspath
rel_isdir = lambda d: isdir(join(curpath, d))
curpath = join(dirname(__file__), './')
subdirs = [join(d, 'tests') for d in os.listdir(curpath) if rel_isdir(d)]
subdirs = [d for d in subdirs if rel_isdir(d)]
for test_dir in subdirs:
config.add_data_dir(test_dir)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = Configuration(top_path='').todict()
setup(**config)
|
Add 'draw' and 'feature' sub-modules.
|
BUG: Add 'draw' and 'feature' sub-modules.
|
Python
|
bsd-3-clause
|
paalge/scikit-image,michaelaye/scikit-image,bennlich/scikit-image,chintak/scikit-image,paalge/scikit-image,ClinicalGraphics/scikit-image,warmspringwinds/scikit-image,michaelaye/scikit-image,chriscrosscutler/scikit-image,ClinicalGraphics/scikit-image,WarrenWeckesser/scikits-image,emmanuelle/scikits.image,Midafi/scikit-image,emmanuelle/scikits.image,WarrenWeckesser/scikits-image,bennlich/scikit-image,GaZ3ll3/scikit-image,michaelpacer/scikit-image,SamHames/scikit-image,vighneshbirodkar/scikit-image,almarklein/scikit-image,rjeli/scikit-image,blink1073/scikit-image,jwiggins/scikit-image,emmanuelle/scikits.image,emon10005/scikit-image,SamHames/scikit-image,warmspringwinds/scikit-image,almarklein/scikit-image,almarklein/scikit-image,robintw/scikit-image,keflavich/scikit-image,Midafi/scikit-image,ajaybhat/scikit-image,ajaybhat/scikit-image,dpshelio/scikit-image,emmanuelle/scikits.image,youprofit/scikit-image,jwiggins/scikit-image,SamHames/scikit-image,emon10005/scikit-image,ofgulban/scikit-image,paalge/scikit-image,chintak/scikit-image,newville/scikit-image,juliusbierk/scikit-image,keflavich/scikit-image,Hiyorimi/scikit-image,Hiyorimi/scikit-image,chintak/scikit-image,bsipocz/scikit-image,newville/scikit-image,robintw/scikit-image,vighneshbirodkar/scikit-image,Britefury/scikit-image,youprofit/scikit-image,chintak/scikit-image,oew1v07/scikit-image,blink1073/scikit-image,ofgulban/scikit-image,dpshelio/scikit-image,pratapvardhan/scikit-image,michaelpacer/scikit-image,pratapvardhan/scikit-image,chriscrosscutler/scikit-image,vighneshbirodkar/scikit-image,bsipocz/scikit-image,ofgulban/scikit-image,juliusbierk/scikit-image,Britefury/scikit-image,rjeli/scikit-image,GaZ3ll3/scikit-image,rjeli/scikit-image,SamHames/scikit-image,almarklein/scikit-image,oew1v07/scikit-image
|
import os
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('image', parent_package, top_path)
config.add_subpackage('opencv')
config.add_subpackage('graph')
config.add_subpackage('io')
config.add_subpackage('morphology')
config.add_subpackage('filter')
config.add_subpackage('transform')
config.add_subpackage('data')
config.add_subpackage('util')
config.add_subpackage('color')
+ config.add_subpackage('draw')
+ config.add_subpackage('feature')
def add_test_directories(arg, dirname, fnames):
if dirname.split(os.path.sep)[-1] == 'tests':
config.add_data_dir(dirname)
# Add test directories
from os.path import isdir, dirname, join, abspath
rel_isdir = lambda d: isdir(join(curpath, d))
curpath = join(dirname(__file__), './')
subdirs = [join(d, 'tests') for d in os.listdir(curpath) if rel_isdir(d)]
subdirs = [d for d in subdirs if rel_isdir(d)]
for test_dir in subdirs:
config.add_data_dir(test_dir)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = Configuration(top_path='').todict()
setup(**config)
|
Add 'draw' and 'feature' sub-modules.
|
## Code Before:
import os
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('image', parent_package, top_path)
config.add_subpackage('opencv')
config.add_subpackage('graph')
config.add_subpackage('io')
config.add_subpackage('morphology')
config.add_subpackage('filter')
config.add_subpackage('transform')
config.add_subpackage('data')
config.add_subpackage('util')
config.add_subpackage('color')
def add_test_directories(arg, dirname, fnames):
if dirname.split(os.path.sep)[-1] == 'tests':
config.add_data_dir(dirname)
# Add test directories
from os.path import isdir, dirname, join, abspath
rel_isdir = lambda d: isdir(join(curpath, d))
curpath = join(dirname(__file__), './')
subdirs = [join(d, 'tests') for d in os.listdir(curpath) if rel_isdir(d)]
subdirs = [d for d in subdirs if rel_isdir(d)]
for test_dir in subdirs:
config.add_data_dir(test_dir)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = Configuration(top_path='').todict()
setup(**config)
## Instruction:
Add 'draw' and 'feature' sub-modules.
## Code After:
import os
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('image', parent_package, top_path)
config.add_subpackage('opencv')
config.add_subpackage('graph')
config.add_subpackage('io')
config.add_subpackage('morphology')
config.add_subpackage('filter')
config.add_subpackage('transform')
config.add_subpackage('data')
config.add_subpackage('util')
config.add_subpackage('color')
config.add_subpackage('draw')
config.add_subpackage('feature')
def add_test_directories(arg, dirname, fnames):
if dirname.split(os.path.sep)[-1] == 'tests':
config.add_data_dir(dirname)
# Add test directories
from os.path import isdir, dirname, join, abspath
rel_isdir = lambda d: isdir(join(curpath, d))
curpath = join(dirname(__file__), './')
subdirs = [join(d, 'tests') for d in os.listdir(curpath) if rel_isdir(d)]
subdirs = [d for d in subdirs if rel_isdir(d)]
for test_dir in subdirs:
config.add_data_dir(test_dir)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = Configuration(top_path='').todict()
setup(**config)
|
import os
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('image', parent_package, top_path)
config.add_subpackage('opencv')
config.add_subpackage('graph')
config.add_subpackage('io')
config.add_subpackage('morphology')
config.add_subpackage('filter')
config.add_subpackage('transform')
config.add_subpackage('data')
config.add_subpackage('util')
config.add_subpackage('color')
+ config.add_subpackage('draw')
+ config.add_subpackage('feature')
def add_test_directories(arg, dirname, fnames):
if dirname.split(os.path.sep)[-1] == 'tests':
config.add_data_dir(dirname)
# Add test directories
from os.path import isdir, dirname, join, abspath
rel_isdir = lambda d: isdir(join(curpath, d))
curpath = join(dirname(__file__), './')
subdirs = [join(d, 'tests') for d in os.listdir(curpath) if rel_isdir(d)]
subdirs = [d for d in subdirs if rel_isdir(d)]
for test_dir in subdirs:
config.add_data_dir(test_dir)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = Configuration(top_path='').todict()
setup(**config)
|
9a154b8893a3306e5350a9118e9cfb582d295322
|
traccar_graphql/schema.py
|
traccar_graphql/schema.py
|
import os, graphene, requests
from flask_jwt_extended import get_jwt_identity, get_jwt_claims
from graphql import GraphQLError
from traccar_graphql.models import ServerType, UserType
from traccar_graphql.mutations import LoginType, RegisterType
from traccar_graphql.utils import request2object
TRACCAR_BACKEND = os.environ.get('TRACCAR_BACKEND')
class Query(graphene.ObjectType):
server = graphene.Field(lambda: ServerType)
me = graphene.Field(lambda: UserType)
def resolve_server(self, args, context, info):
r = requests.get("{}/api/server".format(TRACCAR_BACKEND))
return request2object(r, 'ServerType')
def resolve_me(self, args, context, info):
claims = get_jwt_claims()
if 'session' not in claims:
raise GraphQLError('Authentication required')
headers = { 'Cookie': claims['session'] }
r = requests.get("{}/api/session".format(TRACCAR_BACKEND), headers=headers)
return request2object(r, 'UserType')
class Mutation(graphene.ObjectType):
login = LoginType.Field()
register = RegisterType.Field()
schema = graphene.Schema(query=Query, mutation=Mutation)
|
import os, graphene, requests
from flask_jwt_extended import get_jwt_identity, get_jwt_claims
from graphql import GraphQLError
from traccar_graphql.models import ServerType, UserType
from traccar_graphql.mutations import LoginType, RegisterType
from traccar_graphql.utils import request2object

# Base URL of the Traccar REST backend that this schema proxies.
TRACCAR_BACKEND = os.environ.get('TRACCAR_BACKEND')

class Query(graphene.ObjectType):
    """Root GraphQL query type, backed by the Traccar REST API."""
    server = graphene.Field(lambda: ServerType)

    def resolve_server(self, args, context, info):
        """Fetch the Traccar server configuration."""
        r = requests.get("{}/api/server".format(TRACCAR_BACKEND))
        return request2object(r, 'ServerType')

    me = graphene.Field(lambda: UserType)

    def resolve_me(self, args, context, info):
        """Return the currently authenticated user.

        Requires a Traccar session cookie stored in the JWT claims;
        raises GraphQLError when no session is present or when the
        backend rejects it.
        """
        claims = get_jwt_claims()
        if 'session' not in claims:
            raise GraphQLError('Authentication required')
        # Forward the saved Traccar session cookie to the backend.
        headers = { 'Cookie': claims['session'] }
        r = requests.get("{}/api/session".format(TRACCAR_BACKEND), headers=headers)
        # A 404 from /api/session means the cookie no longer maps to a
        # valid session, so treat it as a sign-in failure.
        if r.status_code == 404:
            raise GraphQLError('Authentication required')
        return request2object(r, 'UserType')

class Mutation(graphene.ObjectType):
    """Root GraphQL mutation type."""
    login = LoginType.Field()
    register = RegisterType.Field()

# Schema object exposed to the GraphQL view.
schema = graphene.Schema(query=Query, mutation=Mutation)
|
Handle sign in failure from traccar
|
Handle sign in failure from traccar
|
Python
|
mit
|
sunhoww/traccar_graphql
|
import os, graphene, requests
from flask_jwt_extended import get_jwt_identity, get_jwt_claims
from graphql import GraphQLError
from traccar_graphql.models import ServerType, UserType
from traccar_graphql.mutations import LoginType, RegisterType
from traccar_graphql.utils import request2object
TRACCAR_BACKEND = os.environ.get('TRACCAR_BACKEND')
class Query(graphene.ObjectType):
server = graphene.Field(lambda: ServerType)
- me = graphene.Field(lambda: UserType)
def resolve_server(self, args, context, info):
r = requests.get("{}/api/server".format(TRACCAR_BACKEND))
return request2object(r, 'ServerType')
+
+ me = graphene.Field(lambda: UserType)
def resolve_me(self, args, context, info):
claims = get_jwt_claims()
if 'session' not in claims:
raise GraphQLError('Authentication required')
headers = { 'Cookie': claims['session'] }
r = requests.get("{}/api/session".format(TRACCAR_BACKEND), headers=headers)
+ if r.status_code == 404:
+ raise GraphQLError('Authentication required')
return request2object(r, 'UserType')
class Mutation(graphene.ObjectType):
login = LoginType.Field()
register = RegisterType.Field()
schema = graphene.Schema(query=Query, mutation=Mutation)
|
Handle sign in failure from traccar
|
## Code Before:
import os, graphene, requests
from flask_jwt_extended import get_jwt_identity, get_jwt_claims
from graphql import GraphQLError
from traccar_graphql.models import ServerType, UserType
from traccar_graphql.mutations import LoginType, RegisterType
from traccar_graphql.utils import request2object

# Base URL of the Traccar REST backend that this schema proxies.
TRACCAR_BACKEND = os.environ.get('TRACCAR_BACKEND')

class Query(graphene.ObjectType):
    """Root GraphQL query type, backed by the Traccar REST API."""
    server = graphene.Field(lambda: ServerType)
    me = graphene.Field(lambda: UserType)

    def resolve_server(self, args, context, info):
        """Fetch the Traccar server configuration."""
        r = requests.get("{}/api/server".format(TRACCAR_BACKEND))
        return request2object(r, 'ServerType')

    def resolve_me(self, args, context, info):
        """Return the currently authenticated user.

        Requires a Traccar session cookie stored in the JWT claims;
        raises GraphQLError when no session is present.
        """
        claims = get_jwt_claims()
        if 'session' not in claims:
            raise GraphQLError('Authentication required')
        # Forward the saved Traccar session cookie to the backend.
        headers = { 'Cookie': claims['session'] }
        r = requests.get("{}/api/session".format(TRACCAR_BACKEND), headers=headers)
        return request2object(r, 'UserType')

class Mutation(graphene.ObjectType):
    """Root GraphQL mutation type."""
    login = LoginType.Field()
    register = RegisterType.Field()

# Schema object exposed to the GraphQL view.
schema = graphene.Schema(query=Query, mutation=Mutation)
## Instruction:
Handle sign in failure from traccar
## Code After:
import os, graphene, requests
from flask_jwt_extended import get_jwt_identity, get_jwt_claims
from graphql import GraphQLError
from traccar_graphql.models import ServerType, UserType
from traccar_graphql.mutations import LoginType, RegisterType
from traccar_graphql.utils import request2object

# Base URL of the Traccar REST backend that this schema proxies.
TRACCAR_BACKEND = os.environ.get('TRACCAR_BACKEND')

class Query(graphene.ObjectType):
    """Root GraphQL query type, backed by the Traccar REST API."""
    server = graphene.Field(lambda: ServerType)

    def resolve_server(self, args, context, info):
        """Fetch the Traccar server configuration."""
        r = requests.get("{}/api/server".format(TRACCAR_BACKEND))
        return request2object(r, 'ServerType')

    me = graphene.Field(lambda: UserType)

    def resolve_me(self, args, context, info):
        """Return the currently authenticated user.

        Requires a Traccar session cookie stored in the JWT claims;
        raises GraphQLError when no session is present or when the
        backend rejects it.
        """
        claims = get_jwt_claims()
        if 'session' not in claims:
            raise GraphQLError('Authentication required')
        # Forward the saved Traccar session cookie to the backend.
        headers = { 'Cookie': claims['session'] }
        r = requests.get("{}/api/session".format(TRACCAR_BACKEND), headers=headers)
        # A 404 from /api/session means the cookie no longer maps to a
        # valid session, so treat it as a sign-in failure.
        if r.status_code == 404:
            raise GraphQLError('Authentication required')
        return request2object(r, 'UserType')

class Mutation(graphene.ObjectType):
    """Root GraphQL mutation type."""
    login = LoginType.Field()
    register = RegisterType.Field()

# Schema object exposed to the GraphQL view.
schema = graphene.Schema(query=Query, mutation=Mutation)
|
import os, graphene, requests
from flask_jwt_extended import get_jwt_identity, get_jwt_claims
from graphql import GraphQLError
from traccar_graphql.models import ServerType, UserType
from traccar_graphql.mutations import LoginType, RegisterType
from traccar_graphql.utils import request2object
TRACCAR_BACKEND = os.environ.get('TRACCAR_BACKEND')
class Query(graphene.ObjectType):
server = graphene.Field(lambda: ServerType)
- me = graphene.Field(lambda: UserType)
def resolve_server(self, args, context, info):
r = requests.get("{}/api/server".format(TRACCAR_BACKEND))
return request2object(r, 'ServerType')
+
+ me = graphene.Field(lambda: UserType)
def resolve_me(self, args, context, info):
claims = get_jwt_claims()
if 'session' not in claims:
raise GraphQLError('Authentication required')
headers = { 'Cookie': claims['session'] }
r = requests.get("{}/api/session".format(TRACCAR_BACKEND), headers=headers)
+ if r.status_code == 404:
+ raise GraphQLError('Authentication required')
return request2object(r, 'UserType')
class Mutation(graphene.ObjectType):
login = LoginType.Field()
register = RegisterType.Field()
schema = graphene.Schema(query=Query, mutation=Mutation)
|
680271d4669a309977e5fcfe89f92ea35ebc8d6f
|
common/djangoapps/dark_lang/migrations/0002_data__enable_on_install.py
|
common/djangoapps/dark_lang/migrations/0002_data__enable_on_install.py
|
from __future__ import unicode_literals
# Converted from the original South migration 0002_enable_on_install.py
#
from django.db import migrations, models

def create_dark_lang_config(apps, schema_editor):
    """
    Enable DarkLang by default when it is installed, to prevent accidental
    release of testing languages.
    """
    # Use the historical model state (not the live model), as required
    # inside data migrations.
    dark_lang_model = apps.get_model("dark_lang", "DarkLangConfig")
    # Honour the database this migration is being applied to.
    db_alias = schema_editor.connection.alias
    # NOTE(review): get_or_create matches only rows with enabled=True, so
    # an existing disabled config would not stop a new row being created.
    dark_lang_model.objects.using(db_alias).get_or_create(enabled=True)

def remove_dark_lang_config(apps, schema_editor):
    """Write your backwards methods here."""
    # This data migration is deliberately irreversible.
    raise RuntimeError("Cannot reverse this migration.")

class Migration(migrations.Migration):
    dependencies = [
        ('dark_lang', '0001_initial'),
    ]
    operations = [
        migrations.RunPython(create_dark_lang_config, remove_dark_lang_config),
    ]
|
from __future__ import unicode_literals
# Converted from the original South migration 0002_enable_on_install.py
#
from django.db import migrations, models

def create_dark_lang_config(apps, schema_editor):
    """
    Enable DarkLang by default when it is installed, to prevent accidental
    release of testing languages.
    """
    # Use the historical model state (not the live model), as required
    # inside data migrations.
    dark_lang_model = apps.get_model("dark_lang", "DarkLangConfig")
    # Honour the database this migration is being applied to.
    db_alias = schema_editor.connection.alias
    # Only seed an enabled config when no config rows exist at all;
    # an existing (possibly disabled) config is left untouched.
    if not dark_lang_model.objects.using(db_alias).exists():
        dark_lang_model.objects.using(db_alias).create(enabled=True)

def remove_dark_lang_config(apps, schema_editor):
    """Write your backwards methods here."""
    # This data migration is deliberately irreversible.
    raise RuntimeError("Cannot reverse this migration.")

class Migration(migrations.Migration):
    dependencies = [
        ('dark_lang', '0001_initial'),
    ]
    operations = [
        migrations.RunPython(create_dark_lang_config, remove_dark_lang_config),
    ]
|
Correct the darklang migration, since many darklang configs can exist.
|
Correct the darklang migration, since many darklang configs can exist.
|
Python
|
agpl-3.0
|
waheedahmed/edx-platform,hamzehd/edx-platform,ovnicraft/edx-platform,hamzehd/edx-platform,miptliot/edx-platform,Lektorium-LLC/edx-platform,kursitet/edx-platform,Ayub-Khan/edx-platform,marcore/edx-platform,teltek/edx-platform,analyseuc3m/ANALYSE-v1,msegado/edx-platform,franosincic/edx-platform,marcore/edx-platform,kmoocdev2/edx-platform,philanthropy-u/edx-platform,Edraak/circleci-edx-platform,gymnasium/edx-platform,jzoldak/edx-platform,arbrandes/edx-platform,eduNEXT/edunext-platform,EDUlib/edx-platform,edx/edx-platform,cognitiveclass/edx-platform,louyihua/edx-platform,deepsrijit1105/edx-platform,jjmiranda/edx-platform,msegado/edx-platform,stvstnfrd/edx-platform,mbareta/edx-platform-ft,Edraak/edx-platform,antoviaque/edx-platform,louyihua/edx-platform,deepsrijit1105/edx-platform,prarthitm/edxplatform,devs1991/test_edx_docmode,MakeHer/edx-platform,edx-solutions/edx-platform,halvertoluke/edx-platform,ESOedX/edx-platform,a-parhom/edx-platform,EDUlib/edx-platform,nttks/edx-platform,antoviaque/edx-platform,doganov/edx-platform,wwj718/edx-platform,caesar2164/edx-platform,jolyonb/edx-platform,cpennington/edx-platform,CourseTalk/edx-platform,mbareta/edx-platform-ft,solashirai/edx-platform,ampax/edx-platform,chrisndodge/edx-platform,cpennington/edx-platform,procangroup/edx-platform,proversity-org/edx-platform,bigdatauniversity/edx-platform,ovnicraft/edx-platform,cecep-edu/edx-platform,ZLLab-Mooc/edx-platform,antoviaque/edx-platform,Edraak/edraak-platform,JioEducation/edx-platform,doganov/edx-platform,franosincic/edx-platform,devs1991/test_edx_docmode,10clouds/edx-platform,amir-qayyum-khan/edx-platform,shabab12/edx-platform,gsehub/edx-platform,cpennington/edx-platform,hamzehd/edx-platform,kmoocdev2/edx-platform,pabloborrego93/edx-platform,kursitet/edx-platform,fintech-circle/edx-platform,a-parhom/edx-platform,solashirai/edx-platform,Livit/Livit.Learn.EdX,MakeHer/edx-platform,angelapper/edx-platform,UOMx/edx-platform,wwj718/edx-platform,IndonesiaX/edx-platform,inares/edx-platform
,appsembler/edx-platform,10clouds/edx-platform,hastexo/edx-platform,romain-li/edx-platform,pomegranited/edx-platform,BehavioralInsightsTeam/edx-platform,IndonesiaX/edx-platform,halvertoluke/edx-platform,Edraak/edx-platform,angelapper/edx-platform,marcore/edx-platform,pepeportela/edx-platform,waheedahmed/edx-platform,10clouds/edx-platform,mbareta/edx-platform-ft,mbareta/edx-platform-ft,synergeticsedx/deployment-wipro,angelapper/edx-platform,inares/edx-platform,nttks/edx-platform,kursitet/edx-platform,RPI-OPENEDX/edx-platform,defance/edx-platform,doganov/edx-platform,eduNEXT/edunext-platform,solashirai/edx-platform,philanthropy-u/edx-platform,tanmaykm/edx-platform,jjmiranda/edx-platform,eduNEXT/edx-platform,eduNEXT/edx-platform,jzoldak/edx-platform,nttks/edx-platform,Edraak/circleci-edx-platform,lduarte1991/edx-platform,kmoocdev2/edx-platform,wwj718/edx-platform,ZLLab-Mooc/edx-platform,pomegranited/edx-platform,miptliot/edx-platform,mitocw/edx-platform,jbzdak/edx-platform,Edraak/circleci-edx-platform,MakeHer/edx-platform,proversity-org/edx-platform,ZLLab-Mooc/edx-platform,halvertoluke/edx-platform,MakeHer/edx-platform,mitocw/edx-platform,pepeportela/edx-platform,marcore/edx-platform,tanmaykm/edx-platform,franosincic/edx-platform,BehavioralInsightsTeam/edx-platform,Endika/edx-platform,zhenzhai/edx-platform,defance/edx-platform,pepeportela/edx-platform,Lektorium-LLC/edx-platform,shabab12/edx-platform,ovnicraft/edx-platform,pepeportela/edx-platform,JioEducation/edx-platform,JioEducation/edx-platform,Endika/edx-platform,Endika/edx-platform,a-parhom/edx-platform,arbrandes/edx-platform,TeachAtTUM/edx-platform,cognitiveclass/edx-platform,hastexo/edx-platform,edx/edx-platform,synergeticsedx/deployment-wipro,CourseTalk/edx-platform,louyihua/edx-platform,Livit/Livit.Learn.EdX,eduNEXT/edx-platform,Edraak/edraak-platform,mitocw/edx-platform,arbrandes/edx-platform,caesar2164/edx-platform,cognitiveclass/edx-platform,edx-solutions/edx-platform,ahmedaljazzar/edx-platform,gymnasium/ed
x-platform,BehavioralInsightsTeam/edx-platform,ampax/edx-platform,franosincic/edx-platform,CredoReference/edx-platform,cecep-edu/edx-platform,shurihell/testasia,edx-solutions/edx-platform,longmen21/edx-platform,EDUlib/edx-platform,edx-solutions/edx-platform,eduNEXT/edx-platform,amir-qayyum-khan/edx-platform,romain-li/edx-platform,halvertoluke/edx-platform,simbs/edx-platform,Livit/Livit.Learn.EdX,wwj718/edx-platform,caesar2164/edx-platform,TeachAtTUM/edx-platform,JioEducation/edx-platform,eduNEXT/edunext-platform,solashirai/edx-platform,gsehub/edx-platform,solashirai/edx-platform,Ayub-Khan/edx-platform,itsjeyd/edx-platform,bigdatauniversity/edx-platform,RPI-OPENEDX/edx-platform,philanthropy-u/edx-platform,lduarte1991/edx-platform,defance/edx-platform,romain-li/edx-platform,shurihell/testasia,pabloborrego93/edx-platform,msegado/edx-platform,gymnasium/edx-platform,pomegranited/edx-platform,mitocw/edx-platform,RPI-OPENEDX/edx-platform,Edraak/circleci-edx-platform,simbs/edx-platform,pomegranited/edx-platform,Livit/Livit.Learn.EdX,Stanford-Online/edx-platform,zhenzhai/edx-platform,ESOedX/edx-platform,EDUlib/edx-platform,naresh21/synergetics-edx-platform,Stanford-Online/edx-platform,procangroup/edx-platform,simbs/edx-platform,Ayub-Khan/edx-platform,longmen21/edx-platform,raccoongang/edx-platform,hastexo/edx-platform,deepsrijit1105/edx-platform,Edraak/circleci-edx-platform,miptliot/edx-platform,CourseTalk/edx-platform,waheedahmed/edx-platform,itsjeyd/edx-platform,kmoocdev2/edx-platform,procangroup/edx-platform,devs1991/test_edx_docmode,shabab12/edx-platform,antoviaque/edx-platform,edx/edx-platform,gsehub/edx-platform,msegado/edx-platform,deepsrijit1105/edx-platform,appsembler/edx-platform,Edraak/edx-platform,tanmaykm/edx-platform,zhenzhai/edx-platform,Edraak/edx-platform,inares/edx-platform,chrisndodge/edx-platform,alu042/edx-platform,ESOedX/edx-platform,itsjeyd/edx-platform,jbzdak/edx-platform,teltek/edx-platform,miptliot/edx-platform,ovnicraft/edx-platform,RPI-OPENEDX/edx
-platform,halvertoluke/edx-platform,inares/edx-platform,chrisndodge/edx-platform,philanthropy-u/edx-platform,UOMx/edx-platform,simbs/edx-platform,a-parhom/edx-platform,longmen21/edx-platform,teltek/edx-platform,nttks/edx-platform,devs1991/test_edx_docmode,ZLLab-Mooc/edx-platform,shurihell/testasia,bigdatauniversity/edx-platform,romain-li/edx-platform,stvstnfrd/edx-platform,jbzdak/edx-platform,Edraak/edx-platform,jbzdak/edx-platform,cecep-edu/edx-platform,synergeticsedx/deployment-wipro,waheedahmed/edx-platform,appsembler/edx-platform,alu042/edx-platform,lduarte1991/edx-platform,hastexo/edx-platform,caesar2164/edx-platform,BehavioralInsightsTeam/edx-platform,cecep-edu/edx-platform,romain-li/edx-platform,bigdatauniversity/edx-platform,ampax/edx-platform,devs1991/test_edx_docmode,bigdatauniversity/edx-platform,Lektorium-LLC/edx-platform,Endika/edx-platform,raccoongang/edx-platform,shabab12/edx-platform,jjmiranda/edx-platform,Stanford-Online/edx-platform,defance/edx-platform,raccoongang/edx-platform,IndonesiaX/edx-platform,analyseuc3m/ANALYSE-v1,teltek/edx-platform,procangroup/edx-platform,TeachAtTUM/edx-platform,pabloborrego93/edx-platform,CredoReference/edx-platform,stvstnfrd/edx-platform,amir-qayyum-khan/edx-platform,jzoldak/edx-platform,arbrandes/edx-platform,IndonesiaX/edx-platform,cpennington/edx-platform,prarthitm/edxplatform,pomegranited/edx-platform,lduarte1991/edx-platform,edx/edx-platform,Edraak/edraak-platform,UOMx/edx-platform,itsjeyd/edx-platform,fintech-circle/edx-platform,eduNEXT/edunext-platform,Stanford-Online/edx-platform,appsembler/edx-platform,kursitet/edx-platform,CredoReference/edx-platform,ahmedaljazzar/edx-platform,prarthitm/edxplatform,jolyonb/edx-platform,jolyonb/edx-platform,jolyonb/edx-platform,devs1991/test_edx_docmode,shurihell/testasia,doganov/edx-platform,proversity-org/edx-platform,Ayub-Khan/edx-platform,ampax/edx-platform,ovnicraft/edx-platform,cognitiveclass/edx-platform,angelapper/edx-platform,simbs/edx-platform,pabloborrego93/edx-pl
atform,synergeticsedx/deployment-wipro,waheedahmed/edx-platform,naresh21/synergetics-edx-platform,alu042/edx-platform,shurihell/testasia,prarthitm/edxplatform,ESOedX/edx-platform,cecep-edu/edx-platform,fintech-circle/edx-platform,stvstnfrd/edx-platform,naresh21/synergetics-edx-platform,zhenzhai/edx-platform,MakeHer/edx-platform,kmoocdev2/edx-platform,gsehub/edx-platform,analyseuc3m/ANALYSE-v1,jjmiranda/edx-platform,kursitet/edx-platform,proversity-org/edx-platform,hamzehd/edx-platform,longmen21/edx-platform,inares/edx-platform,wwj718/edx-platform,tanmaykm/edx-platform,CourseTalk/edx-platform,analyseuc3m/ANALYSE-v1,hamzehd/edx-platform,gymnasium/edx-platform,louyihua/edx-platform,jzoldak/edx-platform,naresh21/synergetics-edx-platform,devs1991/test_edx_docmode,Edraak/edraak-platform,amir-qayyum-khan/edx-platform,ahmedaljazzar/edx-platform,UOMx/edx-platform,msegado/edx-platform,CredoReference/edx-platform,RPI-OPENEDX/edx-platform,Ayub-Khan/edx-platform,IndonesiaX/edx-platform,10clouds/edx-platform,Lektorium-LLC/edx-platform,chrisndodge/edx-platform,TeachAtTUM/edx-platform,jbzdak/edx-platform,raccoongang/edx-platform,ZLLab-Mooc/edx-platform,ahmedaljazzar/edx-platform,devs1991/test_edx_docmode,franosincic/edx-platform,cognitiveclass/edx-platform,fintech-circle/edx-platform,zhenzhai/edx-platform,nttks/edx-platform,alu042/edx-platform,doganov/edx-platform,longmen21/edx-platform
|
from __future__ import unicode_literals
# Converted from the original South migration 0002_enable_on_install.py
#
from django.db import migrations, models
def create_dark_lang_config(apps, schema_editor):
"""
Enable DarkLang by default when it is installed, to prevent accidental
release of testing languages.
"""
dark_lang_model = apps.get_model("dark_lang", "DarkLangConfig")
db_alias = schema_editor.connection.alias
+ if not dark_lang_model.objects.using(db_alias).exists():
- dark_lang_model.objects.using(db_alias).get_or_create(enabled=True)
+ dark_lang_model.objects.using(db_alias).create(enabled=True)
def remove_dark_lang_config(apps, schema_editor):
"""Write your backwards methods here."""
raise RuntimeError("Cannot reverse this migration.")
class Migration(migrations.Migration):
dependencies = [
('dark_lang', '0001_initial'),
]
operations = [
migrations.RunPython(create_dark_lang_config, remove_dark_lang_config),
]
|
Correct the darklang migration, since many darklang configs can exist.
|
## Code Before:
from __future__ import unicode_literals
# Converted from the original South migration 0002_enable_on_install.py
#
from django.db import migrations, models

def create_dark_lang_config(apps, schema_editor):
    """
    Enable DarkLang by default when it is installed, to prevent accidental
    release of testing languages.
    """
    # Use the historical model state (not the live model), as required
    # inside data migrations.
    dark_lang_model = apps.get_model("dark_lang", "DarkLangConfig")
    # Honour the database this migration is being applied to.
    db_alias = schema_editor.connection.alias
    # NOTE(review): get_or_create matches only rows with enabled=True, so
    # an existing disabled config would not stop a new row being created.
    dark_lang_model.objects.using(db_alias).get_or_create(enabled=True)

def remove_dark_lang_config(apps, schema_editor):
    """Write your backwards methods here."""
    # This data migration is deliberately irreversible.
    raise RuntimeError("Cannot reverse this migration.")

class Migration(migrations.Migration):
    dependencies = [
        ('dark_lang', '0001_initial'),
    ]
    operations = [
        migrations.RunPython(create_dark_lang_config, remove_dark_lang_config),
    ]
## Instruction:
Correct the darklang migration, since many darklang configs can exist.
## Code After:
from __future__ import unicode_literals
# Converted from the original South migration 0002_enable_on_install.py
#
from django.db import migrations, models

def create_dark_lang_config(apps, schema_editor):
    """
    Enable DarkLang by default when it is installed, to prevent accidental
    release of testing languages.
    """
    # Use the historical model state (not the live model), as required
    # inside data migrations.
    dark_lang_model = apps.get_model("dark_lang", "DarkLangConfig")
    # Honour the database this migration is being applied to.
    db_alias = schema_editor.connection.alias
    # Only seed an enabled config when no config rows exist at all;
    # an existing (possibly disabled) config is left untouched.
    if not dark_lang_model.objects.using(db_alias).exists():
        dark_lang_model.objects.using(db_alias).create(enabled=True)

def remove_dark_lang_config(apps, schema_editor):
    """Write your backwards methods here."""
    # This data migration is deliberately irreversible.
    raise RuntimeError("Cannot reverse this migration.")

class Migration(migrations.Migration):
    dependencies = [
        ('dark_lang', '0001_initial'),
    ]
    operations = [
        migrations.RunPython(create_dark_lang_config, remove_dark_lang_config),
    ]
|
from __future__ import unicode_literals
# Converted from the original South migration 0002_enable_on_install.py
#
from django.db import migrations, models
def create_dark_lang_config(apps, schema_editor):
"""
Enable DarkLang by default when it is installed, to prevent accidental
release of testing languages.
"""
dark_lang_model = apps.get_model("dark_lang", "DarkLangConfig")
db_alias = schema_editor.connection.alias
+ if not dark_lang_model.objects.using(db_alias).exists():
- dark_lang_model.objects.using(db_alias).get_or_create(enabled=True)
? -------
+ dark_lang_model.objects.using(db_alias).create(enabled=True)
? ++++
def remove_dark_lang_config(apps, schema_editor):
"""Write your backwards methods here."""
raise RuntimeError("Cannot reverse this migration.")
class Migration(migrations.Migration):
dependencies = [
('dark_lang', '0001_initial'),
]
operations = [
migrations.RunPython(create_dark_lang_config, remove_dark_lang_config),
]
|
a34c9628c3f383e7b6f5eb521a9493f2b51d8811
|
plata/reporting/views.py
|
plata/reporting/views.py
|
from decimal import Decimal
import StringIO
from django.contrib.admin.views.decorators import staff_member_required
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from pdfdocument.utils import pdf_response
import plata
import plata.reporting.product
import plata.reporting.order

@staff_member_required
def product_xls(request):
    """Stream the product report as an Excel attachment (staff only)."""
    output = StringIO.StringIO()
    workbook = plata.reporting.product.product_xls()
    workbook.save(output)
    response = HttpResponse(output.getvalue(), mimetype='application/vnd.ms-excel')
    response['Content-Disposition'] = 'attachment; filename=products.xls'
    return response

@staff_member_required
def order_pdf(request, order_id):
    """Render a single order as a PDF response (staff only)."""
    order = get_object_or_404(plata.shop_instance().order_model, pk=order_id)
    # NOTE(review): hard-coded shipping override applied before rendering;
    # the recalculated total is not saved (save=False). Presumably a
    # leftover test value -- confirm before relying on this output.
    order.shipping_cost = 8 / Decimal('1.076')
    order.shipping_discount = 0
    order.recalculate_total(save=False)
    pdf, response = pdf_response('order-%09d' % order.id)
    plata.reporting.order.order_pdf(pdf, order)
    return response
|
from decimal import Decimal
import StringIO
from django.contrib.admin.views.decorators import staff_member_required
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from pdfdocument.utils import pdf_response
import plata
import plata.reporting.product
import plata.reporting.order

@staff_member_required
def product_xls(request):
    """Stream the product report as an Excel attachment (staff only)."""
    output = StringIO.StringIO()
    workbook = plata.reporting.product.product_xls()
    workbook.save(output)
    response = HttpResponse(output.getvalue(), mimetype='application/vnd.ms-excel')
    response['Content-Disposition'] = 'attachment; filename=products.xls'
    return response

@staff_member_required
def order_pdf(request, order_id):
    """Render a single order as a PDF response (staff only)."""
    order = get_object_or_404(plata.shop_instance().order_model, pk=order_id)
    pdf, response = pdf_response('order-%09d' % order.id)
    plata.reporting.order.order_pdf(pdf, order)
    return response
|
Remove hardcoded shipping modification in order PDF view
|
Remove hardcoded shipping modification in order PDF view
|
Python
|
bsd-3-clause
|
stefanklug/plata,armicron/plata,armicron/plata,allink/plata,armicron/plata
|
from decimal import Decimal
import StringIO
from django.contrib.admin.views.decorators import staff_member_required
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from pdfdocument.utils import pdf_response
import plata
import plata.reporting.product
import plata.reporting.order
@staff_member_required
def product_xls(request):
output = StringIO.StringIO()
workbook = plata.reporting.product.product_xls()
workbook.save(output)
response = HttpResponse(output.getvalue(), mimetype='application/vnd.ms-excel')
response['Content-Disposition'] = 'attachment; filename=products.xls'
return response
@staff_member_required
def order_pdf(request, order_id):
order = get_object_or_404(plata.shop_instance().order_model, pk=order_id)
- order.shipping_cost = 8 / Decimal('1.076')
- order.shipping_discount = 0
- order.recalculate_total(save=False)
-
pdf, response = pdf_response('order-%09d' % order.id)
plata.reporting.order.order_pdf(pdf, order)
return response
|
Remove hardcoded shipping modification in order PDF view
|
## Code Before:
from decimal import Decimal
import StringIO
from django.contrib.admin.views.decorators import staff_member_required
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from pdfdocument.utils import pdf_response
import plata
import plata.reporting.product
import plata.reporting.order

@staff_member_required
def product_xls(request):
    """Stream the product report as an Excel attachment (staff only)."""
    output = StringIO.StringIO()
    workbook = plata.reporting.product.product_xls()
    workbook.save(output)
    response = HttpResponse(output.getvalue(), mimetype='application/vnd.ms-excel')
    response['Content-Disposition'] = 'attachment; filename=products.xls'
    return response

@staff_member_required
def order_pdf(request, order_id):
    """Render a single order as a PDF response (staff only)."""
    order = get_object_or_404(plata.shop_instance().order_model, pk=order_id)
    # NOTE(review): hard-coded shipping override applied before rendering;
    # the recalculated total is not saved (save=False). Presumably a
    # leftover test value -- confirm before relying on this output.
    order.shipping_cost = 8 / Decimal('1.076')
    order.shipping_discount = 0
    order.recalculate_total(save=False)
    pdf, response = pdf_response('order-%09d' % order.id)
    plata.reporting.order.order_pdf(pdf, order)
    return response
## Instruction:
Remove hardcoded shipping modification in order PDF view
## Code After:
from decimal import Decimal
import StringIO
from django.contrib.admin.views.decorators import staff_member_required
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from pdfdocument.utils import pdf_response
import plata
import plata.reporting.product
import plata.reporting.order

@staff_member_required
def product_xls(request):
    """Stream the product report as an Excel attachment (staff only)."""
    output = StringIO.StringIO()
    workbook = plata.reporting.product.product_xls()
    workbook.save(output)
    response = HttpResponse(output.getvalue(), mimetype='application/vnd.ms-excel')
    response['Content-Disposition'] = 'attachment; filename=products.xls'
    return response

@staff_member_required
def order_pdf(request, order_id):
    """Render a single order as a PDF response (staff only)."""
    order = get_object_or_404(plata.shop_instance().order_model, pk=order_id)
    pdf, response = pdf_response('order-%09d' % order.id)
    plata.reporting.order.order_pdf(pdf, order)
    return response
|
from decimal import Decimal
import StringIO
from django.contrib.admin.views.decorators import staff_member_required
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from pdfdocument.utils import pdf_response
import plata
import plata.reporting.product
import plata.reporting.order
@staff_member_required
def product_xls(request):
output = StringIO.StringIO()
workbook = plata.reporting.product.product_xls()
workbook.save(output)
response = HttpResponse(output.getvalue(), mimetype='application/vnd.ms-excel')
response['Content-Disposition'] = 'attachment; filename=products.xls'
return response
@staff_member_required
def order_pdf(request, order_id):
order = get_object_or_404(plata.shop_instance().order_model, pk=order_id)
- order.shipping_cost = 8 / Decimal('1.076')
- order.shipping_discount = 0
- order.recalculate_total(save=False)
-
pdf, response = pdf_response('order-%09d' % order.id)
plata.reporting.order.order_pdf(pdf, order)
return response
|
65d7ff9fc275bd6186484236d7a0d03c65cc62d7
|
peerinst/admin.py
|
peerinst/admin.py
|
from __future__ import unicode_literals
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from . import models

@admin.register(models.Question)
class QuestionAdmin(admin.ModelAdmin):
    """Admin form layout for peer-instruction questions."""
    # Grouped fieldsets: title, primary media, optional (collapsed)
    # secondary media, answer configuration, and example rationale.
    fieldsets = [
        (None, {'fields': ['title']}),
        (_('Main image or video'), {'fields': ['primary_image', 'primary_video_url']}),
        (_('Secondary image or video'), {
            'fields': ['secondary_image', 'secondary_video_url'],
            'classes': ['collapse'],
            'description': _(
                'Choose either a video or image to include on the first page of the question, '
                'where students select concept tags. This is only used if you want the question '
                'to be hidden when students select concept tags; instead, a preliminary video or '
                'image can be displayed. The main question image will be displayed on all '
                'subsequent pages.'
            ),
        }),
        (_('Answers'), {'fields': [
            'answer_style', 'answer_num_choices', 'correct_answer', 'second_best_answer'
        ]}),
        (None, {'fields': ['example_rationale']}),
    ]

@admin.register(models.Assignment)
class AssignmentAdmin(admin.ModelAdmin):
    """Default admin for assignments."""
    pass
|
from __future__ import unicode_literals
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from . import models

@admin.register(models.Question)
class QuestionAdmin(admin.ModelAdmin):
    """Admin form layout for peer-instruction questions."""
    # Grouped fieldsets: title, primary media, optional (collapsed)
    # secondary media, answer configuration, and example rationale.
    fieldsets = [
        (None, {'fields': ['title']}),
        (_('Main image or video'), {'fields': ['primary_image', 'primary_video_url']}),
        (_('Secondary image or video'), {
            'fields': ['secondary_image', 'secondary_video_url'],
            'classes': ['collapse'],
            'description': _(
                'Choose either a video or image to include on the first page of the question, '
                'where students select concept tags. This is only used if you want the question '
                'to be hidden when students select concept tags; instead, a preliminary video or '
                'image can be displayed. The main question image will be displayed on all '
                'subsequent pages.'
            ),
        }),
        (_('Answers'), {'fields': [
            'answer_style', 'answer_num_choices', 'correct_answer', 'second_best_answer'
        ]}),
        (None, {'fields': ['example_rationale']}),
    ]

@admin.register(models.Assignment)
class AssignmentAdmin(admin.ModelAdmin):
    """Admin for assignments."""
    # Two-pane selector widget for picking the assignment's questions.
    filter_horizontal = ['questions']
|
Use nifty filter widget for selecting questions for an assignment.
|
Use nifty filter widget for selecting questions for an assignment.
|
Python
|
agpl-3.0
|
open-craft/dalite-ng,open-craft/dalite-ng,open-craft/dalite-ng
|
from __future__ import unicode_literals
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from . import models
@admin.register(models.Question)
class QuestionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['title']}),
(_('Main image or video'), {'fields': ['primary_image', 'primary_video_url']}),
(_('Secondary image or video'), {
'fields': ['secondary_image', 'secondary_video_url'],
'classes': ['collapse'],
'description': _(
'Choose either a video or image to include on the first page of the question, '
'where students select concept tags. This is only used if you want the question '
'to be hidden when students select concept tags; instead, a preliminary video or '
'image can be displayed. The main question image will be displayed on all '
'subsequent pages.'
),
}),
(_('Answers'), {'fields': [
'answer_style', 'answer_num_choices', 'correct_answer', 'second_best_answer'
]}),
(None, {'fields': ['example_rationale']}),
]
@admin.register(models.Assignment)
class AssignmentAdmin(admin.ModelAdmin):
- pass
+ filter_horizontal = ['questions']
|
Use nifty filter widget for selecting questions for an assignment.
|
## Code Before:
from __future__ import unicode_literals
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from . import models

@admin.register(models.Question)
class QuestionAdmin(admin.ModelAdmin):
    """Admin form layout for peer-instruction questions."""
    # Grouped fieldsets: title, primary media, optional (collapsed)
    # secondary media, answer configuration, and example rationale.
    fieldsets = [
        (None, {'fields': ['title']}),
        (_('Main image or video'), {'fields': ['primary_image', 'primary_video_url']}),
        (_('Secondary image or video'), {
            'fields': ['secondary_image', 'secondary_video_url'],
            'classes': ['collapse'],
            'description': _(
                'Choose either a video or image to include on the first page of the question, '
                'where students select concept tags. This is only used if you want the question '
                'to be hidden when students select concept tags; instead, a preliminary video or '
                'image can be displayed. The main question image will be displayed on all '
                'subsequent pages.'
            ),
        }),
        (_('Answers'), {'fields': [
            'answer_style', 'answer_num_choices', 'correct_answer', 'second_best_answer'
        ]}),
        (None, {'fields': ['example_rationale']}),
    ]

@admin.register(models.Assignment)
class AssignmentAdmin(admin.ModelAdmin):
    """Default admin for assignments."""
    pass
## Instruction:
Use nifty filter widget for selecting questions for an assignment.
## Code After:
from __future__ import unicode_literals
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from . import models

@admin.register(models.Question)
class QuestionAdmin(admin.ModelAdmin):
    """Admin form layout for peer-instruction questions."""
    # Grouped fieldsets: title, primary media, optional (collapsed)
    # secondary media, answer configuration, and example rationale.
    fieldsets = [
        (None, {'fields': ['title']}),
        (_('Main image or video'), {'fields': ['primary_image', 'primary_video_url']}),
        (_('Secondary image or video'), {
            'fields': ['secondary_image', 'secondary_video_url'],
            'classes': ['collapse'],
            'description': _(
                'Choose either a video or image to include on the first page of the question, '
                'where students select concept tags. This is only used if you want the question '
                'to be hidden when students select concept tags; instead, a preliminary video or '
                'image can be displayed. The main question image will be displayed on all '
                'subsequent pages.'
            ),
        }),
        (_('Answers'), {'fields': [
            'answer_style', 'answer_num_choices', 'correct_answer', 'second_best_answer'
        ]}),
        (None, {'fields': ['example_rationale']}),
    ]

@admin.register(models.Assignment)
class AssignmentAdmin(admin.ModelAdmin):
    """Admin for assignments."""
    # Two-pane selector widget for picking the assignment's questions.
    filter_horizontal = ['questions']
|
from __future__ import unicode_literals
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from . import models
@admin.register(models.Question)
class QuestionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['title']}),
(_('Main image or video'), {'fields': ['primary_image', 'primary_video_url']}),
(_('Secondary image or video'), {
'fields': ['secondary_image', 'secondary_video_url'],
'classes': ['collapse'],
'description': _(
'Choose either a video or image to include on the first page of the question, '
'where students select concept tags. This is only used if you want the question '
'to be hidden when students select concept tags; instead, a preliminary video or '
'image can be displayed. The main question image will be displayed on all '
'subsequent pages.'
),
}),
(_('Answers'), {'fields': [
'answer_style', 'answer_num_choices', 'correct_answer', 'second_best_answer'
]}),
(None, {'fields': ['example_rationale']}),
]
@admin.register(models.Assignment)
class AssignmentAdmin(admin.ModelAdmin):
- pass
+ filter_horizontal = ['questions']
|
4379d143cfb5bb4c49febd881d7691aed7039210
|
judge/sandbox.py
|
judge/sandbox.py
|
import asyncio
class Sandbox:
def __init__(self):
self._process = None
async def start(self):
if self._process is not None:
raise ValueError("The sandbox has started")
self._process = await asyncio.create_subprocess_exec(
"sandbox",
stdin = asyncio.subprocess.PIPE,
stdout = asyncio.subprocess.PIPE)
async def close(self):
if self._process is None:
raise ValueError("The sandbox has not started")
if self._process.returncode is not None:
return
self._process.stdin.close()
await self._process.wait()
async def __aenter__(self):
await self.start()
return self
async def __aexit__(self, exc_type, exc, tb):
await self.close()
async def execute(self, *args, **kwargs):
raise NotImplementedError()
async def read(self, filename):
raise NotImplementedError()
async def write(self, *args, **kwargs):
raise NotImplementedError()
|
import asyncio
class Sandbox:
def __init__(self):
self._process = None
async def start(self):
if self._process is not None:
raise ValueError("The sandbox has started")
self._process = await asyncio.create_subprocess_exec(
"sandbox",
stdin = asyncio.subprocess.PIPE,
stdout = asyncio.subprocess.PIPE)
async def close(self):
if self._process is None:
raise ValueError("The sandbox has not started")
if self._process.returncode is not None:
return
self._process.stdin.close()
await self._process.wait()
async def __aenter__(self):
await self.start()
return self
async def __aexit__(self, exc_type, exc, tb):
await self.close()
async def execute(self, *args, **kwargs):
raise NotImplementedError()
async def read(self, filename):
if b"\0" in filename:
raise ValueError("filename contains null charactor")
if b"\n" in filename:
raise ValueError("filename contains newline")
self._process.stdin.write(b"read %b\n" % filename)
result = b"";
while True:
line = await self._process.stdout.readuntil()
if line == b"ok\n":
return result
if line == b"error\n":
raise asyncio.IncompleteReadError(result, None)
count = int(line)
result += await self._process.stdout.readexactly(count)
async def write(self, *args, **kwargs):
raise NotImplementedError()
|
Implement read of Python wrapper
|
Implement read of Python wrapper
|
Python
|
agpl-3.0
|
johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj
|
import asyncio
class Sandbox:
def __init__(self):
self._process = None
async def start(self):
if self._process is not None:
raise ValueError("The sandbox has started")
self._process = await asyncio.create_subprocess_exec(
"sandbox",
stdin = asyncio.subprocess.PIPE,
stdout = asyncio.subprocess.PIPE)
async def close(self):
if self._process is None:
raise ValueError("The sandbox has not started")
if self._process.returncode is not None:
return
self._process.stdin.close()
await self._process.wait()
async def __aenter__(self):
await self.start()
return self
async def __aexit__(self, exc_type, exc, tb):
await self.close()
async def execute(self, *args, **kwargs):
raise NotImplementedError()
async def read(self, filename):
- raise NotImplementedError()
+ if b"\0" in filename:
+ raise ValueError("filename contains null charactor")
+ if b"\n" in filename:
+ raise ValueError("filename contains newline")
+
+ self._process.stdin.write(b"read %b\n" % filename)
+
+ result = b"";
+ while True:
+ line = await self._process.stdout.readuntil()
+ if line == b"ok\n":
+ return result
+ if line == b"error\n":
+ raise asyncio.IncompleteReadError(result, None)
+ count = int(line)
+ result += await self._process.stdout.readexactly(count)
+
async def write(self, *args, **kwargs):
raise NotImplementedError()
|
Implement read of Python wrapper
|
## Code Before:
import asyncio
class Sandbox:
def __init__(self):
self._process = None
async def start(self):
if self._process is not None:
raise ValueError("The sandbox has started")
self._process = await asyncio.create_subprocess_exec(
"sandbox",
stdin = asyncio.subprocess.PIPE,
stdout = asyncio.subprocess.PIPE)
async def close(self):
if self._process is None:
raise ValueError("The sandbox has not started")
if self._process.returncode is not None:
return
self._process.stdin.close()
await self._process.wait()
async def __aenter__(self):
await self.start()
return self
async def __aexit__(self, exc_type, exc, tb):
await self.close()
async def execute(self, *args, **kwargs):
raise NotImplementedError()
async def read(self, filename):
raise NotImplementedError()
async def write(self, *args, **kwargs):
raise NotImplementedError()
## Instruction:
Implement read of Python wrapper
## Code After:
import asyncio
class Sandbox:
def __init__(self):
self._process = None
async def start(self):
if self._process is not None:
raise ValueError("The sandbox has started")
self._process = await asyncio.create_subprocess_exec(
"sandbox",
stdin = asyncio.subprocess.PIPE,
stdout = asyncio.subprocess.PIPE)
async def close(self):
if self._process is None:
raise ValueError("The sandbox has not started")
if self._process.returncode is not None:
return
self._process.stdin.close()
await self._process.wait()
async def __aenter__(self):
await self.start()
return self
async def __aexit__(self, exc_type, exc, tb):
await self.close()
async def execute(self, *args, **kwargs):
raise NotImplementedError()
async def read(self, filename):
if b"\0" in filename:
raise ValueError("filename contains null charactor")
if b"\n" in filename:
raise ValueError("filename contains newline")
self._process.stdin.write(b"read %b\n" % filename)
result = b"";
while True:
line = await self._process.stdout.readuntil()
if line == b"ok\n":
return result
if line == b"error\n":
raise asyncio.IncompleteReadError(result, None)
count = int(line)
result += await self._process.stdout.readexactly(count)
async def write(self, *args, **kwargs):
raise NotImplementedError()
|
import asyncio
class Sandbox:
def __init__(self):
self._process = None
async def start(self):
if self._process is not None:
raise ValueError("The sandbox has started")
self._process = await asyncio.create_subprocess_exec(
"sandbox",
stdin = asyncio.subprocess.PIPE,
stdout = asyncio.subprocess.PIPE)
async def close(self):
if self._process is None:
raise ValueError("The sandbox has not started")
if self._process.returncode is not None:
return
self._process.stdin.close()
await self._process.wait()
async def __aenter__(self):
await self.start()
return self
async def __aexit__(self, exc_type, exc, tb):
await self.close()
async def execute(self, *args, **kwargs):
raise NotImplementedError()
async def read(self, filename):
- raise NotImplementedError()
+ if b"\0" in filename:
+ raise ValueError("filename contains null charactor")
+ if b"\n" in filename:
+ raise ValueError("filename contains newline")
+
+ self._process.stdin.write(b"read %b\n" % filename)
+
+ result = b"";
+ while True:
+ line = await self._process.stdout.readuntil()
+ if line == b"ok\n":
+ return result
+ if line == b"error\n":
+ raise asyncio.IncompleteReadError(result, None)
+ count = int(line)
+ result += await self._process.stdout.readexactly(count)
+
async def write(self, *args, **kwargs):
raise NotImplementedError()
|
f7b351a43d99a6063c49dfdf8db60c654fd89b74
|
scrapi/processing/postgres.py
|
scrapi/processing/postgres.py
|
from __future__ import absolute_import
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "webview.settings")
import logging
from api.webview.models import Document
from scrapi import events
from scrapi.processing.base import BaseProcessor
logger = logging.getLogger(__name__)
class PostgresProcessor(BaseProcessor):
NAME = 'postgres'
@events.logged(events.PROCESSING, 'raw.postgres')
def process_raw(self, raw_doc):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.raw = raw_doc.attributes
document.save()
@events.logged(events.PROCESSING, 'normalized.postgres')
def process_normalized(self, raw_doc, normalized):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.normalized = normalized.attributes
document.providerUpdatedDateTime = normalized['providerUpdatedDateTime']
document.save()
def _get_by_source_id(self, model, source, docID):
return Document.objects.filter(source=source, docID=docID)
|
from __future__ import absolute_import
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "webview.settings")
import django
import logging
from api.webview.models import Document
from scrapi import events
from scrapi.processing.base import BaseProcessor
django.setup()
logger = logging.getLogger(__name__)
class PostgresProcessor(BaseProcessor):
NAME = 'postgres'
@events.logged(events.PROCESSING, 'raw.postgres')
def process_raw(self, raw_doc):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.raw = raw_doc.attributes
document.save()
@events.logged(events.PROCESSING, 'normalized.postgres')
def process_normalized(self, raw_doc, normalized):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.normalized = normalized.attributes
document.providerUpdatedDateTime = normalized['providerUpdatedDateTime']
document.save()
def _get_by_source_id(self, model, source, docID):
return Document.objects.filter(source=source, docID=docID)
|
Add django setup for some initialization
|
Add django setup for some initialization
|
Python
|
apache-2.0
|
CenterForOpenScience/scrapi,fabianvf/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,erinspace/scrapi,felliott/scrapi,mehanig/scrapi,fabianvf/scrapi,felliott/scrapi
|
from __future__ import absolute_import
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "webview.settings")
+ import django
import logging
from api.webview.models import Document
from scrapi import events
from scrapi.processing.base import BaseProcessor
+
+ django.setup()
logger = logging.getLogger(__name__)
class PostgresProcessor(BaseProcessor):
NAME = 'postgres'
@events.logged(events.PROCESSING, 'raw.postgres')
def process_raw(self, raw_doc):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.raw = raw_doc.attributes
document.save()
@events.logged(events.PROCESSING, 'normalized.postgres')
def process_normalized(self, raw_doc, normalized):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.normalized = normalized.attributes
document.providerUpdatedDateTime = normalized['providerUpdatedDateTime']
document.save()
def _get_by_source_id(self, model, source, docID):
return Document.objects.filter(source=source, docID=docID)
|
Add django setup for some initialization
|
## Code Before:
from __future__ import absolute_import
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "webview.settings")
import logging
from api.webview.models import Document
from scrapi import events
from scrapi.processing.base import BaseProcessor
logger = logging.getLogger(__name__)
class PostgresProcessor(BaseProcessor):
NAME = 'postgres'
@events.logged(events.PROCESSING, 'raw.postgres')
def process_raw(self, raw_doc):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.raw = raw_doc.attributes
document.save()
@events.logged(events.PROCESSING, 'normalized.postgres')
def process_normalized(self, raw_doc, normalized):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.normalized = normalized.attributes
document.providerUpdatedDateTime = normalized['providerUpdatedDateTime']
document.save()
def _get_by_source_id(self, model, source, docID):
return Document.objects.filter(source=source, docID=docID)
## Instruction:
Add django setup for some initialization
## Code After:
from __future__ import absolute_import
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "webview.settings")
import django
import logging
from api.webview.models import Document
from scrapi import events
from scrapi.processing.base import BaseProcessor
django.setup()
logger = logging.getLogger(__name__)
class PostgresProcessor(BaseProcessor):
NAME = 'postgres'
@events.logged(events.PROCESSING, 'raw.postgres')
def process_raw(self, raw_doc):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.raw = raw_doc.attributes
document.save()
@events.logged(events.PROCESSING, 'normalized.postgres')
def process_normalized(self, raw_doc, normalized):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.normalized = normalized.attributes
document.providerUpdatedDateTime = normalized['providerUpdatedDateTime']
document.save()
def _get_by_source_id(self, model, source, docID):
return Document.objects.filter(source=source, docID=docID)
|
from __future__ import absolute_import
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "webview.settings")
+ import django
import logging
from api.webview.models import Document
from scrapi import events
from scrapi.processing.base import BaseProcessor
+
+ django.setup()
logger = logging.getLogger(__name__)
class PostgresProcessor(BaseProcessor):
NAME = 'postgres'
@events.logged(events.PROCESSING, 'raw.postgres')
def process_raw(self, raw_doc):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.raw = raw_doc.attributes
document.save()
@events.logged(events.PROCESSING, 'normalized.postgres')
def process_normalized(self, raw_doc, normalized):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.normalized = normalized.attributes
document.providerUpdatedDateTime = normalized['providerUpdatedDateTime']
document.save()
def _get_by_source_id(self, model, source, docID):
return Document.objects.filter(source=source, docID=docID)
|
eff924e07999bd0aaaa36373c658efb1ffefe5c7
|
magpie/utils/solr.py
|
magpie/utils/solr.py
|
from mysolr import Solr
from magpie.settings import settings
_solr = None
def open_solr_connection(core_name):
global _solr
if not _solr:
url = '{}/{}'.format(settings.SOLR_URL, core_name)
_solr = Solr(url)
return _solr
|
from mysolr import Solr
from magpie.settings import settings
_solr = None
def open_solr_connection(core_name):
global _solr
if not _solr:
url = '{}/{}'.format(settings.SOLR_URL, core_name)
_solr = Solr(url)
return _solr
def escape_solr_query(query):
"""
Escape special chars for Solr queries.
"""
chars = ['+', '-', '&&', '||', '!', '(', ')', '{', '}', '[', ']', '^', '"', '~', '*', '?',
':', '/', ' ']
for char in chars:
query = query.replace(char, '\{}'.format(char))
return query
|
Add method to escape special chars in Solr queries
|
Add method to escape special chars in Solr queries
|
Python
|
apache-2.0
|
nimiq/moogle-project
|
from mysolr import Solr
from magpie.settings import settings
_solr = None
def open_solr_connection(core_name):
global _solr
if not _solr:
url = '{}/{}'.format(settings.SOLR_URL, core_name)
_solr = Solr(url)
return _solr
+
+
+ def escape_solr_query(query):
+ """
+ Escape special chars for Solr queries.
+ """
+ chars = ['+', '-', '&&', '||', '!', '(', ')', '{', '}', '[', ']', '^', '"', '~', '*', '?',
+ ':', '/', ' ']
+ for char in chars:
+ query = query.replace(char, '\{}'.format(char))
+
+ return query
|
Add method to escape special chars in Solr queries
|
## Code Before:
from mysolr import Solr
from magpie.settings import settings
_solr = None
def open_solr_connection(core_name):
global _solr
if not _solr:
url = '{}/{}'.format(settings.SOLR_URL, core_name)
_solr = Solr(url)
return _solr
## Instruction:
Add method to escape special chars in Solr queries
## Code After:
from mysolr import Solr
from magpie.settings import settings
_solr = None
def open_solr_connection(core_name):
global _solr
if not _solr:
url = '{}/{}'.format(settings.SOLR_URL, core_name)
_solr = Solr(url)
return _solr
def escape_solr_query(query):
"""
Escape special chars for Solr queries.
"""
chars = ['+', '-', '&&', '||', '!', '(', ')', '{', '}', '[', ']', '^', '"', '~', '*', '?',
':', '/', ' ']
for char in chars:
query = query.replace(char, '\{}'.format(char))
return query
|
from mysolr import Solr
from magpie.settings import settings
_solr = None
def open_solr_connection(core_name):
global _solr
if not _solr:
url = '{}/{}'.format(settings.SOLR_URL, core_name)
_solr = Solr(url)
return _solr
+
+
+ def escape_solr_query(query):
+ """
+ Escape special chars for Solr queries.
+ """
+ chars = ['+', '-', '&&', '||', '!', '(', ')', '{', '}', '[', ']', '^', '"', '~', '*', '?',
+ ':', '/', ' ']
+ for char in chars:
+ query = query.replace(char, '\{}'.format(char))
+
+ return query
|
9b8c1f35d057bbf6e336434bd028cb0b2673afb8
|
installer/installer_config/admin.py
|
installer/installer_config/admin.py
|
from django.contrib import admin
from installer_config.models import Package, TerminalPrompt, EnvironmentProfile
class PackageAdmin(admin.ModelAdmin):
model = Package
list_display = ('display_name', 'version', 'website')
class TerminalPromptAdmin(admin.ModelAdmin):
model = TerminalPrompt
list_display = ('display_name', 'install_name', 'description')
admin.site.register(Package, PackageAdmin)
admin.site.register(TerminalPrompt, TerminalPromptAdmin)
# admin.site.register(EnvironmentProfile, EnvironmentProfileAdmin)
|
from django.contrib import admin
from installer_config.models import Package, TerminalPrompt, EnvironmentProfile
class PackageAdmin(admin.ModelAdmin):
model = Package
list_display = ('display_name', 'version', 'website')
class TerminalPromptAdmin(admin.ModelAdmin):
model = TerminalPrompt
list_display = ('display_name', 'install_name', 'description')
class EnvironmentProfileAdmin(admin.ModelAdmin):
model = EnvironmentProfile
admin.site.register(Package, PackageAdmin)
admin.site.register(TerminalPrompt, TerminalPromptAdmin)
admin.site.register(EnvironmentProfile, EnvironmentProfileAdmin)
|
Add Environment Profile to Admin view
|
Add Environment Profile to Admin view
|
Python
|
mit
|
ezPy-co/ezpy,ezPy-co/ezpy,alibulota/Package_Installer,alibulota/Package_Installer
|
from django.contrib import admin
from installer_config.models import Package, TerminalPrompt, EnvironmentProfile
class PackageAdmin(admin.ModelAdmin):
model = Package
list_display = ('display_name', 'version', 'website')
class TerminalPromptAdmin(admin.ModelAdmin):
model = TerminalPrompt
list_display = ('display_name', 'install_name', 'description')
+ class EnvironmentProfileAdmin(admin.ModelAdmin):
+ model = EnvironmentProfile
+
+
admin.site.register(Package, PackageAdmin)
admin.site.register(TerminalPrompt, TerminalPromptAdmin)
- # admin.site.register(EnvironmentProfile, EnvironmentProfileAdmin)
+ admin.site.register(EnvironmentProfile, EnvironmentProfileAdmin)
|
Add Environment Profile to Admin view
|
## Code Before:
from django.contrib import admin
from installer_config.models import Package, TerminalPrompt, EnvironmentProfile
class PackageAdmin(admin.ModelAdmin):
model = Package
list_display = ('display_name', 'version', 'website')
class TerminalPromptAdmin(admin.ModelAdmin):
model = TerminalPrompt
list_display = ('display_name', 'install_name', 'description')
admin.site.register(Package, PackageAdmin)
admin.site.register(TerminalPrompt, TerminalPromptAdmin)
# admin.site.register(EnvironmentProfile, EnvironmentProfileAdmin)
## Instruction:
Add Environment Profile to Admin view
## Code After:
from django.contrib import admin
from installer_config.models import Package, TerminalPrompt, EnvironmentProfile
class PackageAdmin(admin.ModelAdmin):
model = Package
list_display = ('display_name', 'version', 'website')
class TerminalPromptAdmin(admin.ModelAdmin):
model = TerminalPrompt
list_display = ('display_name', 'install_name', 'description')
class EnvironmentProfileAdmin(admin.ModelAdmin):
model = EnvironmentProfile
admin.site.register(Package, PackageAdmin)
admin.site.register(TerminalPrompt, TerminalPromptAdmin)
admin.site.register(EnvironmentProfile, EnvironmentProfileAdmin)
|
from django.contrib import admin
from installer_config.models import Package, TerminalPrompt, EnvironmentProfile
class PackageAdmin(admin.ModelAdmin):
model = Package
list_display = ('display_name', 'version', 'website')
class TerminalPromptAdmin(admin.ModelAdmin):
model = TerminalPrompt
list_display = ('display_name', 'install_name', 'description')
+ class EnvironmentProfileAdmin(admin.ModelAdmin):
+ model = EnvironmentProfile
+
+
admin.site.register(Package, PackageAdmin)
admin.site.register(TerminalPrompt, TerminalPromptAdmin)
- # admin.site.register(EnvironmentProfile, EnvironmentProfileAdmin)
? --
+ admin.site.register(EnvironmentProfile, EnvironmentProfileAdmin)
|
df5040b728ec59f9f548c7bd032d9e8b7ab0c2e0
|
database/queries/update_queries.py
|
database/queries/update_queries.py
|
UPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
|
UPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
DELETE_RESERVATION = '''
UPDATE RESERVATION
SET USER_ID=?, PROJECTION_ID=?, ROW=?, COL=?
WHERE RESERVATION.ID=?;
'''
|
Add update queries for reservation
|
Add update queries for reservation
|
Python
|
mit
|
BrickText/JHROM
|
UPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
+ DELETE_RESERVATION = '''
+ UPDATE RESERVATION
+ SET USER_ID=?, PROJECTION_ID=?, ROW=?, COL=?
+ WHERE RESERVATION.ID=?;
+ '''
+
|
Add update queries for reservation
|
## Code Before:
UPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
## Instruction:
Add update queries for reservation
## Code After:
UPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
DELETE_RESERVATION = '''
UPDATE RESERVATION
SET USER_ID=?, PROJECTION_ID=?, ROW=?, COL=?
WHERE RESERVATION.ID=?;
'''
|
UPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
+
+ DELETE_RESERVATION = '''
+ UPDATE RESERVATION
+ SET USER_ID=?, PROJECTION_ID=?, ROW=?, COL=?
+ WHERE RESERVATION.ID=?;
+ '''
|
26dcd1ce43864de77c1cd26065c09cc2b4c4788e
|
tests/fuzzer/test_random_content.py
|
tests/fuzzer/test_random_content.py
|
import pytest
import fuzzinator
@pytest.mark.parametrize('fuzzer_kwargs, exp_min_len, exp_max_len', [
({}, 1, 1),
({'max_length': '100'}, 1, 100),
({'min_length': '10', 'max_length': '100'}, 10, 100),
])
def test_random_content(fuzzer_kwargs, exp_min_len, exp_max_len):
for index in range(100):
out = fuzzinator.fuzzer.RandomContent(index=index, **fuzzer_kwargs)
out_len = len(out)
assert out_len >= exp_min_len and out_len <= exp_max_len
|
import pytest
import fuzzinator
@pytest.mark.parametrize('fuzzer_kwargs, exp_min_len, exp_max_len', [
({}, 1, 1),
({'max_length': '100'}, 1, 100),
({'min_length': '10', 'max_length': '100'}, 10, 100),
])
def test_random_content(fuzzer_kwargs, exp_min_len, exp_max_len):
for index in range(100):
out = fuzzinator.fuzzer.RandomContent(index=index, **fuzzer_kwargs)
out_len = len(out)
assert exp_min_len <= out_len <= exp_max_len
|
Make use of chained comparisons
|
Make use of chained comparisons
|
Python
|
bsd-3-clause
|
akosthekiss/fuzzinator,akosthekiss/fuzzinator,akosthekiss/fuzzinator,renatahodovan/fuzzinator,renatahodovan/fuzzinator,renatahodovan/fuzzinator,akosthekiss/fuzzinator,renatahodovan/fuzzinator
|
import pytest
import fuzzinator
@pytest.mark.parametrize('fuzzer_kwargs, exp_min_len, exp_max_len', [
({}, 1, 1),
({'max_length': '100'}, 1, 100),
({'min_length': '10', 'max_length': '100'}, 10, 100),
])
def test_random_content(fuzzer_kwargs, exp_min_len, exp_max_len):
for index in range(100):
out = fuzzinator.fuzzer.RandomContent(index=index, **fuzzer_kwargs)
out_len = len(out)
- assert out_len >= exp_min_len and out_len <= exp_max_len
+ assert exp_min_len <= out_len <= exp_max_len
|
Make use of chained comparisons
|
## Code Before:
import pytest
import fuzzinator
@pytest.mark.parametrize('fuzzer_kwargs, exp_min_len, exp_max_len', [
({}, 1, 1),
({'max_length': '100'}, 1, 100),
({'min_length': '10', 'max_length': '100'}, 10, 100),
])
def test_random_content(fuzzer_kwargs, exp_min_len, exp_max_len):
for index in range(100):
out = fuzzinator.fuzzer.RandomContent(index=index, **fuzzer_kwargs)
out_len = len(out)
assert out_len >= exp_min_len and out_len <= exp_max_len
## Instruction:
Make use of chained comparisons
## Code After:
import pytest
import fuzzinator
@pytest.mark.parametrize('fuzzer_kwargs, exp_min_len, exp_max_len', [
({}, 1, 1),
({'max_length': '100'}, 1, 100),
({'min_length': '10', 'max_length': '100'}, 10, 100),
])
def test_random_content(fuzzer_kwargs, exp_min_len, exp_max_len):
for index in range(100):
out = fuzzinator.fuzzer.RandomContent(index=index, **fuzzer_kwargs)
out_len = len(out)
assert exp_min_len <= out_len <= exp_max_len
|
import pytest
import fuzzinator
@pytest.mark.parametrize('fuzzer_kwargs, exp_min_len, exp_max_len', [
({}, 1, 1),
({'max_length': '100'}, 1, 100),
({'min_length': '10', 'max_length': '100'}, 10, 100),
])
def test_random_content(fuzzer_kwargs, exp_min_len, exp_max_len):
for index in range(100):
out = fuzzinator.fuzzer.RandomContent(index=index, **fuzzer_kwargs)
out_len = len(out)
- assert out_len >= exp_min_len and out_len <= exp_max_len
? ----------- ^^^
+ assert exp_min_len <= out_len <= exp_max_len
? ^^
|
d08973c3854d10755e156b1457972a8aaebb251b
|
bottle_utils/form/__init__.py
|
bottle_utils/form/__init__.py
|
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import Validator, Required, DateValidator, InRangeValidator
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
|
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import (Validator, Required, DateValidator, InRangeValidator,
LengthValidator)
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
|
Include LengthValidator in list of exporeted objects
|
Include LengthValidator in list of exporeted objects
Signed-off-by: Branko Vukelic <[email protected]>
|
Python
|
bsd-2-clause
|
Outernet-Project/bottle-utils
|
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
- from .validators import Validator, Required, DateValidator, InRangeValidator
+ from .validators import (Validator, Required, DateValidator, InRangeValidator,
+ LengthValidator)
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
|
Include LengthValidator in list of exporeted objects
|
## Code Before:
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import Validator, Required, DateValidator, InRangeValidator
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
## Instruction:
Include LengthValidator in list of exporeted objects
## Code After:
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import (Validator, Required, DateValidator, InRangeValidator,
LengthValidator)
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
|
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
- from .validators import Validator, Required, DateValidator, InRangeValidator
+ from .validators import (Validator, Required, DateValidator, InRangeValidator,
? + +
+ LengthValidator)
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
|
8a6144fc3918856cb2259f65f9ee5cc9cfaf1fdc
|
locustfile.py
|
locustfile.py
|
from locust import HttpLocust, TaskSet, task
class UserBehavior(TaskSet):
tasks = []
def on_start(self):
pass
@task
def index(self):
self.client.get("/")
@task
def move_map(self):
self.client.get("")
@task
def select_scene(self):
# Get url
self.client.get()
@task
def render_preview(self):
self.client.get()
@task
def render_full(self):
self.client.get()
class WebsiteUser(HttpLocust):
task_set = UserBehavior
min_wait = 1000
max_wait = 5000
|
from locust import HttpLocust, TaskSet, task
from bs4 import BeautifulSoup
from requests import Session
import random
class UserBehavior(TaskSet):
def on_start(self):
pass
@task
def index(self):
self.client.get("/")
@task
def move_map(self):
lat = random.uniform(-1, 1)
lon = random.uniform(-1, 1)
response = self.client.post(
url="/ajax",
data={'lat': lat, 'lng': lng,}
)
self.client.get("")
@task
def select_scene(self):
# Get url
soup = BeautifulSoup(self.client.get(""))
self.client.get()
@task
def render_preview(self):
self.client.get()
@task
def render_full(self):
self.client.get()
class WebsiteUser(HttpLocust):
task_set = UserBehavior
min_wait = 1000
max_wait = 5000
|
Add random functionality to map move.
|
Add random functionality to map move.
|
Python
|
mit
|
recombinators/snapsat,recombinators/snapsat,recombinators/snapsat
|
from locust import HttpLocust, TaskSet, task
+ from bs4 import BeautifulSoup
+ from requests import Session
+ import random
class UserBehavior(TaskSet):
- tasks = []
-
def on_start(self):
pass
@task
def index(self):
self.client.get("/")
@task
def move_map(self):
+ lat = random.uniform(-1, 1)
+ lon = random.uniform(-1, 1)
+ response = self.client.post(
+ url="/ajax",
+ data={'lat': lat, 'lng': lng,}
+ )
+
self.client.get("")
- @task
+ @task
- def select_scene(self):
+ def select_scene(self):
- # Get url
+ # Get url
+ soup = BeautifulSoup(self.client.get(""))
- self.client.get()
-
- @task
- def render_preview(self):
self.client.get()
- @task
+ @task
+ def render_preview(self):
+ self.client.get()
+
+ @task
- def render_full(self):
+ def render_full(self):
- self.client.get()
+ self.client.get()
class WebsiteUser(HttpLocust):
task_set = UserBehavior
min_wait = 1000
max_wait = 5000
|
Add random functionality to map move.
|
## Code Before:
from locust import HttpLocust, TaskSet, task
class UserBehavior(TaskSet):
tasks = []
def on_start(self):
pass
@task
def index(self):
self.client.get("/")
@task
def move_map(self):
self.client.get("")
@task
def select_scene(self):
# Get url
self.client.get()
@task
def render_preview(self):
self.client.get()
@task
def render_full(self):
self.client.get()
class WebsiteUser(HttpLocust):
task_set = UserBehavior
min_wait = 1000
max_wait = 5000
## Instruction:
Add random functionality to map move.
## Code After:
from locust import HttpLocust, TaskSet, task
from bs4 import BeautifulSoup
from requests import Session
import random
class UserBehavior(TaskSet):
def on_start(self):
pass
@task
def index(self):
self.client.get("/")
@task
def move_map(self):
lat = random.uniform(-1, 1)
lon = random.uniform(-1, 1)
response = self.client.post(
url="/ajax",
data={'lat': lat, 'lng': lng,}
)
self.client.get("")
@task
def select_scene(self):
# Get url
soup = BeautifulSoup(self.client.get(""))
self.client.get()
@task
def render_preview(self):
self.client.get()
@task
def render_full(self):
self.client.get()
class WebsiteUser(HttpLocust):
task_set = UserBehavior
min_wait = 1000
max_wait = 5000
|
from locust import HttpLocust, TaskSet, task
+ from bs4 import BeautifulSoup
+ from requests import Session
+ import random
class UserBehavior(TaskSet):
- tasks = []
-
def on_start(self):
pass
@task
def index(self):
self.client.get("/")
@task
def move_map(self):
+ lat = random.uniform(-1, 1)
+ lon = random.uniform(-1, 1)
+ response = self.client.post(
+ url="/ajax",
+ data={'lat': lat, 'lng': lng,}
+ )
+
self.client.get("")
- @task
+ @task
? ++++
- def select_scene(self):
+ def select_scene(self):
? ++++
- # Get url
+ # Get url
? ++++
+ soup = BeautifulSoup(self.client.get(""))
- self.client.get()
-
- @task
- def render_preview(self):
self.client.get()
- @task
+ @task
? ++++
+ def render_preview(self):
+ self.client.get()
+
+ @task
- def render_full(self):
+ def render_full(self):
? ++++
- self.client.get()
+ self.client.get()
? ++++
class WebsiteUser(HttpLocust):
task_set = UserBehavior
min_wait = 1000
max_wait = 5000
|
69d6d87688d9f805689407b839c4fb88f397269e
|
cla_backend/apps/status/views.py
|
cla_backend/apps/status/views.py
|
from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer
from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs["content_type"] = "application/json"
super(JSONResponse, self).__init__(content, **kwargs)
@csrf_exempt
def status(request):
if request.method == "GET":
message = ""
c = None
try:
c = connection.cursor()
c.execute("SELECT 1")
row = c.fetchone()
db_ready = row[0] == 1
db_ready = False
return JSONResponse({"db": {"ready": db_ready, "message": message}})
except DatabaseError as e:
message = str(e)
finally:
if c:
c.close()
@csrf_exempt
def smoketests(request):
"""
Run smoke tests and return results as JSON datastructure
"""
from cla_backend.apps.status.tests.smoketests import SmokeTests
return JSONResponse(smoketest(SmokeTests))
class PingJsonView(BasePingJsonView):
CONTRACT_2018_ENABLED_key = None
|
from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer
from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs["content_type"] = "application/json"
super(JSONResponse, self).__init__(content, **kwargs)
@csrf_exempt
def status(request):
if request.method == "GET":
message = ""
c = None
try:
c = connection.cursor()
c.execute("SELECT 1")
row = c.fetchone()
db_ready = row[0] == 1
return JSONResponse({"db": {"ready": db_ready, "message": message}})
except DatabaseError as e:
message = str(e)
finally:
if c:
c.close()
@csrf_exempt
def smoketests(request):
"""
Run smoke tests and return results as JSON datastructure
"""
from cla_backend.apps.status.tests.smoketests import SmokeTests
return JSONResponse(smoketest(SmokeTests))
class PingJsonView(BasePingJsonView):
CONTRACT_2018_ENABLED_key = None
|
Revert "Deliberately break status check"
|
Revert "Deliberately break status check"
This reverts commit da7f671ec287afc0c42f58794053b8bf69ddf620.
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer
from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs["content_type"] = "application/json"
super(JSONResponse, self).__init__(content, **kwargs)
@csrf_exempt
def status(request):
if request.method == "GET":
message = ""
c = None
try:
c = connection.cursor()
c.execute("SELECT 1")
row = c.fetchone()
db_ready = row[0] == 1
- db_ready = False
return JSONResponse({"db": {"ready": db_ready, "message": message}})
except DatabaseError as e:
message = str(e)
finally:
if c:
c.close()
@csrf_exempt
def smoketests(request):
"""
Run smoke tests and return results as JSON datastructure
"""
from cla_backend.apps.status.tests.smoketests import SmokeTests
return JSONResponse(smoketest(SmokeTests))
class PingJsonView(BasePingJsonView):
CONTRACT_2018_ENABLED_key = None
|
Revert "Deliberately break status check"
|
## Code Before:
from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer
from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs["content_type"] = "application/json"
super(JSONResponse, self).__init__(content, **kwargs)
@csrf_exempt
def status(request):
if request.method == "GET":
message = ""
c = None
try:
c = connection.cursor()
c.execute("SELECT 1")
row = c.fetchone()
db_ready = row[0] == 1
db_ready = False
return JSONResponse({"db": {"ready": db_ready, "message": message}})
except DatabaseError as e:
message = str(e)
finally:
if c:
c.close()
@csrf_exempt
def smoketests(request):
"""
Run smoke tests and return results as JSON datastructure
"""
from cla_backend.apps.status.tests.smoketests import SmokeTests
return JSONResponse(smoketest(SmokeTests))
class PingJsonView(BasePingJsonView):
CONTRACT_2018_ENABLED_key = None
## Instruction:
Revert "Deliberately break status check"
## Code After:
from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer
from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs["content_type"] = "application/json"
super(JSONResponse, self).__init__(content, **kwargs)
@csrf_exempt
def status(request):
if request.method == "GET":
message = ""
c = None
try:
c = connection.cursor()
c.execute("SELECT 1")
row = c.fetchone()
db_ready = row[0] == 1
return JSONResponse({"db": {"ready": db_ready, "message": message}})
except DatabaseError as e:
message = str(e)
finally:
if c:
c.close()
@csrf_exempt
def smoketests(request):
"""
Run smoke tests and return results as JSON datastructure
"""
from cla_backend.apps.status.tests.smoketests import SmokeTests
return JSONResponse(smoketest(SmokeTests))
class PingJsonView(BasePingJsonView):
CONTRACT_2018_ENABLED_key = None
|
from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer
from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs["content_type"] = "application/json"
super(JSONResponse, self).__init__(content, **kwargs)
@csrf_exempt
def status(request):
if request.method == "GET":
message = ""
c = None
try:
c = connection.cursor()
c.execute("SELECT 1")
row = c.fetchone()
db_ready = row[0] == 1
- db_ready = False
return JSONResponse({"db": {"ready": db_ready, "message": message}})
except DatabaseError as e:
message = str(e)
finally:
if c:
c.close()
@csrf_exempt
def smoketests(request):
"""
Run smoke tests and return results as JSON datastructure
"""
from cla_backend.apps.status.tests.smoketests import SmokeTests
return JSONResponse(smoketest(SmokeTests))
class PingJsonView(BasePingJsonView):
CONTRACT_2018_ENABLED_key = None
|
c5128bb5dd059580f46647cfe881f1b2c154f62f
|
tests/config_test.py
|
tests/config_test.py
|
import os
from testfixtures import tempdir
import unittest
import yaml
from i2ssh.config import Config
FILENAME = '.i2sshrc'
class ConfigTest(unittest.TestCase):
@tempdir()
def test_cluster(self, tmpdir):
cluster_config = {'hosts': ['host1']}
full_config = {'mycluster': cluster_config}
tmpdir.write(FILENAME, yaml.dump(full_config, default_flow_style=False))
config = Config(os.path.join(tmpdir.path, FILENAME))
self.assertEquals(cluster_config, config.cluster('mycluster'))
|
import os
from testfixtures import tempdir
import unittest
import yaml
from i2ssh.config import Config
FILENAME = '.i2sshrc'
class ConfigTest(unittest.TestCase):
@tempdir()
def test_cluster(self, tmpdir):
cluster_config = {'hosts': ['host1']}
full_config = {'mycluster': cluster_config}
tmpdir.write(FILENAME, yaml.dump(full_config, default_flow_style=False).encode('utf-8'))
config = Config(os.path.join(tmpdir.path, FILENAME))
self.assertEquals(cluster_config, config.cluster('mycluster'))
|
Fix python3 build: Set byte encoding when writing to file.
|
Fix python3 build: Set byte encoding when writing to file.
|
Python
|
apache-2.0
|
mbruggmann/i2ssh
|
import os
from testfixtures import tempdir
import unittest
import yaml
from i2ssh.config import Config
FILENAME = '.i2sshrc'
class ConfigTest(unittest.TestCase):
@tempdir()
def test_cluster(self, tmpdir):
cluster_config = {'hosts': ['host1']}
full_config = {'mycluster': cluster_config}
- tmpdir.write(FILENAME, yaml.dump(full_config, default_flow_style=False))
+ tmpdir.write(FILENAME, yaml.dump(full_config, default_flow_style=False).encode('utf-8'))
config = Config(os.path.join(tmpdir.path, FILENAME))
self.assertEquals(cluster_config, config.cluster('mycluster'))
|
Fix python3 build: Set byte encoding when writing to file.
|
## Code Before:
import os
from testfixtures import tempdir
import unittest
import yaml
from i2ssh.config import Config
FILENAME = '.i2sshrc'
class ConfigTest(unittest.TestCase):
@tempdir()
def test_cluster(self, tmpdir):
cluster_config = {'hosts': ['host1']}
full_config = {'mycluster': cluster_config}
tmpdir.write(FILENAME, yaml.dump(full_config, default_flow_style=False))
config = Config(os.path.join(tmpdir.path, FILENAME))
self.assertEquals(cluster_config, config.cluster('mycluster'))
## Instruction:
Fix python3 build: Set byte encoding when writing to file.
## Code After:
import os
from testfixtures import tempdir
import unittest
import yaml
from i2ssh.config import Config
FILENAME = '.i2sshrc'
class ConfigTest(unittest.TestCase):
@tempdir()
def test_cluster(self, tmpdir):
cluster_config = {'hosts': ['host1']}
full_config = {'mycluster': cluster_config}
tmpdir.write(FILENAME, yaml.dump(full_config, default_flow_style=False).encode('utf-8'))
config = Config(os.path.join(tmpdir.path, FILENAME))
self.assertEquals(cluster_config, config.cluster('mycluster'))
|
import os
from testfixtures import tempdir
import unittest
import yaml
from i2ssh.config import Config
FILENAME = '.i2sshrc'
class ConfigTest(unittest.TestCase):
@tempdir()
def test_cluster(self, tmpdir):
cluster_config = {'hosts': ['host1']}
full_config = {'mycluster': cluster_config}
- tmpdir.write(FILENAME, yaml.dump(full_config, default_flow_style=False))
+ tmpdir.write(FILENAME, yaml.dump(full_config, default_flow_style=False).encode('utf-8'))
? +++++++++++++++ +
config = Config(os.path.join(tmpdir.path, FILENAME))
self.assertEquals(cluster_config, config.cluster('mycluster'))
|
adf71b59168c81240258a2b344e4bea1f6377e7b
|
etools/apps/uptime/forms/report_forms.py
|
etools/apps/uptime/forms/report_forms.py
|
from django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
|
from django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False,
"startDate": "1/1/1953"}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False,
"startDate": "1/1/1953"}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
|
Fix minimum date for uptime:reports
|
Fix minimum date for uptime:reports
|
Python
|
bsd-3-clause
|
Igelinmist/etools,Igelinmist/etools
|
from django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
- "pickTime": False}),
+ "pickTime": False,
+ "startDate": "1/1/1953"}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
- "pickTime": False}),
+ "pickTime": False,
+ "startDate": "1/1/1953"}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
|
Fix minimum date for uptime:reports
|
## Code Before:
from django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
## Instruction:
Fix minimum date for uptime:reports
## Code After:
from django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False,
"startDate": "1/1/1953"}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False,
"startDate": "1/1/1953"}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
|
from django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
- "pickTime": False}),
? --
+ "pickTime": False,
+ "startDate": "1/1/1953"}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
- "pickTime": False}),
? --
+ "pickTime": False,
+ "startDate": "1/1/1953"}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
|
e9e4c622ff667e475986e1544ec78b0604b8a511
|
girder_worker/tasks.py
|
girder_worker/tasks.py
|
import core
from girder_worker.utils import JobStatus
from .app import app
def _cleanup(*args, **kwargs):
core.events.trigger('cleanup')
@app.task(name='girder_worker.run', bind=True, after_return=_cleanup)
def run(tasks, *pargs, **kwargs):
jobInfo = kwargs.pop('jobInfo', {})
retval = 0
kwargs['_job_manager'] = task.job_manager \
if hasattr(task, 'job_manager') else None
kwargs['status'] = JobStatus.RUNNING
return core.run(*pargs, **kwargs)
@app.task(name='girder_worker.convert')
def convert(*pargs, **kwargs):
return core.convert(*pargs, **kwargs)
@app.task(name='girder_worker.validators')
def validators(*pargs, **kwargs):
_type, _format = pargs
nodes = []
for (node, data) in core.format.conv_graph.nodes(data=True):
if ((_type is None) or (_type == node.type)) and \
((_format is None) or (_format == node.format)):
nodes.append({'type': node.type,
'format': node.format,
'validator': data})
return nodes
|
import core
from girder_worker.utils import JobStatus
from .app import app
def _cleanup(*args, **kwargs):
core.events.trigger('cleanup')
@app.task(name='girder_worker.run', bind=True, after_return=_cleanup)
def run(task, *pargs, **kwargs):
kwargs['_job_manager'] = task.job_manager \
if hasattr(task, 'job_manager') else None
kwargs['status'] = JobStatus.RUNNING
return core.run(*pargs, **kwargs)
@app.task(name='girder_worker.convert')
def convert(*pargs, **kwargs):
return core.convert(*pargs, **kwargs)
@app.task(name='girder_worker.validators')
def validators(*pargs, **kwargs):
_type, _format = pargs
nodes = []
for (node, data) in core.format.conv_graph.nodes(data=True):
if ((_type is None) or (_type == node.type)) and \
((_format is None) or (_format == node.format)):
nodes.append({'type': node.type,
'format': node.format,
'validator': data})
return nodes
|
Fix typo from bad conflict resolution during merge
|
Fix typo from bad conflict resolution during merge
|
Python
|
apache-2.0
|
girder/girder_worker,girder/girder_worker,girder/girder_worker
|
import core
from girder_worker.utils import JobStatus
from .app import app
def _cleanup(*args, **kwargs):
core.events.trigger('cleanup')
@app.task(name='girder_worker.run', bind=True, after_return=_cleanup)
- def run(tasks, *pargs, **kwargs):
+ def run(task, *pargs, **kwargs):
- jobInfo = kwargs.pop('jobInfo', {})
- retval = 0
-
kwargs['_job_manager'] = task.job_manager \
if hasattr(task, 'job_manager') else None
kwargs['status'] = JobStatus.RUNNING
return core.run(*pargs, **kwargs)
@app.task(name='girder_worker.convert')
def convert(*pargs, **kwargs):
return core.convert(*pargs, **kwargs)
@app.task(name='girder_worker.validators')
def validators(*pargs, **kwargs):
_type, _format = pargs
nodes = []
for (node, data) in core.format.conv_graph.nodes(data=True):
if ((_type is None) or (_type == node.type)) and \
((_format is None) or (_format == node.format)):
nodes.append({'type': node.type,
'format': node.format,
'validator': data})
return nodes
|
Fix typo from bad conflict resolution during merge
|
## Code Before:
import core
from girder_worker.utils import JobStatus
from .app import app
def _cleanup(*args, **kwargs):
core.events.trigger('cleanup')
@app.task(name='girder_worker.run', bind=True, after_return=_cleanup)
def run(tasks, *pargs, **kwargs):
jobInfo = kwargs.pop('jobInfo', {})
retval = 0
kwargs['_job_manager'] = task.job_manager \
if hasattr(task, 'job_manager') else None
kwargs['status'] = JobStatus.RUNNING
return core.run(*pargs, **kwargs)
@app.task(name='girder_worker.convert')
def convert(*pargs, **kwargs):
return core.convert(*pargs, **kwargs)
@app.task(name='girder_worker.validators')
def validators(*pargs, **kwargs):
_type, _format = pargs
nodes = []
for (node, data) in core.format.conv_graph.nodes(data=True):
if ((_type is None) or (_type == node.type)) and \
((_format is None) or (_format == node.format)):
nodes.append({'type': node.type,
'format': node.format,
'validator': data})
return nodes
## Instruction:
Fix typo from bad conflict resolution during merge
## Code After:
import core
from girder_worker.utils import JobStatus
from .app import app
def _cleanup(*args, **kwargs):
core.events.trigger('cleanup')
@app.task(name='girder_worker.run', bind=True, after_return=_cleanup)
def run(task, *pargs, **kwargs):
kwargs['_job_manager'] = task.job_manager \
if hasattr(task, 'job_manager') else None
kwargs['status'] = JobStatus.RUNNING
return core.run(*pargs, **kwargs)
@app.task(name='girder_worker.convert')
def convert(*pargs, **kwargs):
return core.convert(*pargs, **kwargs)
@app.task(name='girder_worker.validators')
def validators(*pargs, **kwargs):
_type, _format = pargs
nodes = []
for (node, data) in core.format.conv_graph.nodes(data=True):
if ((_type is None) or (_type == node.type)) and \
((_format is None) or (_format == node.format)):
nodes.append({'type': node.type,
'format': node.format,
'validator': data})
return nodes
|
import core
from girder_worker.utils import JobStatus
from .app import app
def _cleanup(*args, **kwargs):
core.events.trigger('cleanup')
@app.task(name='girder_worker.run', bind=True, after_return=_cleanup)
- def run(tasks, *pargs, **kwargs):
? -
+ def run(task, *pargs, **kwargs):
- jobInfo = kwargs.pop('jobInfo', {})
- retval = 0
-
kwargs['_job_manager'] = task.job_manager \
if hasattr(task, 'job_manager') else None
kwargs['status'] = JobStatus.RUNNING
return core.run(*pargs, **kwargs)
@app.task(name='girder_worker.convert')
def convert(*pargs, **kwargs):
return core.convert(*pargs, **kwargs)
@app.task(name='girder_worker.validators')
def validators(*pargs, **kwargs):
_type, _format = pargs
nodes = []
for (node, data) in core.format.conv_graph.nodes(data=True):
if ((_type is None) or (_type == node.type)) and \
((_format is None) or (_format == node.format)):
nodes.append({'type': node.type,
'format': node.format,
'validator': data})
return nodes
|
674f6e0b9fbb76684a9b05d16a5da0d4cc732b1d
|
scripts/analysis/plot_tracking_vector_estimator_stats.py
|
scripts/analysis/plot_tracking_vector_estimator_stats.py
|
import numpy as np
import matplotlib.pyplot as plt
import argparse
import sys
import os
parser = argparse.ArgumentParser(
prog='plot_tracking_vector_estimator')
parser.add_argument('directory', type=str, help='Data directory')
args = parser.parse_args()
data = np.genfromtxt(
os.path.join(
args.directory,
'tracking_vector_estimator'),
delimiter=',', names=True)
state_labels = ['Marker_x', 'Marker_y', 'Marker_z', 'Velocity_x', 'Velocity_y', 'Velocity_z']
noise_labels = ['Noise_x', 'Noise_y', 'Noise_z', 'Noise_vx', 'Noise_vy', 'Noise_vz']
meas_labels = ['Measured_Marker_x', 'Measured_Marker_y', 'Measured_Marker_y', 'Measured_Velocity_x', 'Measured_Velocity_y', 'Measured_Velocity_z']
ts = (data['Time'] - data['Time'][0]) / 1e9
plt.figure(1)
for i in range(6):
plt.subplot(2, 3, i+1)
plt.plot(ts, data[meas_labels[i]])
plt.errorbar(ts, data[state_labels[i]], yerr=data[noise_labels[i]])
plt.ylabel(state_labels[i])
plt.xlabel('Time (seconds)')
plt.legend([meas_labels[i], state_labels[i]])
plt.show()
|
import numpy as np
import matplotlib.pyplot as plt
import argparse
import sys
import os
parser = argparse.ArgumentParser(
prog='plot_tracking_vector_estimator')
parser.add_argument('directory', type=str, help='Data directory')
args = parser.parse_args()
data = np.genfromtxt(
os.path.join(
args.directory,
'tracking_vector_estimator'),
delimiter=',', names=True)
state_labels = ['Marker_x', 'Marker_y', 'Marker_z']
noise_labels = ['Noise_x', 'Noise_y', 'Noise_z']
meas_labels = ['Measured_Marker_x', 'Measured_Marker_y', 'Measured_Marker_y']
meas_noise_labels = ['Meas_noise_x', 'Meas_noise_y', 'Meas_noise_z']
ts = (data['Time'] - data['Time'][0]) / 1e9
plt.figure(1)
for i in range(3):
plt.subplot(2, 3, i+1)
plt.errorbar(ts, data[meas_labels[i]], yerr=data[meas_noise_labels[i]])
plt.errorbar(ts, data[state_labels[i]], yerr=data[noise_labels[i]])
plt.ylabel(state_labels[i])
plt.xlabel('Time (seconds)')
plt.legend([meas_labels[i], state_labels[i]])
plt.show()
|
Change estimator script based on modifications to estimator
|
Change estimator script based on modifications to estimator
|
Python
|
mpl-2.0
|
jhu-asco/aerial_autonomy,jhu-asco/aerial_autonomy,jhu-asco/aerial_autonomy,jhu-asco/aerial_autonomy,jhu-asco/aerial_autonomy,jhu-asco/aerial_autonomy
|
import numpy as np
import matplotlib.pyplot as plt
import argparse
import sys
import os
parser = argparse.ArgumentParser(
prog='plot_tracking_vector_estimator')
parser.add_argument('directory', type=str, help='Data directory')
args = parser.parse_args()
data = np.genfromtxt(
os.path.join(
args.directory,
'tracking_vector_estimator'),
delimiter=',', names=True)
- state_labels = ['Marker_x', 'Marker_y', 'Marker_z', 'Velocity_x', 'Velocity_y', 'Velocity_z']
- noise_labels = ['Noise_x', 'Noise_y', 'Noise_z', 'Noise_vx', 'Noise_vy', 'Noise_vz']
- meas_labels = ['Measured_Marker_x', 'Measured_Marker_y', 'Measured_Marker_y', 'Measured_Velocity_x', 'Measured_Velocity_y', 'Measured_Velocity_z']
+ state_labels = ['Marker_x', 'Marker_y', 'Marker_z']
+ noise_labels = ['Noise_x', 'Noise_y', 'Noise_z']
+ meas_labels = ['Measured_Marker_x', 'Measured_Marker_y', 'Measured_Marker_y']
+ meas_noise_labels = ['Meas_noise_x', 'Meas_noise_y', 'Meas_noise_z']
ts = (data['Time'] - data['Time'][0]) / 1e9
plt.figure(1)
- for i in range(6):
+ for i in range(3):
plt.subplot(2, 3, i+1)
- plt.plot(ts, data[meas_labels[i]])
+ plt.errorbar(ts, data[meas_labels[i]], yerr=data[meas_noise_labels[i]])
plt.errorbar(ts, data[state_labels[i]], yerr=data[noise_labels[i]])
plt.ylabel(state_labels[i])
plt.xlabel('Time (seconds)')
plt.legend([meas_labels[i], state_labels[i]])
plt.show()
|
Change estimator script based on modifications to estimator
|
## Code Before:
import numpy as np
import matplotlib.pyplot as plt
import argparse
import sys
import os
parser = argparse.ArgumentParser(
prog='plot_tracking_vector_estimator')
parser.add_argument('directory', type=str, help='Data directory')
args = parser.parse_args()
data = np.genfromtxt(
os.path.join(
args.directory,
'tracking_vector_estimator'),
delimiter=',', names=True)
state_labels = ['Marker_x', 'Marker_y', 'Marker_z', 'Velocity_x', 'Velocity_y', 'Velocity_z']
noise_labels = ['Noise_x', 'Noise_y', 'Noise_z', 'Noise_vx', 'Noise_vy', 'Noise_vz']
meas_labels = ['Measured_Marker_x', 'Measured_Marker_y', 'Measured_Marker_y', 'Measured_Velocity_x', 'Measured_Velocity_y', 'Measured_Velocity_z']
ts = (data['Time'] - data['Time'][0]) / 1e9
plt.figure(1)
for i in range(6):
plt.subplot(2, 3, i+1)
plt.plot(ts, data[meas_labels[i]])
plt.errorbar(ts, data[state_labels[i]], yerr=data[noise_labels[i]])
plt.ylabel(state_labels[i])
plt.xlabel('Time (seconds)')
plt.legend([meas_labels[i], state_labels[i]])
plt.show()
## Instruction:
Change estimator script based on modifications to estimator
## Code After:
import numpy as np
import matplotlib.pyplot as plt
import argparse
import sys
import os
parser = argparse.ArgumentParser(
prog='plot_tracking_vector_estimator')
parser.add_argument('directory', type=str, help='Data directory')
args = parser.parse_args()
data = np.genfromtxt(
os.path.join(
args.directory,
'tracking_vector_estimator'),
delimiter=',', names=True)
state_labels = ['Marker_x', 'Marker_y', 'Marker_z']
noise_labels = ['Noise_x', 'Noise_y', 'Noise_z']
meas_labels = ['Measured_Marker_x', 'Measured_Marker_y', 'Measured_Marker_y']
meas_noise_labels = ['Meas_noise_x', 'Meas_noise_y', 'Meas_noise_z']
ts = (data['Time'] - data['Time'][0]) / 1e9
plt.figure(1)
for i in range(3):
plt.subplot(2, 3, i+1)
plt.errorbar(ts, data[meas_labels[i]], yerr=data[meas_noise_labels[i]])
plt.errorbar(ts, data[state_labels[i]], yerr=data[noise_labels[i]])
plt.ylabel(state_labels[i])
plt.xlabel('Time (seconds)')
plt.legend([meas_labels[i], state_labels[i]])
plt.show()
|
import numpy as np
import matplotlib.pyplot as plt
import argparse
import sys
import os
parser = argparse.ArgumentParser(
prog='plot_tracking_vector_estimator')
parser.add_argument('directory', type=str, help='Data directory')
args = parser.parse_args()
data = np.genfromtxt(
os.path.join(
args.directory,
'tracking_vector_estimator'),
delimiter=',', names=True)
- state_labels = ['Marker_x', 'Marker_y', 'Marker_z', 'Velocity_x', 'Velocity_y', 'Velocity_z']
- noise_labels = ['Noise_x', 'Noise_y', 'Noise_z', 'Noise_vx', 'Noise_vy', 'Noise_vz']
- meas_labels = ['Measured_Marker_x', 'Measured_Marker_y', 'Measured_Marker_y', 'Measured_Velocity_x', 'Measured_Velocity_y', 'Measured_Velocity_z']
+ state_labels = ['Marker_x', 'Marker_y', 'Marker_z']
+ noise_labels = ['Noise_x', 'Noise_y', 'Noise_z']
+ meas_labels = ['Measured_Marker_x', 'Measured_Marker_y', 'Measured_Marker_y']
+ meas_noise_labels = ['Meas_noise_x', 'Meas_noise_y', 'Meas_noise_z']
ts = (data['Time'] - data['Time'][0]) / 1e9
plt.figure(1)
- for i in range(6):
? ^
+ for i in range(3):
? ^
plt.subplot(2, 3, i+1)
- plt.plot(ts, data[meas_labels[i]])
+ plt.errorbar(ts, data[meas_labels[i]], yerr=data[meas_noise_labels[i]])
plt.errorbar(ts, data[state_labels[i]], yerr=data[noise_labels[i]])
plt.ylabel(state_labels[i])
plt.xlabel('Time (seconds)')
plt.legend([meas_labels[i], state_labels[i]])
plt.show()
|
9d9704f631156e01d55d1d1217a41ab3704bdc03
|
tests/unit/test_context.py
|
tests/unit/test_context.py
|
import testtools
from openstack.common import context
class ContextTest(testtools.TestCase):
def test_context(self):
ctx = context.RequestContext()
self.assertTrue(ctx)
|
from openstack.common import context
from tests import utils
class ContextTest(utils.BaseTestCase):
def test_context(self):
ctx = context.RequestContext()
self.assertTrue(ctx)
|
Replace direct use of testtools BaseTestCase.
|
Replace direct use of testtools BaseTestCase.
Using the BaseTestCase across the tests in the tree lets us put in log
fixtures and consistently handle mox and stubout.
Part of blueprint grizzly-testtools.
Change-Id: Iba7eb2c63b0c514009b2c28e5930b27726a147b0
|
Python
|
apache-2.0
|
dims/oslo.context,JioCloud/oslo.context,citrix-openstack-build/oslo.context,varunarya10/oslo.context,openstack/oslo.context,yanheven/oslo.middleware
|
-
- import testtools
from openstack.common import context
+ from tests import utils
- class ContextTest(testtools.TestCase):
+ class ContextTest(utils.BaseTestCase):
def test_context(self):
ctx = context.RequestContext()
self.assertTrue(ctx)
|
Replace direct use of testtools BaseTestCase.
|
## Code Before:
import testtools
from openstack.common import context
class ContextTest(testtools.TestCase):
def test_context(self):
ctx = context.RequestContext()
self.assertTrue(ctx)
## Instruction:
Replace direct use of testtools BaseTestCase.
## Code After:
from openstack.common import context
from tests import utils
class ContextTest(utils.BaseTestCase):
def test_context(self):
ctx = context.RequestContext()
self.assertTrue(ctx)
|
-
- import testtools
from openstack.common import context
+ from tests import utils
- class ContextTest(testtools.TestCase):
? ^^^^^^
+ class ContextTest(utils.BaseTestCase):
? + ^ ++++
def test_context(self):
ctx = context.RequestContext()
self.assertTrue(ctx)
|
9b043b0bd31f35e140831f61a4484513922f8712
|
stop_words/__init__.py
|
stop_words/__init__.py
|
import os
__VERSION__ = (2014, 5, 26)
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
STOP_WORDS_DIR = os.path.join(CURRENT_DIR, 'stop-words/')
def get_version():
"""
:rtype: basestring
"""
return ".".join(str(v) for v in __VERSION__)
def get_stop_words(language):
"""
:type language: basestring
:rtype: list
"""
with open('{0}{1}.txt'.format(STOP_WORDS_DIR, language)) as lang_file:
lines = lang_file.readlines()
return [str(line.strip()).decode('utf-8') for line in lines]
|
import os
__VERSION__ = (2014, 5, 26)
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
STOP_WORDS_DIR = os.path.join(CURRENT_DIR, 'stop-words/')
LANGUAGE_MAPPING = {
'ar': 'arabic',
'da': 'danish',
'nl': 'dutch',
'en': 'english',
'fi': 'finnish',
'fr': 'french',
'de': 'german',
'hu': 'hungarian',
'it': 'italian',
'nb': 'norwegian',
'pt': 'portuguese',
'ro': 'romanian',
'ru': 'russian',
'es': 'spanish',
'sv': 'swedish',
'tr': 'turkish',
}
AVAILABLE_LANGUAGES = LANGUAGE_MAPPING.values()
def get_version():
"""
:rtype: basestring
"""
return ".".join(str(v) for v in __VERSION__)
class StopWordError(Exception):
pass
def get_stop_words(language):
"""
:type language: basestring
:rtype: list
"""
try:
language = LANGUAGE_MAPPING[language]
except KeyError:
pass
if language not in AVAILABLE_LANGUAGES:
raise StopWordError('%s language is unavailable')
with open('{0}{1}.txt'.format(STOP_WORDS_DIR, language)) as lang_file:
lines = lang_file.readlines()
return [str(line.strip()).decode('utf-8') for line in lines]
|
Implement language code mapping and check availability of the language
|
Implement language code mapping and check availability of the language
|
Python
|
bsd-3-clause
|
Alir3z4/python-stop-words
|
import os
__VERSION__ = (2014, 5, 26)
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
STOP_WORDS_DIR = os.path.join(CURRENT_DIR, 'stop-words/')
+ LANGUAGE_MAPPING = {
+ 'ar': 'arabic',
+ 'da': 'danish',
+ 'nl': 'dutch',
+ 'en': 'english',
+ 'fi': 'finnish',
+ 'fr': 'french',
+ 'de': 'german',
+ 'hu': 'hungarian',
+ 'it': 'italian',
+ 'nb': 'norwegian',
+ 'pt': 'portuguese',
+ 'ro': 'romanian',
+ 'ru': 'russian',
+ 'es': 'spanish',
+ 'sv': 'swedish',
+ 'tr': 'turkish',
+ }
+
+ AVAILABLE_LANGUAGES = LANGUAGE_MAPPING.values()
def get_version():
"""
:rtype: basestring
"""
return ".".join(str(v) for v in __VERSION__)
+ class StopWordError(Exception):
+ pass
+
+
def get_stop_words(language):
"""
:type language: basestring
:rtype: list
"""
+ try:
+ language = LANGUAGE_MAPPING[language]
+ except KeyError:
+ pass
+
+ if language not in AVAILABLE_LANGUAGES:
+ raise StopWordError('%s language is unavailable')
+
with open('{0}{1}.txt'.format(STOP_WORDS_DIR, language)) as lang_file:
lines = lang_file.readlines()
return [str(line.strip()).decode('utf-8') for line in lines]
|
Implement language code mapping and check availability of the language
|
## Code Before:
import os
__VERSION__ = (2014, 5, 26)
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
STOP_WORDS_DIR = os.path.join(CURRENT_DIR, 'stop-words/')
def get_version():
"""
:rtype: basestring
"""
return ".".join(str(v) for v in __VERSION__)
def get_stop_words(language):
"""
:type language: basestring
:rtype: list
"""
with open('{0}{1}.txt'.format(STOP_WORDS_DIR, language)) as lang_file:
lines = lang_file.readlines()
return [str(line.strip()).decode('utf-8') for line in lines]
## Instruction:
Implement language code mapping and check availability of the language
## Code After:
import os
__VERSION__ = (2014, 5, 26)
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
STOP_WORDS_DIR = os.path.join(CURRENT_DIR, 'stop-words/')
LANGUAGE_MAPPING = {
'ar': 'arabic',
'da': 'danish',
'nl': 'dutch',
'en': 'english',
'fi': 'finnish',
'fr': 'french',
'de': 'german',
'hu': 'hungarian',
'it': 'italian',
'nb': 'norwegian',
'pt': 'portuguese',
'ro': 'romanian',
'ru': 'russian',
'es': 'spanish',
'sv': 'swedish',
'tr': 'turkish',
}
AVAILABLE_LANGUAGES = LANGUAGE_MAPPING.values()
def get_version():
"""
:rtype: basestring
"""
return ".".join(str(v) for v in __VERSION__)
class StopWordError(Exception):
pass
def get_stop_words(language):
"""
:type language: basestring
:rtype: list
"""
try:
language = LANGUAGE_MAPPING[language]
except KeyError:
pass
if language not in AVAILABLE_LANGUAGES:
raise StopWordError('%s language is unavailable')
with open('{0}{1}.txt'.format(STOP_WORDS_DIR, language)) as lang_file:
lines = lang_file.readlines()
return [str(line.strip()).decode('utf-8') for line in lines]
|
import os
__VERSION__ = (2014, 5, 26)
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
STOP_WORDS_DIR = os.path.join(CURRENT_DIR, 'stop-words/')
+ LANGUAGE_MAPPING = {
+ 'ar': 'arabic',
+ 'da': 'danish',
+ 'nl': 'dutch',
+ 'en': 'english',
+ 'fi': 'finnish',
+ 'fr': 'french',
+ 'de': 'german',
+ 'hu': 'hungarian',
+ 'it': 'italian',
+ 'nb': 'norwegian',
+ 'pt': 'portuguese',
+ 'ro': 'romanian',
+ 'ru': 'russian',
+ 'es': 'spanish',
+ 'sv': 'swedish',
+ 'tr': 'turkish',
+ }
+
+ AVAILABLE_LANGUAGES = LANGUAGE_MAPPING.values()
def get_version():
"""
:rtype: basestring
"""
return ".".join(str(v) for v in __VERSION__)
+ class StopWordError(Exception):
+ pass
+
+
def get_stop_words(language):
"""
:type language: basestring
:rtype: list
"""
+ try:
+ language = LANGUAGE_MAPPING[language]
+ except KeyError:
+ pass
+
+ if language not in AVAILABLE_LANGUAGES:
+ raise StopWordError('%s language is unavailable')
+
with open('{0}{1}.txt'.format(STOP_WORDS_DIR, language)) as lang_file:
lines = lang_file.readlines()
return [str(line.strip()).decode('utf-8') for line in lines]
|
ad558a5acc93e1e5206ed27b2dc679089b277890
|
me_api/app.py
|
me_api/app.py
|
from __future__ import absolute_import, unicode_literals
from flask import Flask
from .middleware.me import me
from .middleware import github, keybase, medium
from .cache import cache
def create_app(config):
app = Flask(__name__)
app.config.from_object(config)
cache.init_app(app)
modules = config.modules['modules']
blueprints = {
'github': github.github_api,
'keybase': keybase.keybase_api,
'medium': medium.medium_api
}
app.register_blueprint(me)
for module in modules.keys():
app.register_blueprint(blueprints[module])
return app
|
from __future__ import absolute_import, unicode_literals
from flask import Flask
from .middleware.me import me
from .cache import cache
def _register_module(app, module):
if module == 'github':
from .middleware import github
app.register_blueprint(github.github_api)
elif module == 'keybase':
from .middleware import keybase
app.register_blueprint(keybase.keybase_api)
elif module == 'medium':
from .middleware import medium
app.register_blueprint(medium.medium_api)
def create_app(config):
app = Flask(__name__)
app.config.from_object(config)
cache.init_app(app)
modules = config.modules['modules']
app.register_blueprint(me)
for module in modules.keys():
_register_module(app, module)
return app
|
Fix giant bug: crash when don't config all modules
|
Fix giant bug: crash when don't config all modules
that's bacause you import all the modules
> from .middleware import github, keybase, medium
while each module need to get configurations from modules.json, e.g.
> config = Config.modules['modules']['github']
but can't get anything at all, so it will crash.
that's not the correct behaviour, so we just import the desired module
and then register it. The solution isn't very elegant.
|
Python
|
mit
|
lord63/me-api
|
from __future__ import absolute_import, unicode_literals
from flask import Flask
from .middleware.me import me
- from .middleware import github, keybase, medium
from .cache import cache
+
+
+ def _register_module(app, module):
+ if module == 'github':
+ from .middleware import github
+ app.register_blueprint(github.github_api)
+ elif module == 'keybase':
+ from .middleware import keybase
+ app.register_blueprint(keybase.keybase_api)
+ elif module == 'medium':
+ from .middleware import medium
+ app.register_blueprint(medium.medium_api)
def create_app(config):
app = Flask(__name__)
app.config.from_object(config)
cache.init_app(app)
modules = config.modules['modules']
- blueprints = {
- 'github': github.github_api,
- 'keybase': keybase.keybase_api,
- 'medium': medium.medium_api
- }
-
app.register_blueprint(me)
for module in modules.keys():
- app.register_blueprint(blueprints[module])
+ _register_module(app, module)
return app
|
Fix giant bug: crash when don't config all modules
|
## Code Before:
from __future__ import absolute_import, unicode_literals
from flask import Flask
from .middleware.me import me
from .middleware import github, keybase, medium
from .cache import cache
def create_app(config):
app = Flask(__name__)
app.config.from_object(config)
cache.init_app(app)
modules = config.modules['modules']
blueprints = {
'github': github.github_api,
'keybase': keybase.keybase_api,
'medium': medium.medium_api
}
app.register_blueprint(me)
for module in modules.keys():
app.register_blueprint(blueprints[module])
return app
## Instruction:
Fix giant bug: crash when don't config all modules
## Code After:
from __future__ import absolute_import, unicode_literals
from flask import Flask
from .middleware.me import me
from .cache import cache
def _register_module(app, module):
if module == 'github':
from .middleware import github
app.register_blueprint(github.github_api)
elif module == 'keybase':
from .middleware import keybase
app.register_blueprint(keybase.keybase_api)
elif module == 'medium':
from .middleware import medium
app.register_blueprint(medium.medium_api)
def create_app(config):
app = Flask(__name__)
app.config.from_object(config)
cache.init_app(app)
modules = config.modules['modules']
app.register_blueprint(me)
for module in modules.keys():
_register_module(app, module)
return app
|
from __future__ import absolute_import, unicode_literals
from flask import Flask
from .middleware.me import me
- from .middleware import github, keybase, medium
from .cache import cache
+
+
+ def _register_module(app, module):
+ if module == 'github':
+ from .middleware import github
+ app.register_blueprint(github.github_api)
+ elif module == 'keybase':
+ from .middleware import keybase
+ app.register_blueprint(keybase.keybase_api)
+ elif module == 'medium':
+ from .middleware import medium
+ app.register_blueprint(medium.medium_api)
def create_app(config):
app = Flask(__name__)
app.config.from_object(config)
cache.init_app(app)
modules = config.modules['modules']
- blueprints = {
- 'github': github.github_api,
- 'keybase': keybase.keybase_api,
- 'medium': medium.medium_api
- }
-
app.register_blueprint(me)
for module in modules.keys():
- app.register_blueprint(blueprints[module])
+ _register_module(app, module)
return app
|
8eb3c6aa123cecec826c3c07f98b2d2b84c265af
|
scrapi/registry.py
|
scrapi/registry.py
|
import sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
|
import sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
def __hash__(self):
return hash(self.freeze(self))
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
def freeze(self, o):
if isinstance(o, dict):
return frozenset({k: self.freeze(v) for k, v in o.items()}.items())
elif isinstance(o, list):
return tuple(map(self.freeze, o))
return o
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
|
Make _Registry hashable so that django can import from scrapi
|
Make _Registry hashable so that django can import from scrapi
|
Python
|
apache-2.0
|
fabianvf/scrapi,felliott/scrapi,erinspace/scrapi,mehanig/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,mehanig/scrapi,felliott/scrapi,CenterForOpenScience/scrapi
|
import sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
+ def __hash__(self):
+ return hash(self.freeze(self))
+
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
+
+ def freeze(self, o):
+ if isinstance(o, dict):
+ return frozenset({k: self.freeze(v) for k, v in o.items()}.items())
+ elif isinstance(o, list):
+ return tuple(map(self.freeze, o))
+ return o
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
|
Make _Registry hashable so that django can import from scrapi
|
## Code Before:
import sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
## Instruction:
Make _Registry hashable so that django can import from scrapi
## Code After:
import sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
def __hash__(self):
return hash(self.freeze(self))
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
def freeze(self, o):
if isinstance(o, dict):
return frozenset({k: self.freeze(v) for k, v in o.items()}.items())
elif isinstance(o, list):
return tuple(map(self.freeze, o))
return o
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
|
import sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
+ def __hash__(self):
+ return hash(self.freeze(self))
+
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
+
+ def freeze(self, o):
+ if isinstance(o, dict):
+ return frozenset({k: self.freeze(v) for k, v in o.items()}.items())
+ elif isinstance(o, list):
+ return tuple(map(self.freeze, o))
+ return o
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
|
c830e66431dab010309b4ad92ef38c418ec7029b
|
models.py
|
models.py
|
import datetime
from flask import url_for
from Simpoll import db
class Poll(db.Document):
created_at = db.DateTimeField(default=datetime.datetime.now, required=True)
question = db.StringField(max_length=255, required=True)
option1 = db.StringField(max_length=255, required=True)
option2 = db.StringField(max_length=255, required=True)
option1votes = db.IntField(required=True)
option2votes = db.IntField(required=True)
topscore = db.IntField(required=True)
def get_absolute_url(self):
# it's okay to use the first 7 bytes for url
# because first 4 bytes are time and next 3 are
# a machine id
return url_for('post', kwargs={"slug": self._id[0:6]})
def __unicode__(self):
return self.question
meta = {
'allow_inheritance': True,
'indexes': ['-created_at', 'slug'],
'ordering': ['-created_at']
}
|
import datetime
from flask import url_for
from Simpoll import db
class Poll(db.Document):
created_at = db.DateTimeField(default=datetime.datetime.now, required=True)
question = db.StringField(max_length=255, required=True)
option1 = db.StringField(max_length=255, required=True)
option2 = db.StringField(max_length=255, required=True)
option1votes = db.IntField(default=0, required=True)
option2votes = db.IntField(default=0, required=True)
topscore = db.IntField(default=0, required=True)
def get_absolute_url(self):
# it's okay to use the first 7 bytes for url
# because first 4 bytes are time and next 3 are
# a machine id
return url_for('post', kwargs={"slug": self._id[0:6]})
def __unicode__(self):
return self.question
meta = {
'allow_inheritance': True,
'indexes': ['-created_at', 'slug'],
'ordering': ['-created_at']
}
|
Add default votes and topscores
|
Add default votes and topscores
|
Python
|
mit
|
dpuleri/simpoll_backend,dpuleri/simpoll_backend,dpuleri/simpoll_backend,dpuleri/simpoll_backend
|
import datetime
from flask import url_for
from Simpoll import db
class Poll(db.Document):
created_at = db.DateTimeField(default=datetime.datetime.now, required=True)
question = db.StringField(max_length=255, required=True)
option1 = db.StringField(max_length=255, required=True)
option2 = db.StringField(max_length=255, required=True)
- option1votes = db.IntField(required=True)
+ option1votes = db.IntField(default=0, required=True)
- option2votes = db.IntField(required=True)
+ option2votes = db.IntField(default=0, required=True)
- topscore = db.IntField(required=True)
+ topscore = db.IntField(default=0, required=True)
def get_absolute_url(self):
# it's okay to use the first 7 bytes for url
# because first 4 bytes are time and next 3 are
# a machine id
return url_for('post', kwargs={"slug": self._id[0:6]})
def __unicode__(self):
return self.question
meta = {
'allow_inheritance': True,
'indexes': ['-created_at', 'slug'],
'ordering': ['-created_at']
}
|
Add default votes and topscores
|
## Code Before:
import datetime
from flask import url_for
from Simpoll import db
class Poll(db.Document):
created_at = db.DateTimeField(default=datetime.datetime.now, required=True)
question = db.StringField(max_length=255, required=True)
option1 = db.StringField(max_length=255, required=True)
option2 = db.StringField(max_length=255, required=True)
option1votes = db.IntField(required=True)
option2votes = db.IntField(required=True)
topscore = db.IntField(required=True)
def get_absolute_url(self):
# it's okay to use the first 7 bytes for url
# because first 4 bytes are time and next 3 are
# a machine id
return url_for('post', kwargs={"slug": self._id[0:6]})
def __unicode__(self):
return self.question
meta = {
'allow_inheritance': True,
'indexes': ['-created_at', 'slug'],
'ordering': ['-created_at']
}
## Instruction:
Add default votes and topscores
## Code After:
import datetime
from flask import url_for
from Simpoll import db
class Poll(db.Document):
created_at = db.DateTimeField(default=datetime.datetime.now, required=True)
question = db.StringField(max_length=255, required=True)
option1 = db.StringField(max_length=255, required=True)
option2 = db.StringField(max_length=255, required=True)
option1votes = db.IntField(default=0, required=True)
option2votes = db.IntField(default=0, required=True)
topscore = db.IntField(default=0, required=True)
def get_absolute_url(self):
# it's okay to use the first 7 bytes for url
# because first 4 bytes are time and next 3 are
# a machine id
return url_for('post', kwargs={"slug": self._id[0:6]})
def __unicode__(self):
return self.question
meta = {
'allow_inheritance': True,
'indexes': ['-created_at', 'slug'],
'ordering': ['-created_at']
}
|
import datetime
from flask import url_for
from Simpoll import db
class Poll(db.Document):
created_at = db.DateTimeField(default=datetime.datetime.now, required=True)
question = db.StringField(max_length=255, required=True)
option1 = db.StringField(max_length=255, required=True)
option2 = db.StringField(max_length=255, required=True)
- option1votes = db.IntField(required=True)
+ option1votes = db.IntField(default=0, required=True)
? +++++++++++
- option2votes = db.IntField(required=True)
+ option2votes = db.IntField(default=0, required=True)
? +++++++++++
- topscore = db.IntField(required=True)
+ topscore = db.IntField(default=0, required=True)
? +++++++++++
def get_absolute_url(self):
# it's okay to use the first 7 bytes for url
# because first 4 bytes are time and next 3 are
# a machine id
return url_for('post', kwargs={"slug": self._id[0:6]})
def __unicode__(self):
return self.question
meta = {
'allow_inheritance': True,
'indexes': ['-created_at', 'slug'],
'ordering': ['-created_at']
}
|
30e567adb809810930616493fd92ef1c40c9207b
|
dthm4kaiako/users/forms.py
|
dthm4kaiako/users/forms.py
|
"""Forms for user application."""
from django.forms import ModelForm
from django.contrib.auth import get_user_model, forms
User = get_user_model()
class SignupForm(ModelForm):
"""Sign up for user registration."""
class Meta:
"""Metadata for SignupForm class."""
model = get_user_model()
fields = ['first_name', 'last_name']
def signup(self, request, user):
"""Extra logic when a user signs up.
Required by django-allauth.
"""
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
class UserChangeForm(forms.UserChangeForm):
"""Form class for changing user."""
class Meta(forms.UserChangeForm.Meta):
"""Metadata for UserChangeForm class."""
model = User
fields = ('email', 'last_name')
class UserCreationForm(forms.UserCreationForm):
"""Form class for creating user."""
class Meta(forms.UserCreationForm.Meta):
"""Metadata for UserCreationForm class."""
model = User
fields = ('email', 'first_name', 'last_name')
|
"""Forms for user application."""
from django.forms import ModelForm
from django.contrib.auth import get_user_model, forms
from captcha.fields import ReCaptchaField
from captcha.widgets import ReCaptchaV3
User = get_user_model()
class SignupForm(ModelForm):
"""Sign up for user registration."""
captcha = ReCaptchaField(widget=ReCaptchaV3, label='')
class Meta:
"""Metadata for SignupForm class."""
model = get_user_model()
fields = ['first_name', 'last_name']
def signup(self, request, user):
"""Extra logic when a user signs up.
Required by django-allauth.
"""
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
class UserChangeForm(forms.UserChangeForm):
"""Form class for changing user."""
class Meta(forms.UserChangeForm.Meta):
"""Metadata for UserChangeForm class."""
model = User
fields = ('email', 'last_name')
class UserCreationForm(forms.UserCreationForm):
"""Form class for creating user."""
class Meta(forms.UserCreationForm.Meta):
"""Metadata for UserCreationForm class."""
model = User
fields = ('email', 'first_name', 'last_name')
|
Add recaptcha to signup page
|
Add recaptcha to signup page
Signup page is currently not used, but
doing it now in case it is forgotten later.
|
Python
|
mit
|
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
|
"""Forms for user application."""
from django.forms import ModelForm
from django.contrib.auth import get_user_model, forms
+ from captcha.fields import ReCaptchaField
+ from captcha.widgets import ReCaptchaV3
User = get_user_model()
class SignupForm(ModelForm):
"""Sign up for user registration."""
+
+ captcha = ReCaptchaField(widget=ReCaptchaV3, label='')
class Meta:
"""Metadata for SignupForm class."""
model = get_user_model()
fields = ['first_name', 'last_name']
def signup(self, request, user):
"""Extra logic when a user signs up.
Required by django-allauth.
"""
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
class UserChangeForm(forms.UserChangeForm):
"""Form class for changing user."""
class Meta(forms.UserChangeForm.Meta):
"""Metadata for UserChangeForm class."""
model = User
fields = ('email', 'last_name')
class UserCreationForm(forms.UserCreationForm):
"""Form class for creating user."""
class Meta(forms.UserCreationForm.Meta):
"""Metadata for UserCreationForm class."""
model = User
fields = ('email', 'first_name', 'last_name')
|
Add recaptcha to signup page
|
## Code Before:
"""Forms for user application."""
from django.forms import ModelForm
from django.contrib.auth import get_user_model, forms
User = get_user_model()
class SignupForm(ModelForm):
"""Sign up for user registration."""
class Meta:
"""Metadata for SignupForm class."""
model = get_user_model()
fields = ['first_name', 'last_name']
def signup(self, request, user):
"""Extra logic when a user signs up.
Required by django-allauth.
"""
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
class UserChangeForm(forms.UserChangeForm):
"""Form class for changing user."""
class Meta(forms.UserChangeForm.Meta):
"""Metadata for UserChangeForm class."""
model = User
fields = ('email', 'last_name')
class UserCreationForm(forms.UserCreationForm):
"""Form class for creating user."""
class Meta(forms.UserCreationForm.Meta):
"""Metadata for UserCreationForm class."""
model = User
fields = ('email', 'first_name', 'last_name')
## Instruction:
Add recaptcha to signup page
## Code After:
"""Forms for user application."""
from django.forms import ModelForm
from django.contrib.auth import get_user_model, forms
from captcha.fields import ReCaptchaField
from captcha.widgets import ReCaptchaV3
User = get_user_model()
class SignupForm(ModelForm):
"""Sign up for user registration."""
captcha = ReCaptchaField(widget=ReCaptchaV3, label='')
class Meta:
"""Metadata for SignupForm class."""
model = get_user_model()
fields = ['first_name', 'last_name']
def signup(self, request, user):
"""Extra logic when a user signs up.
Required by django-allauth.
"""
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
class UserChangeForm(forms.UserChangeForm):
"""Form class for changing user."""
class Meta(forms.UserChangeForm.Meta):
"""Metadata for UserChangeForm class."""
model = User
fields = ('email', 'last_name')
class UserCreationForm(forms.UserCreationForm):
"""Form class for creating user."""
class Meta(forms.UserCreationForm.Meta):
"""Metadata for UserCreationForm class."""
model = User
fields = ('email', 'first_name', 'last_name')
|
"""Forms for user application."""
from django.forms import ModelForm
from django.contrib.auth import get_user_model, forms
+ from captcha.fields import ReCaptchaField
+ from captcha.widgets import ReCaptchaV3
User = get_user_model()
class SignupForm(ModelForm):
"""Sign up for user registration."""
+
+ captcha = ReCaptchaField(widget=ReCaptchaV3, label='')
class Meta:
"""Metadata for SignupForm class."""
model = get_user_model()
fields = ['first_name', 'last_name']
def signup(self, request, user):
"""Extra logic when a user signs up.
Required by django-allauth.
"""
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
class UserChangeForm(forms.UserChangeForm):
"""Form class for changing user."""
class Meta(forms.UserChangeForm.Meta):
"""Metadata for UserChangeForm class."""
model = User
fields = ('email', 'last_name')
class UserCreationForm(forms.UserCreationForm):
"""Form class for creating user."""
class Meta(forms.UserCreationForm.Meta):
"""Metadata for UserCreationForm class."""
model = User
fields = ('email', 'first_name', 'last_name')
|
fbf42c288a6faa13ac918047eac09985cbd6f6e0
|
cal/v1/network/drivers/openstack_network.py
|
cal/v1/network/drivers/openstack_network.py
|
from neutronclient.v2_0 import client
from network_driver import NetworkDriver
class OpenstackNetWorkDriver(NetworkDriver):
"""docstring for OpenstackNetWorkDriver"""
def __init__(self, auth_url, project_name,
username, password, user_domain_name=None,
project_domain_name=None, driver_name=None):
super(OpenstackNetWorkDriver, self).__init__()
self.provider = "OPENSTACK"
self.auth_url = auth_url
self.project_domain_name = project_domain_name
self.user_domain_name = user_domain_name
self.project_name = project_name
self.username = username
self.password = password
if driver_name:
self.driver_name = driver_name
else:
self.driver_name = "default"
self._setup()
def _setup(self):
self.client = client.Client(
username=self.username,
password=self.password,
tenant_name=self.project_name,
auth_url=self.auth_url
)
def create(self):
raise NotImplementedError
def show(self):
raise NotImplementedError
def list(self):
raise NotImplementedError
def update(self):
raise NotImplementedError
def delete(self):
raise NotImplementedError
|
from neutronclient.v2_0 import client
from network_driver import NetworkDriver
class OpenstackNetWorkDriver(NetworkDriver):
"""docstring for OpenstackNetWorkDriver"""
def __init__(self, auth_url, project_name,
username, password, **kargs):
super(OpenstackNetWorkDriver, self).__init__()
self.provider = "OPENSTACK"
self.auth_url = auth_url
self.project_name = project_name
self.username = username
self.password = password
self.driver_name = kargs.pop('driver_name', 'default')
self._setup()
def _setup(self):
self.client = client.Client(
username=self.username,
password=self.password,
project_name=self.project_name,
auth_url=self.auth_url
)
def create(self, network):
return self.client.create_network({'network': network})
def show(self, network_id):
return self.client.show_network(network_id)
def list(self, retrieve_all=True, **kargs):
return self.client.list_networks(retrieve_all, **kargs)
def update(self, network_id, network):
return self.client.update_network(network_id, {'network': network})
def delete(self, network_id):
return self.client.delete_network(network_id)
|
Add neutron client without test
|
Add neutron client without test
|
Python
|
apache-2.0
|
cloudcomputinghust/CAL
|
from neutronclient.v2_0 import client
from network_driver import NetworkDriver
class OpenstackNetWorkDriver(NetworkDriver):
+
"""docstring for OpenstackNetWorkDriver"""
def __init__(self, auth_url, project_name,
+ username, password, **kargs):
- username, password, user_domain_name=None,
- project_domain_name=None, driver_name=None):
super(OpenstackNetWorkDriver, self).__init__()
self.provider = "OPENSTACK"
self.auth_url = auth_url
- self.project_domain_name = project_domain_name
- self.user_domain_name = user_domain_name
self.project_name = project_name
self.username = username
self.password = password
+ self.driver_name = kargs.pop('driver_name', 'default')
- if driver_name:
- self.driver_name = driver_name
- else:
- self.driver_name = "default"
-
self._setup()
def _setup(self):
self.client = client.Client(
username=self.username,
password=self.password,
- tenant_name=self.project_name,
+ project_name=self.project_name,
auth_url=self.auth_url
)
- def create(self):
+ def create(self, network):
- raise NotImplementedError
+ return self.client.create_network({'network': network})
- def show(self):
+ def show(self, network_id):
- raise NotImplementedError
+ return self.client.show_network(network_id)
- def list(self):
- raise NotImplementedError
+ def list(self, retrieve_all=True, **kargs):
+ return self.client.list_networks(retrieve_all, **kargs)
- def update(self):
- raise NotImplementedError
+ def update(self, network_id, network):
+ return self.client.update_network(network_id, {'network': network})
- def delete(self):
+ def delete(self, network_id):
- raise NotImplementedError
+ return self.client.delete_network(network_id)
|
Add neutron client without test
|
## Code Before:
from neutronclient.v2_0 import client
from network_driver import NetworkDriver
class OpenstackNetWorkDriver(NetworkDriver):
"""docstring for OpenstackNetWorkDriver"""
def __init__(self, auth_url, project_name,
username, password, user_domain_name=None,
project_domain_name=None, driver_name=None):
super(OpenstackNetWorkDriver, self).__init__()
self.provider = "OPENSTACK"
self.auth_url = auth_url
self.project_domain_name = project_domain_name
self.user_domain_name = user_domain_name
self.project_name = project_name
self.username = username
self.password = password
if driver_name:
self.driver_name = driver_name
else:
self.driver_name = "default"
self._setup()
def _setup(self):
self.client = client.Client(
username=self.username,
password=self.password,
tenant_name=self.project_name,
auth_url=self.auth_url
)
def create(self):
raise NotImplementedError
def show(self):
raise NotImplementedError
def list(self):
raise NotImplementedError
def update(self):
raise NotImplementedError
def delete(self):
raise NotImplementedError
## Instruction:
Add neutron client without test
## Code After:
from neutronclient.v2_0 import client
from network_driver import NetworkDriver
class OpenstackNetWorkDriver(NetworkDriver):
"""docstring for OpenstackNetWorkDriver"""
def __init__(self, auth_url, project_name,
username, password, **kargs):
super(OpenstackNetWorkDriver, self).__init__()
self.provider = "OPENSTACK"
self.auth_url = auth_url
self.project_name = project_name
self.username = username
self.password = password
self.driver_name = kargs.pop('driver_name', 'default')
self._setup()
def _setup(self):
self.client = client.Client(
username=self.username,
password=self.password,
project_name=self.project_name,
auth_url=self.auth_url
)
def create(self, network):
return self.client.create_network({'network': network})
def show(self, network_id):
return self.client.show_network(network_id)
def list(self, retrieve_all=True, **kargs):
return self.client.list_networks(retrieve_all, **kargs)
def update(self, network_id, network):
return self.client.update_network(network_id, {'network': network})
def delete(self, network_id):
return self.client.delete_network(network_id)
|
from neutronclient.v2_0 import client
from network_driver import NetworkDriver
class OpenstackNetWorkDriver(NetworkDriver):
+
"""docstring for OpenstackNetWorkDriver"""
def __init__(self, auth_url, project_name,
+ username, password, **kargs):
- username, password, user_domain_name=None,
- project_domain_name=None, driver_name=None):
super(OpenstackNetWorkDriver, self).__init__()
self.provider = "OPENSTACK"
self.auth_url = auth_url
- self.project_domain_name = project_domain_name
- self.user_domain_name = user_domain_name
self.project_name = project_name
self.username = username
self.password = password
+ self.driver_name = kargs.pop('driver_name', 'default')
- if driver_name:
- self.driver_name = driver_name
- else:
- self.driver_name = "default"
-
self._setup()
def _setup(self):
self.client = client.Client(
username=self.username,
password=self.password,
- tenant_name=self.project_name,
? ^ ^^^
+ project_name=self.project_name,
? ^^^^ ^
auth_url=self.auth_url
)
- def create(self):
+ def create(self, network):
? +++++++++
- raise NotImplementedError
+ return self.client.create_network({'network': network})
- def show(self):
+ def show(self, network_id):
? ++++++++++++
- raise NotImplementedError
+ return self.client.show_network(network_id)
- def list(self):
- raise NotImplementedError
+ def list(self, retrieve_all=True, **kargs):
+ return self.client.list_networks(retrieve_all, **kargs)
- def update(self):
- raise NotImplementedError
+ def update(self, network_id, network):
+ return self.client.update_network(network_id, {'network': network})
- def delete(self):
+ def delete(self, network_id):
? ++++++++++++
- raise NotImplementedError
+ return self.client.delete_network(network_id)
|
3eb3cc047f2f5a358066eac8f806580089d70df2
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='dscsrf',
version='1.0',
description='Global double-submit Flask CSRF',
packages=['dscsrf'],
py_modules=['flask'],
)
|
from distutils.core import setup
setup(name='dscsrf',
version='1.0',
description='Global double-submit Flask CSRF',
packages=['dscsrf'],
py_modules=['flask'],
author='sc4reful',
url = 'https://github.com/sc4reful/dscsrf',
keywords = ['security', 'flask', 'website', 'csrf'],
download_url = 'https://github.com/sc4reful/dscsrf/tarball/1.0',
)
|
Prepare for tagging for PyPI
|
Prepare for tagging for PyPI
|
Python
|
mit
|
wkoathp/dscsrf
|
from distutils.core import setup
setup(name='dscsrf',
version='1.0',
description='Global double-submit Flask CSRF',
packages=['dscsrf'],
py_modules=['flask'],
+ author='sc4reful',
+ url = 'https://github.com/sc4reful/dscsrf',
+ keywords = ['security', 'flask', 'website', 'csrf'],
+ download_url = 'https://github.com/sc4reful/dscsrf/tarball/1.0',
)
|
Prepare for tagging for PyPI
|
## Code Before:
from distutils.core import setup
setup(name='dscsrf',
version='1.0',
description='Global double-submit Flask CSRF',
packages=['dscsrf'],
py_modules=['flask'],
)
## Instruction:
Prepare for tagging for PyPI
## Code After:
from distutils.core import setup
setup(name='dscsrf',
version='1.0',
description='Global double-submit Flask CSRF',
packages=['dscsrf'],
py_modules=['flask'],
author='sc4reful',
url = 'https://github.com/sc4reful/dscsrf',
keywords = ['security', 'flask', 'website', 'csrf'],
download_url = 'https://github.com/sc4reful/dscsrf/tarball/1.0',
)
|
from distutils.core import setup
setup(name='dscsrf',
version='1.0',
description='Global double-submit Flask CSRF',
packages=['dscsrf'],
py_modules=['flask'],
+ author='sc4reful',
+ url = 'https://github.com/sc4reful/dscsrf',
+ keywords = ['security', 'flask', 'website', 'csrf'],
+ download_url = 'https://github.com/sc4reful/dscsrf/tarball/1.0',
)
|
d66a6325d210b075ed9aed7b2446aaf079df7936
|
blackbelt/tasks.py
|
blackbelt/tasks.py
|
import click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
cli = BlackBelt(help='Black Belt: automate project The Apiary Way. Please provide a command.')
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
|
import click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
cli = BlackBelt(context_settings=CONTEXT_SETTINGS, help='Black Belt: automate project The Apiary Way. Please provide a command.')
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
|
Add alias -h to --help
|
Add alias -h to --help
|
Python
|
mit
|
apiaryio/black-belt
|
import click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
+ CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
- cli = BlackBelt(help='Black Belt: automate project The Apiary Way. Please provide a command.')
+ cli = BlackBelt(context_settings=CONTEXT_SETTINGS, help='Black Belt: automate project The Apiary Way. Please provide a command.')
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
|
Add alias -h to --help
|
## Code Before:
import click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
cli = BlackBelt(help='Black Belt: automate project The Apiary Way. Please provide a command.')
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
## Instruction:
Add alias -h to --help
## Code After:
import click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
cli = BlackBelt(context_settings=CONTEXT_SETTINGS, help='Black Belt: automate project The Apiary Way. Please provide a command.')
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
|
import click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
+ CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
- cli = BlackBelt(help='Black Belt: automate project The Apiary Way. Please provide a command.')
+ cli = BlackBelt(context_settings=CONTEXT_SETTINGS, help='Black Belt: automate project The Apiary Way. Please provide a command.')
? +++++++++++++++++++++++++++++++++++
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
|
c3bac71b19842d9010390996c094119ed25566ab
|
class_namespaces/scope_proxy.py
|
class_namespaces/scope_proxy.py
|
"""Base class for Namespace proxies in class creation."""
import weakref
from . import ops
from .proxy import _Proxy
_PROXY_INFOS = weakref.WeakKeyDictionary()
class _ScopeProxy(_Proxy):
"""Proxy object for manipulating namespaces during class creation."""
__slots__ = '__weakref__',
def __init__(self, dct, container):
_PROXY_INFOS[self] = container
container[self] = dct
def __dir__(self):
# This line will fire if dir(ns) is done during class creation.
return _PROXY_INFOS[self][self]
def __getattribute__(self, name):
dct = _PROXY_INFOS[self][self]
try:
return dct[name]
# These lines will fire if a non-existent namespace attribute is gotten
# during class creation.
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
_PROXY_INFOS[self][self][name] = value
def __delattr__(self, name):
ops.delete(_PROXY_INFOS[self][self], name)
def __enter__(self):
return _PROXY_INFOS[self][self].__enter__()
def __exit__(self, exc_type, exc_value, traceback):
return _PROXY_INFOS[self][self].__exit__(
exc_type, exc_value, traceback)
|
"""Base class for Namespace proxies in class creation."""
import weakref
from . import ops
from .proxy import _Proxy
_PROXY_INFOS = weakref.WeakKeyDictionary()
class _ScopeProxy(_Proxy):
"""Proxy object for manipulating namespaces during class creation."""
__slots__ = '__weakref__',
def __init__(self, dct, container):
_PROXY_INFOS[self] = container
container[self] = dct
def __dir__(self):
# This line will fire if dir(ns) is done during class creation.
return _PROXY_INFOS[self][self]
def __getattribute__(self, name):
# Have to add some dependencies back...
from .namespaces import Namespace
dct = _PROXY_INFOS[self][self]
try:
value = dct[name]
# These lines will fire if a non-existent namespace attribute is gotten
# during class creation.
except KeyError:
raise AttributeError(name)
if isinstance(value, Namespace):
value = type(self)(value)
return value
def __setattr__(self, name, value):
_PROXY_INFOS[self][self][name] = value
def __delattr__(self, name):
ops.delete(_PROXY_INFOS[self][self], name)
def __enter__(self):
return _PROXY_INFOS[self][self].__enter__()
def __exit__(self, exc_type, exc_value, traceback):
return _PROXY_INFOS[self][self].__exit__(
exc_type, exc_value, traceback)
|
Fix for bug. Overall somewhat unfortunate.
|
Fix for bug. Overall somewhat unfortunate.
|
Python
|
mit
|
mwchase/class-namespaces,mwchase/class-namespaces
|
"""Base class for Namespace proxies in class creation."""
import weakref
from . import ops
from .proxy import _Proxy
_PROXY_INFOS = weakref.WeakKeyDictionary()
class _ScopeProxy(_Proxy):
"""Proxy object for manipulating namespaces during class creation."""
__slots__ = '__weakref__',
def __init__(self, dct, container):
_PROXY_INFOS[self] = container
container[self] = dct
def __dir__(self):
# This line will fire if dir(ns) is done during class creation.
return _PROXY_INFOS[self][self]
def __getattribute__(self, name):
+ # Have to add some dependencies back...
+ from .namespaces import Namespace
dct = _PROXY_INFOS[self][self]
try:
- return dct[name]
+ value = dct[name]
# These lines will fire if a non-existent namespace attribute is gotten
# during class creation.
except KeyError:
raise AttributeError(name)
+ if isinstance(value, Namespace):
+ value = type(self)(value)
+ return value
def __setattr__(self, name, value):
_PROXY_INFOS[self][self][name] = value
def __delattr__(self, name):
ops.delete(_PROXY_INFOS[self][self], name)
def __enter__(self):
return _PROXY_INFOS[self][self].__enter__()
def __exit__(self, exc_type, exc_value, traceback):
return _PROXY_INFOS[self][self].__exit__(
exc_type, exc_value, traceback)
|
Fix for bug. Overall somewhat unfortunate.
|
## Code Before:
"""Base class for Namespace proxies in class creation."""
import weakref
from . import ops
from .proxy import _Proxy
_PROXY_INFOS = weakref.WeakKeyDictionary()
class _ScopeProxy(_Proxy):
"""Proxy object for manipulating namespaces during class creation."""
__slots__ = '__weakref__',
def __init__(self, dct, container):
_PROXY_INFOS[self] = container
container[self] = dct
def __dir__(self):
# This line will fire if dir(ns) is done during class creation.
return _PROXY_INFOS[self][self]
def __getattribute__(self, name):
dct = _PROXY_INFOS[self][self]
try:
return dct[name]
# These lines will fire if a non-existent namespace attribute is gotten
# during class creation.
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
_PROXY_INFOS[self][self][name] = value
def __delattr__(self, name):
ops.delete(_PROXY_INFOS[self][self], name)
def __enter__(self):
return _PROXY_INFOS[self][self].__enter__()
def __exit__(self, exc_type, exc_value, traceback):
return _PROXY_INFOS[self][self].__exit__(
exc_type, exc_value, traceback)
## Instruction:
Fix for bug. Overall somewhat unfortunate.
## Code After:
"""Base class for Namespace proxies in class creation."""
import weakref
from . import ops
from .proxy import _Proxy
_PROXY_INFOS = weakref.WeakKeyDictionary()
class _ScopeProxy(_Proxy):
"""Proxy object for manipulating namespaces during class creation."""
__slots__ = '__weakref__',
def __init__(self, dct, container):
_PROXY_INFOS[self] = container
container[self] = dct
def __dir__(self):
# This line will fire if dir(ns) is done during class creation.
return _PROXY_INFOS[self][self]
def __getattribute__(self, name):
# Have to add some dependencies back...
from .namespaces import Namespace
dct = _PROXY_INFOS[self][self]
try:
value = dct[name]
# These lines will fire if a non-existent namespace attribute is gotten
# during class creation.
except KeyError:
raise AttributeError(name)
if isinstance(value, Namespace):
value = type(self)(value)
return value
def __setattr__(self, name, value):
_PROXY_INFOS[self][self][name] = value
def __delattr__(self, name):
ops.delete(_PROXY_INFOS[self][self], name)
def __enter__(self):
return _PROXY_INFOS[self][self].__enter__()
def __exit__(self, exc_type, exc_value, traceback):
return _PROXY_INFOS[self][self].__exit__(
exc_type, exc_value, traceback)
|
"""Base class for Namespace proxies in class creation."""
import weakref
from . import ops
from .proxy import _Proxy
_PROXY_INFOS = weakref.WeakKeyDictionary()
class _ScopeProxy(_Proxy):
"""Proxy object for manipulating namespaces during class creation."""
__slots__ = '__weakref__',
def __init__(self, dct, container):
_PROXY_INFOS[self] = container
container[self] = dct
def __dir__(self):
# This line will fire if dir(ns) is done during class creation.
return _PROXY_INFOS[self][self]
def __getattribute__(self, name):
+ # Have to add some dependencies back...
+ from .namespaces import Namespace
dct = _PROXY_INFOS[self][self]
try:
- return dct[name]
? ^ ^^^^
+ value = dct[name]
? ^^^^ ^^
# These lines will fire if a non-existent namespace attribute is gotten
# during class creation.
except KeyError:
raise AttributeError(name)
+ if isinstance(value, Namespace):
+ value = type(self)(value)
+ return value
def __setattr__(self, name, value):
_PROXY_INFOS[self][self][name] = value
def __delattr__(self, name):
ops.delete(_PROXY_INFOS[self][self], name)
def __enter__(self):
return _PROXY_INFOS[self][self].__enter__()
def __exit__(self, exc_type, exc_value, traceback):
return _PROXY_INFOS[self][self].__exit__(
exc_type, exc_value, traceback)
|
27112881583e53d790e66d31a2bb4d2a996ee405
|
python/sparknlp/functions.py
|
python/sparknlp/functions.py
|
from pyspark.sql.functions import udf
from pyspark.sql.types import *
from pyspark.sql import DataFrame
import sys
import sparknlp
def map_annotations(f, output_type: DataType):
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
output_type
)
def map_annotations_strict(f):
from sparknlp.annotation import Annotation
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
ArrayType(Annotation.dataType())
)
def map_annotations_col(dataframe: DataFrame, f, column, output_column, output_type):
dataframe.withColumn(output_column, map_annotations(f, output_type)(column))
def filter_by_annotations_col(dataframe, f, column):
this_udf = udf(
lambda content: f(content),
BooleanType()
)
return dataframe.filter(this_udf(column))
def explode_annotations_col(dataframe: DataFrame, column, output_column):
from pyspark.sql.functions import explode
return dataframe.withColumn(output_column, explode(column))
|
from pyspark.sql.functions import udf
from pyspark.sql.types import *
from pyspark.sql import DataFrame
from sparknlp.annotation import Annotation
import sys
import sparknlp
def map_annotations(f, output_type: DataType):
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
output_type
)
def map_annotations_strict(f):
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
ArrayType(Annotation.dataType())
)
def map_annotations_col(dataframe: DataFrame, f, column, output_column, output_type):
dataframe.withColumn(output_column, map_annotations(f, output_type)(column))
def filter_by_annotations_col(dataframe, f, column):
this_udf = udf(
lambda content: f(content),
BooleanType()
)
return dataframe.filter(this_udf(column))
def explode_annotations_col(dataframe: DataFrame, column, output_column):
from pyspark.sql.functions import explode
return dataframe.withColumn(output_column, explode(column))
|
Move import to top level to avoid import fail after fist time on sys.modules hack
|
Move import to top level to avoid import fail after fist time on sys.modules hack
|
Python
|
apache-2.0
|
JohnSnowLabs/spark-nlp,JohnSnowLabs/spark-nlp,JohnSnowLabs/spark-nlp,JohnSnowLabs/spark-nlp
|
from pyspark.sql.functions import udf
from pyspark.sql.types import *
from pyspark.sql import DataFrame
+ from sparknlp.annotation import Annotation
import sys
import sparknlp
def map_annotations(f, output_type: DataType):
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
output_type
)
def map_annotations_strict(f):
- from sparknlp.annotation import Annotation
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
ArrayType(Annotation.dataType())
)
def map_annotations_col(dataframe: DataFrame, f, column, output_column, output_type):
dataframe.withColumn(output_column, map_annotations(f, output_type)(column))
def filter_by_annotations_col(dataframe, f, column):
this_udf = udf(
lambda content: f(content),
BooleanType()
)
return dataframe.filter(this_udf(column))
def explode_annotations_col(dataframe: DataFrame, column, output_column):
from pyspark.sql.functions import explode
return dataframe.withColumn(output_column, explode(column))
|
Move import to top level to avoid import fail after fist time on sys.modules hack
|
## Code Before:
from pyspark.sql.functions import udf
from pyspark.sql.types import *
from pyspark.sql import DataFrame
import sys
import sparknlp
def map_annotations(f, output_type: DataType):
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
output_type
)
def map_annotations_strict(f):
from sparknlp.annotation import Annotation
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
ArrayType(Annotation.dataType())
)
def map_annotations_col(dataframe: DataFrame, f, column, output_column, output_type):
dataframe.withColumn(output_column, map_annotations(f, output_type)(column))
def filter_by_annotations_col(dataframe, f, column):
this_udf = udf(
lambda content: f(content),
BooleanType()
)
return dataframe.filter(this_udf(column))
def explode_annotations_col(dataframe: DataFrame, column, output_column):
from pyspark.sql.functions import explode
return dataframe.withColumn(output_column, explode(column))
## Instruction:
Move import to top level to avoid import fail after fist time on sys.modules hack
## Code After:
from pyspark.sql.functions import udf
from pyspark.sql.types import *
from pyspark.sql import DataFrame
from sparknlp.annotation import Annotation
import sys
import sparknlp
def map_annotations(f, output_type: DataType):
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
output_type
)
def map_annotations_strict(f):
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
ArrayType(Annotation.dataType())
)
def map_annotations_col(dataframe: DataFrame, f, column, output_column, output_type):
dataframe.withColumn(output_column, map_annotations(f, output_type)(column))
def filter_by_annotations_col(dataframe, f, column):
this_udf = udf(
lambda content: f(content),
BooleanType()
)
return dataframe.filter(this_udf(column))
def explode_annotations_col(dataframe: DataFrame, column, output_column):
from pyspark.sql.functions import explode
return dataframe.withColumn(output_column, explode(column))
|
from pyspark.sql.functions import udf
from pyspark.sql.types import *
from pyspark.sql import DataFrame
+ from sparknlp.annotation import Annotation
import sys
import sparknlp
def map_annotations(f, output_type: DataType):
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
output_type
)
def map_annotations_strict(f):
- from sparknlp.annotation import Annotation
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
ArrayType(Annotation.dataType())
)
def map_annotations_col(dataframe: DataFrame, f, column, output_column, output_type):
dataframe.withColumn(output_column, map_annotations(f, output_type)(column))
def filter_by_annotations_col(dataframe, f, column):
this_udf = udf(
lambda content: f(content),
BooleanType()
)
return dataframe.filter(this_udf(column))
def explode_annotations_col(dataframe: DataFrame, column, output_column):
from pyspark.sql.functions import explode
return dataframe.withColumn(output_column, explode(column))
|
98c0ccec77cc6f1657c21acb3cdc07b483a9a178
|
proselint/checks/writegood/lexical_illusions.py
|
proselint/checks/writegood/lexical_illusions.py
|
from proselint.tools import existence_check, memoize
@memoize
def check(text):
"""Check the text."""
err = "WGD105"
msg = u"There's a lexical illusion here: a word is repeated."
commercialese = [
"the\sthe",
"is\sis"
]
return existence_check(text, commercialese, err, msg)
|
from proselint.tools import existence_check, memoize
@memoize
def check(text):
"""Check the text."""
err = "WGD105"
msg = u"There's a lexical illusion here: a word is repeated."
commercialese = [
"the\sthe",
]
return existence_check(text, commercialese, err, msg)
|
Remove "is is" from lexical illusions
|
Remove "is is" from lexical illusions
|
Python
|
bsd-3-clause
|
jstewmon/proselint,jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint,amperser/proselint
|
from proselint.tools import existence_check, memoize
@memoize
def check(text):
"""Check the text."""
err = "WGD105"
msg = u"There's a lexical illusion here: a word is repeated."
commercialese = [
"the\sthe",
- "is\sis"
]
return existence_check(text, commercialese, err, msg)
|
Remove "is is" from lexical illusions
|
## Code Before:
from proselint.tools import existence_check, memoize
@memoize
def check(text):
"""Check the text."""
err = "WGD105"
msg = u"There's a lexical illusion here: a word is repeated."
commercialese = [
"the\sthe",
"is\sis"
]
return existence_check(text, commercialese, err, msg)
## Instruction:
Remove "is is" from lexical illusions
## Code After:
from proselint.tools import existence_check, memoize
@memoize
def check(text):
"""Check the text."""
err = "WGD105"
msg = u"There's a lexical illusion here: a word is repeated."
commercialese = [
"the\sthe",
]
return existence_check(text, commercialese, err, msg)
|
from proselint.tools import existence_check, memoize
@memoize
def check(text):
"""Check the text."""
err = "WGD105"
msg = u"There's a lexical illusion here: a word is repeated."
commercialese = [
"the\sthe",
- "is\sis"
]
return existence_check(text, commercialese, err, msg)
|
02522262692554a499d7c0fbc8f2efe4361023f1
|
bmi_ilamb/__init__.py
|
bmi_ilamb/__init__.py
|
import os
from .bmi_ilamb import BmiIlamb
__all__ = ['BmiIlamb']
__version__ = 0.1
package_dir = os.path.dirname(__file__)
data_dir = os.path.join(package_dir, 'data')
|
import os
from .bmi_ilamb import BmiIlamb
from .config import Configuration
__all__ = ['BmiIlamb', 'Configuration']
__version__ = 0.1
package_dir = os.path.dirname(__file__)
data_dir = os.path.join(package_dir, 'data')
|
Add Configuration to package definition
|
Add Configuration to package definition
|
Python
|
mit
|
permamodel/bmi-ilamb
|
import os
from .bmi_ilamb import BmiIlamb
+ from .config import Configuration
- __all__ = ['BmiIlamb']
+ __all__ = ['BmiIlamb', 'Configuration']
__version__ = 0.1
package_dir = os.path.dirname(__file__)
data_dir = os.path.join(package_dir, 'data')
|
Add Configuration to package definition
|
## Code Before:
import os
from .bmi_ilamb import BmiIlamb
__all__ = ['BmiIlamb']
__version__ = 0.1
package_dir = os.path.dirname(__file__)
data_dir = os.path.join(package_dir, 'data')
## Instruction:
Add Configuration to package definition
## Code After:
import os
from .bmi_ilamb import BmiIlamb
from .config import Configuration
__all__ = ['BmiIlamb', 'Configuration']
__version__ = 0.1
package_dir = os.path.dirname(__file__)
data_dir = os.path.join(package_dir, 'data')
|
import os
from .bmi_ilamb import BmiIlamb
+ from .config import Configuration
- __all__ = ['BmiIlamb']
+ __all__ = ['BmiIlamb', 'Configuration']
__version__ = 0.1
package_dir = os.path.dirname(__file__)
data_dir = os.path.join(package_dir, 'data')
|
3f5bd92a11ed69592e21888838088f8baa0d6575
|
makefiles_plain_python.py
|
makefiles_plain_python.py
|
def make_a_file(i):
f = "out_python/{:05d}".format(i)
try:
with open(f, "w") as fp:
fp.write("hello")
# print("wrote", f)
except:
print("failed on", f)
def make_a_lot_of_files(how_many):
for i in range(how_many):
make_a_file(i)
return None
print(make_a_lot_of_files(100000))
|
import asyncio
@asyncio.coroutine
def make_a_file(i):
f = "out_python/{:05d}".format(i)
try:
with open(f, "w") as fp:
fp.write("hello")
# print("wrote", f)
except:
print("failed on", f)
def make_a_lot_of_files(how_many):
loop = asyncio.get_event_loop()
tasks = [asyncio.ensure_future(make_a_file(i)) for i in range(how_many)]
loop.run_until_complete(asyncio.wait(tasks))
if __name__ == "__main__":
from sys import argv
how_many = int(argv[1]) if len(argv) == 2 else 100000
make_a_lot_of_files(how_many)
|
Use asyncio in plain python example.
|
Use asyncio in plain python example.
|
Python
|
mit
|
reedwade/python-golang-linking,reedwade/python-golang-linking
|
+ import asyncio
+
+ @asyncio.coroutine
def make_a_file(i):
f = "out_python/{:05d}".format(i)
try:
with open(f, "w") as fp:
fp.write("hello")
# print("wrote", f)
except:
print("failed on", f)
def make_a_lot_of_files(how_many):
- for i in range(how_many):
- make_a_file(i)
- return None
+ loop = asyncio.get_event_loop()
+ tasks = [asyncio.ensure_future(make_a_file(i)) for i in range(how_many)]
+ loop.run_until_complete(asyncio.wait(tasks))
- print(make_a_lot_of_files(100000))
+ if __name__ == "__main__":
+ from sys import argv
+ how_many = int(argv[1]) if len(argv) == 2 else 100000
+ make_a_lot_of_files(how_many)
+
|
Use asyncio in plain python example.
|
## Code Before:
def make_a_file(i):
f = "out_python/{:05d}".format(i)
try:
with open(f, "w") as fp:
fp.write("hello")
# print("wrote", f)
except:
print("failed on", f)
def make_a_lot_of_files(how_many):
for i in range(how_many):
make_a_file(i)
return None
print(make_a_lot_of_files(100000))
## Instruction:
Use asyncio in plain python example.
## Code After:
import asyncio
@asyncio.coroutine
def make_a_file(i):
f = "out_python/{:05d}".format(i)
try:
with open(f, "w") as fp:
fp.write("hello")
# print("wrote", f)
except:
print("failed on", f)
def make_a_lot_of_files(how_many):
loop = asyncio.get_event_loop()
tasks = [asyncio.ensure_future(make_a_file(i)) for i in range(how_many)]
loop.run_until_complete(asyncio.wait(tasks))
if __name__ == "__main__":
from sys import argv
how_many = int(argv[1]) if len(argv) == 2 else 100000
make_a_lot_of_files(how_many)
|
+ import asyncio
+
+ @asyncio.coroutine
def make_a_file(i):
f = "out_python/{:05d}".format(i)
try:
with open(f, "w") as fp:
fp.write("hello")
# print("wrote", f)
except:
print("failed on", f)
def make_a_lot_of_files(how_many):
- for i in range(how_many):
- make_a_file(i)
- return None
+ loop = asyncio.get_event_loop()
+ tasks = [asyncio.ensure_future(make_a_file(i)) for i in range(how_many)]
+ loop.run_until_complete(asyncio.wait(tasks))
- print(make_a_lot_of_files(100000))
+
+ if __name__ == "__main__":
+ from sys import argv
+ how_many = int(argv[1]) if len(argv) == 2 else 100000
+ make_a_lot_of_files(how_many)
|
e818860af87cad796699e27f8dfb4ff6fc9354e8
|
h2o-py/h2o/model/autoencoder.py
|
h2o-py/h2o/model/autoencoder.py
|
from model_base import *
from metrics_base import *
class H2OAutoEncoderModel(ModelBase):
"""
Class for AutoEncoder models.
"""
def __init__(self, dest_key, model_json):
super(H2OAutoEncoderModel, self).__init__(dest_key, model_json,H2OAutoEncoderModelMetrics)
def anomaly(self,test_data):
"""
Obtain the reconstruction error for the input test_data.
:param test_data: The dataset upon which the reconstruction error is computed.
:return: Return the reconstruction error.
"""
if not test_data: raise ValueError("Must specify test data")
j = H2OConnection.post_json("Predictions/models/" + self._id + "/frames/" + test_data._id, reconstruction_error=True)
return h2o.get_frame(j["model_metrics"][0]["predictions"]["frame_id"]["name"])
|
from model_base import *
from metrics_base import *
class H2OAutoEncoderModel(ModelBase):
"""
Class for AutoEncoder models.
"""
def __init__(self, dest_key, model_json):
super(H2OAutoEncoderModel, self).__init__(dest_key, model_json,H2OAutoEncoderModelMetrics)
def anomaly(self,test_data,per_feature=False):
"""
Obtain the reconstruction error for the input test_data.
:param test_data: The dataset upon which the reconstruction error is computed.
:param per_feature: Whether to return the square reconstruction error per feature. Otherwise, return the mean square error.
:return: Return the reconstruction error.
"""
if not test_data: raise ValueError("Must specify test data")
j = H2OConnection.post_json("Predictions/models/" + self._id + "/frames/" + test_data._id, reconstruction_error=True, reconstruction_error_per_feature=per_feature)
return h2o.get_frame(j["model_metrics"][0]["predictions"]["frame_id"]["name"])
|
Add extra argument to get per-feature reconstruction error for anomaly detection from Python.
|
PUBDEV-2078: Add extra argument to get per-feature reconstruction error for
anomaly detection from Python.
|
Python
|
apache-2.0
|
kyoren/https-github.com-h2oai-h2o-3,h2oai/h2o-3,mathemage/h2o-3,h2oai/h2o-dev,mathemage/h2o-3,datachand/h2o-3,YzPaul3/h2o-3,h2oai/h2o-3,brightchen/h2o-3,mathemage/h2o-3,YzPaul3/h2o-3,h2oai/h2o-dev,datachand/h2o-3,kyoren/https-github.com-h2oai-h2o-3,printedheart/h2o-3,pchmieli/h2o-3,madmax983/h2o-3,YzPaul3/h2o-3,datachand/h2o-3,YzPaul3/h2o-3,printedheart/h2o-3,kyoren/https-github.com-h2oai-h2o-3,junwucs/h2o-3,pchmieli/h2o-3,datachand/h2o-3,junwucs/h2o-3,mathemage/h2o-3,h2oai/h2o-3,printedheart/h2o-3,junwucs/h2o-3,kyoren/https-github.com-h2oai-h2o-3,YzPaul3/h2o-3,madmax983/h2o-3,michalkurka/h2o-3,junwucs/h2o-3,printedheart/h2o-3,datachand/h2o-3,pchmieli/h2o-3,michalkurka/h2o-3,printedheart/h2o-3,brightchen/h2o-3,h2oai/h2o-dev,jangorecki/h2o-3,madmax983/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,YzPaul3/h2o-3,jangorecki/h2o-3,h2oai/h2o-dev,jangorecki/h2o-3,brightchen/h2o-3,pchmieli/h2o-3,brightchen/h2o-3,spennihana/h2o-3,junwucs/h2o-3,mathemage/h2o-3,printedheart/h2o-3,madmax983/h2o-3,h2oai/h2o-3,jangorecki/h2o-3,kyoren/https-github.com-h2oai-h2o-3,madmax983/h2o-3,datachand/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,brightchen/h2o-3,jangorecki/h2o-3,madmax983/h2o-3,junwucs/h2o-3,jangorecki/h2o-3,h2oai/h2o-3,spennihana/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,pchmieli/h2o-3,junwucs/h2o-3,mathemage/h2o-3,datachand/h2o-3,kyoren/https-github.com-h2oai-h2o-3,jangorecki/h2o-3,spennihana/h2o-3,spennihana/h2o-3,spennihana/h2o-3,madmax983/h2o-3,spennihana/h2o-3,h2oai/h2o-3,YzPaul3/h2o-3,h2oai/h2o-dev,spennihana/h2o-3,brightchen/h2o-3,pchmieli/h2o-3,h2oai/h2o-dev,brightchen/h2o-3,kyoren/https-github.com-h2oai-h2o-3,michalkurka/h2o-3,pchmieli/h2o-3,mathemage/h2o-3,printedheart/h2o-3
|
from model_base import *
from metrics_base import *
class H2OAutoEncoderModel(ModelBase):
"""
Class for AutoEncoder models.
"""
def __init__(self, dest_key, model_json):
super(H2OAutoEncoderModel, self).__init__(dest_key, model_json,H2OAutoEncoderModelMetrics)
- def anomaly(self,test_data):
+ def anomaly(self,test_data,per_feature=False):
"""
Obtain the reconstruction error for the input test_data.
:param test_data: The dataset upon which the reconstruction error is computed.
+ :param per_feature: Whether to return the square reconstruction error per feature. Otherwise, return the mean square error.
:return: Return the reconstruction error.
"""
if not test_data: raise ValueError("Must specify test data")
- j = H2OConnection.post_json("Predictions/models/" + self._id + "/frames/" + test_data._id, reconstruction_error=True)
+ j = H2OConnection.post_json("Predictions/models/" + self._id + "/frames/" + test_data._id, reconstruction_error=True, reconstruction_error_per_feature=per_feature)
return h2o.get_frame(j["model_metrics"][0]["predictions"]["frame_id"]["name"])
|
Add extra argument to get per-feature reconstruction error for anomaly detection from Python.
|
## Code Before:
from model_base import *
from metrics_base import *
class H2OAutoEncoderModel(ModelBase):
"""
Class for AutoEncoder models.
"""
def __init__(self, dest_key, model_json):
super(H2OAutoEncoderModel, self).__init__(dest_key, model_json,H2OAutoEncoderModelMetrics)
def anomaly(self,test_data):
"""
Obtain the reconstruction error for the input test_data.
:param test_data: The dataset upon which the reconstruction error is computed.
:return: Return the reconstruction error.
"""
if not test_data: raise ValueError("Must specify test data")
j = H2OConnection.post_json("Predictions/models/" + self._id + "/frames/" + test_data._id, reconstruction_error=True)
return h2o.get_frame(j["model_metrics"][0]["predictions"]["frame_id"]["name"])
## Instruction:
Add extra argument to get per-feature reconstruction error for anomaly detection from Python.
## Code After:
from model_base import *
from metrics_base import *
class H2OAutoEncoderModel(ModelBase):
"""
Class for AutoEncoder models.
"""
def __init__(self, dest_key, model_json):
super(H2OAutoEncoderModel, self).__init__(dest_key, model_json,H2OAutoEncoderModelMetrics)
def anomaly(self,test_data,per_feature=False):
"""
Obtain the reconstruction error for the input test_data.
:param test_data: The dataset upon which the reconstruction error is computed.
:param per_feature: Whether to return the square reconstruction error per feature. Otherwise, return the mean square error.
:return: Return the reconstruction error.
"""
if not test_data: raise ValueError("Must specify test data")
j = H2OConnection.post_json("Predictions/models/" + self._id + "/frames/" + test_data._id, reconstruction_error=True, reconstruction_error_per_feature=per_feature)
return h2o.get_frame(j["model_metrics"][0]["predictions"]["frame_id"]["name"])
|
from model_base import *
from metrics_base import *
class H2OAutoEncoderModel(ModelBase):
"""
Class for AutoEncoder models.
"""
def __init__(self, dest_key, model_json):
super(H2OAutoEncoderModel, self).__init__(dest_key, model_json,H2OAutoEncoderModelMetrics)
- def anomaly(self,test_data):
+ def anomaly(self,test_data,per_feature=False):
? ++++++++++++++++++
"""
Obtain the reconstruction error for the input test_data.
:param test_data: The dataset upon which the reconstruction error is computed.
+ :param per_feature: Whether to return the square reconstruction error per feature. Otherwise, return the mean square error.
:return: Return the reconstruction error.
"""
if not test_data: raise ValueError("Must specify test data")
- j = H2OConnection.post_json("Predictions/models/" + self._id + "/frames/" + test_data._id, reconstruction_error=True)
+ j = H2OConnection.post_json("Predictions/models/" + self._id + "/frames/" + test_data._id, reconstruction_error=True, reconstruction_error_per_feature=per_feature)
? ++++++++++++++++++++++++++++++++++++++++++++++
return h2o.get_frame(j["model_metrics"][0]["predictions"]["frame_id"]["name"])
|
b3b85d3a481e4b2cf9df37666a9527ccf8a13bfc
|
build/fbcode_builder/specs/fbthrift.py
|
build/fbcode_builder/specs/fbthrift.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separete spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.fmt as fmt
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
return {
'depends_on': [folly, fizz, fmt, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separete spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
|
Migrate from Folly Format to fmt
|
Migrate from Folly Format to fmt
Summary: Migrate from Folly Format to fmt which provides smaller compile times and per-call binary code size.
Reviewed By: alandau
Differential Revision: D14954926
fbshipit-source-id: 9d2c39e74a5d11e0f90c8ad0d71b79424c56747f
|
Python
|
unknown
|
phoad/rsocket-cpp,rsocket/rsocket-cpp,ReactiveSocket/reactivesocket-cpp,rsocket/rsocket-cpp,phoad/rsocket-cpp,ReactiveSocket/reactivesocket-cpp,phoad/rsocket-cpp,phoad/rsocket-cpp,rsocket/rsocket-cpp,rsocket/rsocket-cpp,phoad/rsocket-cpp,ReactiveSocket/reactivesocket-cpp
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
+ import specs.fmt as fmt
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
return {
- 'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
+ 'depends_on': [folly, fizz, fmt, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separete spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
|
Migrate from Folly Format to fmt
|
## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separete spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
## Instruction:
Migrate from Folly Format to fmt
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.fmt as fmt
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
return {
'depends_on': [folly, fizz, fmt, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separete spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
+ import specs.fmt as fmt
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
return {
- 'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
+ 'depends_on': [folly, fizz, fmt, sodium, rsocket, wangle, zstd],
? +++++
'steps': [
# This isn't a separete spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
|
481028f075bf46696b8adc5904663e97bc883c52
|
notfound.py
|
notfound.py
|
from google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
|
from google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
self.error(404)
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
|
Return HTTP Status Code 404 for not found errors
|
Return HTTP Status Code 404 for not found errors
|
Python
|
mit
|
mback2k/appengine-oauth-profile,mback2k/appengine-oauth-profile
|
from google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
+ self.error(404)
+
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
|
Return HTTP Status Code 404 for not found errors
|
## Code Before:
from google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
## Instruction:
Return HTTP Status Code 404 for not found errors
## Code After:
from google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
self.error(404)
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
|
from google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
+ self.error(404)
+
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
|
faa1d74ccbfea43697c08db59db467b7a99ccb62
|
optimize/py/main.py
|
optimize/py/main.py
|
from scipy import optimize as o
import clean as c
def minimize(func, guess):
return o.minimize(func, guess)
def minimize_scalar(func, bracket=None, bounds=None, args=(), method='brent', tol=None, options=None):
return o.minimize_scalar(func, bracket, bounds, args, method, tol, options)
|
from scipy import optimize as o
import clean as c
def minimize(func, guess):
return o.minimize(func, guess)
def minimize_scalar(func, options):
bracket = options['bracket']
bounds = options['bounds']
method = options['method']
tol = options['tol']
options = options['options']
return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
|
Allow options to be passed into minimization
|
Allow options to be passed into minimization
|
Python
|
mit
|
acjones617/scipy-node,acjones617/scipy-node
|
from scipy import optimize as o
import clean as c
def minimize(func, guess):
return o.minimize(func, guess)
- def minimize_scalar(func, bracket=None, bounds=None, args=(), method='brent', tol=None, options=None):
+
+
+ def minimize_scalar(func, options):
+ bracket = options['bracket']
+ bounds = options['bounds']
+ method = options['method']
+ tol = options['tol']
+ options = options['options']
+
- return o.minimize_scalar(func, bracket, bounds, args, method, tol, options)
+ return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
+
|
Allow options to be passed into minimization
|
## Code Before:
from scipy import optimize as o
import clean as c
def minimize(func, guess):
return o.minimize(func, guess)
def minimize_scalar(func, bracket=None, bounds=None, args=(), method='brent', tol=None, options=None):
return o.minimize_scalar(func, bracket, bounds, args, method, tol, options)
## Instruction:
Allow options to be passed into minimization
## Code After:
from scipy import optimize as o
import clean as c
def minimize(func, guess):
return o.minimize(func, guess)
def minimize_scalar(func, options):
bracket = options['bracket']
bounds = options['bounds']
method = options['method']
tol = options['tol']
options = options['options']
return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
|
from scipy import optimize as o
import clean as c
def minimize(func, guess):
return o.minimize(func, guess)
- def minimize_scalar(func, bracket=None, bounds=None, args=(), method='brent', tol=None, options=None):
+
+
+ def minimize_scalar(func, options):
+ bracket = options['bracket']
+ bounds = options['bounds']
+ method = options['method']
+ tol = options['tol']
+ options = options['options']
+
- return o.minimize_scalar(func, bracket, bounds, args, method, tol, options)
? ^^^^^
+ return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
? ++++++++ ^^^^^^ +++++++ ++++ ++++++++
|
4b097d7d343523c99b50dc910b62bf29eb7c4081
|
vint/linting/policy/prohibit_implicit_scope_variable.py
|
vint/linting/policy/prohibit_implicit_scope_variable.py
|
from vint.ast.node_type import NodeType
from vint.linting.level import Level
from vint.linting.policy.abstract_policy import AbstractPolicy
from vint.linting.policy_registry import register_policy
from vint.ast.plugin.scope_plugin import ExplicityOfScopeVisibility
@register_policy
class ProhibitImplicitScopeVariable(AbstractPolicy):
def __init__(self):
super(ProhibitImplicitScopeVariable, self).__init__()
self.reference = 'Anti-pattern of vimrc (Scope of identifier)'
self.level = Level.STYLE_PROBLEM
def listen_node_types(self):
return [NodeType.IDENTIFIER]
def is_valid(self, identifier, lint_context):
""" Whether the identifier has a scope prefix. """
linter_config = lint_context['config']
scope_plugin = lint_context['plugins']['scope']
explicity = scope_plugin.get_explicity_of_scope_visibility(identifier)
is_autoload = scope_plugin.is_autoload_identifier(identifier)
try:
suppress_autoload = linter_config['policies'][self.name]['suppress_autoload']
except KeyError:
suppress_autoload = False
is_valid = (explicity is not ExplicityOfScopeVisibility.IMPLICIT or
is_autoload and suppress_autoload)
if not is_valid:
self._make_description(identifier, scope_plugin)
return is_valid
def _make_description(self, identifier, scope_plugin):
self.description = 'Make the scope explicit like `{good_example}`'.format(
good_example=scope_plugin.normalize_variable_name(identifier)
)
|
from vint.ast.node_type import NodeType
from vint.linting.level import Level
from vint.linting.policy.abstract_policy import AbstractPolicy
from vint.linting.policy_registry import register_policy
from vint.ast.plugin.scope_plugin import ExplicityOfScopeVisibility
@register_policy
class ProhibitImplicitScopeVariable(AbstractPolicy):
def __init__(self):
super(ProhibitImplicitScopeVariable, self).__init__()
self.reference = 'Anti-pattern of vimrc (Scope of identifier)'
self.level = Level.STYLE_PROBLEM
def listen_node_types(self):
return [NodeType.IDENTIFIER]
def is_valid(self, identifier, lint_context):
""" Whether the identifier has a scope prefix. """
scope_plugin = lint_context['plugins']['scope']
explicity = scope_plugin.get_explicity_of_scope_visibility(identifier)
is_autoload = scope_plugin.is_autoload_identifier(identifier)
config_dict = lint_context['config']
suppress_autoload = self.get_policy_option(config_dict, 'suppress_autoload', False)
is_valid = (explicity is not ExplicityOfScopeVisibility.IMPLICIT or
is_autoload and suppress_autoload)
if not is_valid:
self._make_description(identifier, scope_plugin)
return is_valid
def _make_description(self, identifier, scope_plugin):
self.description = 'Make the scope explicit like `{good_example}`'.format(
good_example=scope_plugin.normalize_variable_name(identifier)
)
|
Replace try..except with get_policy_option call
|
Replace try..except with get_policy_option call
|
Python
|
mit
|
RianFuro/vint,Kuniwak/vint,Kuniwak/vint,RianFuro/vint
|
from vint.ast.node_type import NodeType
from vint.linting.level import Level
from vint.linting.policy.abstract_policy import AbstractPolicy
from vint.linting.policy_registry import register_policy
from vint.ast.plugin.scope_plugin import ExplicityOfScopeVisibility
@register_policy
class ProhibitImplicitScopeVariable(AbstractPolicy):
def __init__(self):
super(ProhibitImplicitScopeVariable, self).__init__()
self.reference = 'Anti-pattern of vimrc (Scope of identifier)'
self.level = Level.STYLE_PROBLEM
def listen_node_types(self):
return [NodeType.IDENTIFIER]
def is_valid(self, identifier, lint_context):
""" Whether the identifier has a scope prefix. """
- linter_config = lint_context['config']
scope_plugin = lint_context['plugins']['scope']
explicity = scope_plugin.get_explicity_of_scope_visibility(identifier)
is_autoload = scope_plugin.is_autoload_identifier(identifier)
+ config_dict = lint_context['config']
+ suppress_autoload = self.get_policy_option(config_dict, 'suppress_autoload', False)
- try:
- suppress_autoload = linter_config['policies'][self.name]['suppress_autoload']
- except KeyError:
- suppress_autoload = False
is_valid = (explicity is not ExplicityOfScopeVisibility.IMPLICIT or
is_autoload and suppress_autoload)
if not is_valid:
self._make_description(identifier, scope_plugin)
return is_valid
def _make_description(self, identifier, scope_plugin):
self.description = 'Make the scope explicit like `{good_example}`'.format(
good_example=scope_plugin.normalize_variable_name(identifier)
)
|
Replace try..except with get_policy_option call
|
## Code Before:
from vint.ast.node_type import NodeType
from vint.linting.level import Level
from vint.linting.policy.abstract_policy import AbstractPolicy
from vint.linting.policy_registry import register_policy
from vint.ast.plugin.scope_plugin import ExplicityOfScopeVisibility
@register_policy
class ProhibitImplicitScopeVariable(AbstractPolicy):
def __init__(self):
super(ProhibitImplicitScopeVariable, self).__init__()
self.reference = 'Anti-pattern of vimrc (Scope of identifier)'
self.level = Level.STYLE_PROBLEM
def listen_node_types(self):
return [NodeType.IDENTIFIER]
def is_valid(self, identifier, lint_context):
""" Whether the identifier has a scope prefix. """
linter_config = lint_context['config']
scope_plugin = lint_context['plugins']['scope']
explicity = scope_plugin.get_explicity_of_scope_visibility(identifier)
is_autoload = scope_plugin.is_autoload_identifier(identifier)
try:
suppress_autoload = linter_config['policies'][self.name]['suppress_autoload']
except KeyError:
suppress_autoload = False
is_valid = (explicity is not ExplicityOfScopeVisibility.IMPLICIT or
is_autoload and suppress_autoload)
if not is_valid:
self._make_description(identifier, scope_plugin)
return is_valid
def _make_description(self, identifier, scope_plugin):
self.description = 'Make the scope explicit like `{good_example}`'.format(
good_example=scope_plugin.normalize_variable_name(identifier)
)
## Instruction:
Replace try..except with get_policy_option call
## Code After:
from vint.ast.node_type import NodeType
from vint.linting.level import Level
from vint.linting.policy.abstract_policy import AbstractPolicy
from vint.linting.policy_registry import register_policy
from vint.ast.plugin.scope_plugin import ExplicityOfScopeVisibility
@register_policy
class ProhibitImplicitScopeVariable(AbstractPolicy):
def __init__(self):
super(ProhibitImplicitScopeVariable, self).__init__()
self.reference = 'Anti-pattern of vimrc (Scope of identifier)'
self.level = Level.STYLE_PROBLEM
def listen_node_types(self):
return [NodeType.IDENTIFIER]
def is_valid(self, identifier, lint_context):
""" Whether the identifier has a scope prefix. """
scope_plugin = lint_context['plugins']['scope']
explicity = scope_plugin.get_explicity_of_scope_visibility(identifier)
is_autoload = scope_plugin.is_autoload_identifier(identifier)
config_dict = lint_context['config']
suppress_autoload = self.get_policy_option(config_dict, 'suppress_autoload', False)
is_valid = (explicity is not ExplicityOfScopeVisibility.IMPLICIT or
is_autoload and suppress_autoload)
if not is_valid:
self._make_description(identifier, scope_plugin)
return is_valid
def _make_description(self, identifier, scope_plugin):
self.description = 'Make the scope explicit like `{good_example}`'.format(
good_example=scope_plugin.normalize_variable_name(identifier)
)
|
from vint.ast.node_type import NodeType
from vint.linting.level import Level
from vint.linting.policy.abstract_policy import AbstractPolicy
from vint.linting.policy_registry import register_policy
from vint.ast.plugin.scope_plugin import ExplicityOfScopeVisibility
@register_policy
class ProhibitImplicitScopeVariable(AbstractPolicy):
def __init__(self):
super(ProhibitImplicitScopeVariable, self).__init__()
self.reference = 'Anti-pattern of vimrc (Scope of identifier)'
self.level = Level.STYLE_PROBLEM
def listen_node_types(self):
return [NodeType.IDENTIFIER]
def is_valid(self, identifier, lint_context):
""" Whether the identifier has a scope prefix. """
- linter_config = lint_context['config']
scope_plugin = lint_context['plugins']['scope']
explicity = scope_plugin.get_explicity_of_scope_visibility(identifier)
is_autoload = scope_plugin.is_autoload_identifier(identifier)
+ config_dict = lint_context['config']
+ suppress_autoload = self.get_policy_option(config_dict, 'suppress_autoload', False)
- try:
- suppress_autoload = linter_config['policies'][self.name]['suppress_autoload']
- except KeyError:
- suppress_autoload = False
is_valid = (explicity is not ExplicityOfScopeVisibility.IMPLICIT or
is_autoload and suppress_autoload)
if not is_valid:
self._make_description(identifier, scope_plugin)
return is_valid
def _make_description(self, identifier, scope_plugin):
self.description = 'Make the scope explicit like `{good_example}`'.format(
good_example=scope_plugin.normalize_variable_name(identifier)
)
|
13d9cf933e49849a3c5343e7bdbf887b9aee6097
|
busbus/entity.py
|
busbus/entity.py
|
from busbus import util
class LazyEntityProperty(object):
def __init__(self, f, *args, **kwargs):
self.f = f
self.args = args
self.kwargs = kwargs
def __call__(self):
return self.f(*self.args, **self.kwargs)
class BaseEntity(object):
def __init__(self, provider, **kwargs):
self._provider = provider
self._lazy_properties = {}
for attr in getattr(self, '__attrs__', []):
if isinstance(kwargs.get(attr, None), LazyEntityProperty):
self._lazy_properties[attr] = kwargs[attr]
else:
setattr(self, attr, kwargs.get(attr, None))
provider._new_entity(self)
def __repr__(self, args=['id']):
return u'<{0}({1})>'.format(
util.clsname(self),
','.join('{0}={1!r}'.format(i, getattr(self, i)) for i in args))
def __getattr__(self, name):
if name in self._lazy_properties:
value = self._lazy_properties[name]()
del self._lazy_properties[name]
setattr(self, name, value)
return value
else:
raise AttributeError(name)
def to_dict(self):
return dict((attr, getattr(self, attr)) for attr in self.__attrs__
if getattr(self, attr))
|
from busbus import util
class LazyEntityProperty(object):
def __init__(self, f, *args, **kwargs):
self.f = f
self.args = args
self.kwargs = kwargs
def __call__(self):
return self.f(*self.args, **self.kwargs)
class BaseEntity(object):
__repr_attrs__ = ('id',)
def __init__(self, provider, **kwargs):
self._provider = provider
self._lazy_properties = {}
for attr in getattr(self, '__attrs__', []):
if isinstance(kwargs.get(attr, None), LazyEntityProperty):
self._lazy_properties[attr] = kwargs[attr]
else:
setattr(self, attr, kwargs.get(attr, None))
provider._new_entity(self)
def __repr__(self):
return u'<{0}({1})>'.format(
util.clsname(self), ','.join(
'{0}={1!r}'.format(i, getattr(self, i))
for i in self.__repr_attrs__))
def __getattr__(self, name):
if name in self._lazy_properties:
value = self._lazy_properties[name]()
del self._lazy_properties[name]
setattr(self, name, value)
return value
else:
raise AttributeError(name)
def to_dict(self):
return dict((attr, getattr(self, attr)) for attr in self.__attrs__
if getattr(self, attr))
|
Use an instance variable instead of a non-standard argument to __repr__
|
Use an instance variable instead of a non-standard argument to __repr__
|
Python
|
mit
|
spaceboats/busbus
|
from busbus import util
class LazyEntityProperty(object):
def __init__(self, f, *args, **kwargs):
self.f = f
self.args = args
self.kwargs = kwargs
def __call__(self):
return self.f(*self.args, **self.kwargs)
class BaseEntity(object):
+ __repr_attrs__ = ('id',)
def __init__(self, provider, **kwargs):
self._provider = provider
self._lazy_properties = {}
for attr in getattr(self, '__attrs__', []):
if isinstance(kwargs.get(attr, None), LazyEntityProperty):
self._lazy_properties[attr] = kwargs[attr]
else:
setattr(self, attr, kwargs.get(attr, None))
provider._new_entity(self)
- def __repr__(self, args=['id']):
+ def __repr__(self):
return u'<{0}({1})>'.format(
- util.clsname(self),
+ util.clsname(self), ','.join(
- ','.join('{0}={1!r}'.format(i, getattr(self, i)) for i in args))
+ '{0}={1!r}'.format(i, getattr(self, i))
+ for i in self.__repr_attrs__))
def __getattr__(self, name):
if name in self._lazy_properties:
value = self._lazy_properties[name]()
del self._lazy_properties[name]
setattr(self, name, value)
return value
else:
raise AttributeError(name)
def to_dict(self):
return dict((attr, getattr(self, attr)) for attr in self.__attrs__
if getattr(self, attr))
|
Use an instance variable instead of a non-standard argument to __repr__
|
## Code Before:
from busbus import util
class LazyEntityProperty(object):
def __init__(self, f, *args, **kwargs):
self.f = f
self.args = args
self.kwargs = kwargs
def __call__(self):
return self.f(*self.args, **self.kwargs)
class BaseEntity(object):
def __init__(self, provider, **kwargs):
self._provider = provider
self._lazy_properties = {}
for attr in getattr(self, '__attrs__', []):
if isinstance(kwargs.get(attr, None), LazyEntityProperty):
self._lazy_properties[attr] = kwargs[attr]
else:
setattr(self, attr, kwargs.get(attr, None))
provider._new_entity(self)
def __repr__(self, args=['id']):
return u'<{0}({1})>'.format(
util.clsname(self),
','.join('{0}={1!r}'.format(i, getattr(self, i)) for i in args))
def __getattr__(self, name):
if name in self._lazy_properties:
value = self._lazy_properties[name]()
del self._lazy_properties[name]
setattr(self, name, value)
return value
else:
raise AttributeError(name)
def to_dict(self):
return dict((attr, getattr(self, attr)) for attr in self.__attrs__
if getattr(self, attr))
## Instruction:
Use an instance variable instead of a non-standard argument to __repr__
## Code After:
from busbus import util
class LazyEntityProperty(object):
def __init__(self, f, *args, **kwargs):
self.f = f
self.args = args
self.kwargs = kwargs
def __call__(self):
return self.f(*self.args, **self.kwargs)
class BaseEntity(object):
__repr_attrs__ = ('id',)
def __init__(self, provider, **kwargs):
self._provider = provider
self._lazy_properties = {}
for attr in getattr(self, '__attrs__', []):
if isinstance(kwargs.get(attr, None), LazyEntityProperty):
self._lazy_properties[attr] = kwargs[attr]
else:
setattr(self, attr, kwargs.get(attr, None))
provider._new_entity(self)
def __repr__(self):
return u'<{0}({1})>'.format(
util.clsname(self), ','.join(
'{0}={1!r}'.format(i, getattr(self, i))
for i in self.__repr_attrs__))
def __getattr__(self, name):
if name in self._lazy_properties:
value = self._lazy_properties[name]()
del self._lazy_properties[name]
setattr(self, name, value)
return value
else:
raise AttributeError(name)
def to_dict(self):
return dict((attr, getattr(self, attr)) for attr in self.__attrs__
if getattr(self, attr))
|
from busbus import util
class LazyEntityProperty(object):
def __init__(self, f, *args, **kwargs):
self.f = f
self.args = args
self.kwargs = kwargs
def __call__(self):
return self.f(*self.args, **self.kwargs)
class BaseEntity(object):
+ __repr_attrs__ = ('id',)
def __init__(self, provider, **kwargs):
self._provider = provider
self._lazy_properties = {}
for attr in getattr(self, '__attrs__', []):
if isinstance(kwargs.get(attr, None), LazyEntityProperty):
self._lazy_properties[attr] = kwargs[attr]
else:
setattr(self, attr, kwargs.get(attr, None))
provider._new_entity(self)
- def __repr__(self, args=['id']):
? -------------
+ def __repr__(self):
return u'<{0}({1})>'.format(
- util.clsname(self),
+ util.clsname(self), ','.join(
? ++++++++++
- ','.join('{0}={1!r}'.format(i, getattr(self, i)) for i in args))
? ^^^^^^^^^ ----------------
+ '{0}={1!r}'.format(i, getattr(self, i))
? ^^^^
+ for i in self.__repr_attrs__))
def __getattr__(self, name):
if name in self._lazy_properties:
value = self._lazy_properties[name]()
del self._lazy_properties[name]
setattr(self, name, value)
return value
else:
raise AttributeError(name)
def to_dict(self):
return dict((attr, getattr(self, attr)) for attr in self.__attrs__
if getattr(self, attr))
|
b0c3ef9a162109aa654de28d15f47d103ddbbf58
|
fireplace/cards/brawl/gift_exchange.py
|
fireplace/cards/brawl/gift_exchange.py
|
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
COST >= 5,
CLASS_CARD=Attr(Controller(OWNER), GameTag.CLASS)
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
|
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
# COST >= 5,
card_class=Attr(Controller(OWNER), GameTag.CLASS)
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
|
Drop cost filtering from TB_GiftExchange_Treasure_Spell
|
Drop cost filtering from TB_GiftExchange_Treasure_Spell
It doesn't work, and makes things harder than they need to be.
|
Python
|
agpl-3.0
|
Ragowit/fireplace,beheh/fireplace,smallnamespace/fireplace,NightKev/fireplace,smallnamespace/fireplace,Ragowit/fireplace,jleclanche/fireplace
|
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
- COST >= 5,
+ # COST >= 5,
- CLASS_CARD=Attr(Controller(OWNER), GameTag.CLASS)
+ card_class=Attr(Controller(OWNER), GameTag.CLASS)
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
|
Drop cost filtering from TB_GiftExchange_Treasure_Spell
|
## Code Before:
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
COST >= 5,
CLASS_CARD=Attr(Controller(OWNER), GameTag.CLASS)
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
## Instruction:
Drop cost filtering from TB_GiftExchange_Treasure_Spell
## Code After:
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
# COST >= 5,
card_class=Attr(Controller(OWNER), GameTag.CLASS)
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
|
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
- COST >= 5,
+ # COST >= 5,
? ++
- CLASS_CARD=Attr(Controller(OWNER), GameTag.CLASS)
? ^^^^^ ^^^^
+ card_class=Attr(Controller(OWNER), GameTag.CLASS)
? ^^^^ ^^^^^
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
|
4792515739c4ee671b86aeed39022ad8934d5d7c
|
artie/applications.py
|
artie/applications.py
|
import os
import re
import sys
import settings
triggers = set()
timers = set()
class BadApplicationError(Exception): pass
def trigger(expression):
def decorator(func):
triggers.add((re.compile(expression), func))
return decorator
def timer(time):
def decorator(func):
timers.add((time, func))
return decorator
sys.path.insert(0, settings.APPLICATION_PATH)
for filename in os.listdir(settings.APPLICATION_PATH):
if filename != '__init__.py' and filename[-3:] == '.py':
if filename == 'triggers.py':
raise BadApplicationException(
"Application file can't be called triggers.py"
)
module = filename[:-3]
if module in sys.modules:
reload(sys.modules[module])
else:
__import__(module, locals(), globals())
|
import os
import re
import sys
import settings
triggers = set()
timers = set()
class BadApplicationError(Exception): pass
def trigger(expression):
def decorator(func):
triggers.add((re.compile(expression), func))
return decorator
def timer(time):
def decorator(func):
timers.add((time, func))
return decorator
sys.path.insert(0, settings.APPLICATION_PATH)
for filename in os.listdir(settings.APPLICATION_PATH):
if filename != '__init__.py' and filename.endswith('.py'):
if filename == 'triggers.py':
raise BadApplicationException(
"Application file can't be called triggers.py"
)
module = filename[:-3]
if module in sys.modules:
reload(sys.modules[module])
else:
__import__(module, locals(), globals())
|
Use `endswith` instead of string indeces.
|
Use `endswith` instead of string indeces.
|
Python
|
mit
|
sumeet/artie
|
import os
import re
import sys
import settings
triggers = set()
timers = set()
class BadApplicationError(Exception): pass
def trigger(expression):
def decorator(func):
triggers.add((re.compile(expression), func))
return decorator
def timer(time):
def decorator(func):
timers.add((time, func))
return decorator
sys.path.insert(0, settings.APPLICATION_PATH)
for filename in os.listdir(settings.APPLICATION_PATH):
- if filename != '__init__.py' and filename[-3:] == '.py':
+ if filename != '__init__.py' and filename.endswith('.py'):
if filename == 'triggers.py':
raise BadApplicationException(
"Application file can't be called triggers.py"
)
module = filename[:-3]
if module in sys.modules:
reload(sys.modules[module])
else:
__import__(module, locals(), globals())
|
Use `endswith` instead of string indeces.
|
## Code Before:
import os
import re
import sys
import settings
triggers = set()
timers = set()
class BadApplicationError(Exception): pass
def trigger(expression):
def decorator(func):
triggers.add((re.compile(expression), func))
return decorator
def timer(time):
def decorator(func):
timers.add((time, func))
return decorator
sys.path.insert(0, settings.APPLICATION_PATH)
for filename in os.listdir(settings.APPLICATION_PATH):
if filename != '__init__.py' and filename[-3:] == '.py':
if filename == 'triggers.py':
raise BadApplicationException(
"Application file can't be called triggers.py"
)
module = filename[:-3]
if module in sys.modules:
reload(sys.modules[module])
else:
__import__(module, locals(), globals())
## Instruction:
Use `endswith` instead of string indeces.
## Code After:
import os
import re
import sys
import settings
triggers = set()
timers = set()
class BadApplicationError(Exception): pass
def trigger(expression):
def decorator(func):
triggers.add((re.compile(expression), func))
return decorator
def timer(time):
def decorator(func):
timers.add((time, func))
return decorator
sys.path.insert(0, settings.APPLICATION_PATH)
for filename in os.listdir(settings.APPLICATION_PATH):
if filename != '__init__.py' and filename.endswith('.py'):
if filename == 'triggers.py':
raise BadApplicationException(
"Application file can't be called triggers.py"
)
module = filename[:-3]
if module in sys.modules:
reload(sys.modules[module])
else:
__import__(module, locals(), globals())
|
import os
import re
import sys
import settings
triggers = set()
timers = set()
class BadApplicationError(Exception): pass
def trigger(expression):
def decorator(func):
triggers.add((re.compile(expression), func))
return decorator
def timer(time):
def decorator(func):
timers.add((time, func))
return decorator
sys.path.insert(0, settings.APPLICATION_PATH)
for filename in os.listdir(settings.APPLICATION_PATH):
- if filename != '__init__.py' and filename[-3:] == '.py':
? ^^^^^^^^^
+ if filename != '__init__.py' and filename.endswith('.py'):
? ^^^^^^^^^^ +
if filename == 'triggers.py':
raise BadApplicationException(
"Application file can't be called triggers.py"
)
module = filename[:-3]
if module in sys.modules:
reload(sys.modules[module])
else:
__import__(module, locals(), globals())
|
a90041e444edd8a88bc264db5b1a9305ba94d88f
|
commands/laws.py
|
commands/laws.py
|
@command("laws")
def echo(nick, user, channel, message):
argv = message.split(maxsplit=1)
if len(argv) == 0:
f = open('files/laws.txt', 'r')
i = 1
for line in f:
say(channel, '{}. {}'.format(i, line))
i = i + 1
f.close()
elif argv[0] == 'reset':
f = open('files/laws.txt', 'r+')
f.truncate()
f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n")
say(channel, '{}: Laws updated.'.format(nick))
f.close()
elif argv[0] == 'add' and len(argv) == 2:
f = open('files/laws.txt', 'a')
f.write("{}\n".format(argv[1]))
say(channel, '{}: Laws updated.'.format(nick))
f.close()
|
@command("laws")
def echo(nick, user, channel, message):
argv = message.split(maxsplit=1)
if len(argv) == 0:
try:
f = open('files/laws.txt', 'r')
for i,line in enumerate(f):
say(channel, '{}. {}'.format(i+1, line))
f.close()
except IOError:
say(channel,"Error: Coulh not open laws.txt!")
elif argv[0] == 'reset':
f = open('files/laws.txt', 'w')
f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n")
f.close()
say(channel, '{}: Laws updated.'.format(nick))
elif argv[0] == 'add' and len(argv) == 2:
f = open('files/laws.txt', 'a')
f.write("{}\n".format(argv[1]))
f.close()
say(channel, '{}: Laws updated.'.format(nick))
|
Handle file exceptions. Use enumerate. Err msg. Close files sooner.
|
Handle file exceptions. Use enumerate. Err msg. Close files sooner.
|
Python
|
unlicense
|
ccowmu/botler
|
@command("laws")
def echo(nick, user, channel, message):
argv = message.split(maxsplit=1)
if len(argv) == 0:
+ try:
- f = open('files/laws.txt', 'r')
+ f = open('files/laws.txt', 'r')
+ for i,line in enumerate(f):
- i = 1
- for line in f:
- say(channel, '{}. {}'.format(i, line))
+ say(channel, '{}. {}'.format(i+1, line))
- i = i + 1
- f.close()
+ f.close()
+ except IOError:
+ say(channel,"Error: Coulh not open laws.txt!")
elif argv[0] == 'reset':
- f = open('files/laws.txt', 'r+')
+ f = open('files/laws.txt', 'w')
- f.truncate()
f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n")
+ f.close()
say(channel, '{}: Laws updated.'.format(nick))
- f.close()
elif argv[0] == 'add' and len(argv) == 2:
f = open('files/laws.txt', 'a')
f.write("{}\n".format(argv[1]))
+ f.close()
say(channel, '{}: Laws updated.'.format(nick))
- f.close()
|
Handle file exceptions. Use enumerate. Err msg. Close files sooner.
|
## Code Before:
@command("laws")
def echo(nick, user, channel, message):
argv = message.split(maxsplit=1)
if len(argv) == 0:
f = open('files/laws.txt', 'r')
i = 1
for line in f:
say(channel, '{}. {}'.format(i, line))
i = i + 1
f.close()
elif argv[0] == 'reset':
f = open('files/laws.txt', 'r+')
f.truncate()
f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n")
say(channel, '{}: Laws updated.'.format(nick))
f.close()
elif argv[0] == 'add' and len(argv) == 2:
f = open('files/laws.txt', 'a')
f.write("{}\n".format(argv[1]))
say(channel, '{}: Laws updated.'.format(nick))
f.close()
## Instruction:
Handle file exceptions. Use enumerate. Err msg. Close files sooner.
## Code After:
@command("laws")
def echo(nick, user, channel, message):
argv = message.split(maxsplit=1)
if len(argv) == 0:
try:
f = open('files/laws.txt', 'r')
for i,line in enumerate(f):
say(channel, '{}. {}'.format(i+1, line))
f.close()
except IOError:
say(channel,"Error: Coulh not open laws.txt!")
elif argv[0] == 'reset':
f = open('files/laws.txt', 'w')
f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n")
f.close()
say(channel, '{}: Laws updated.'.format(nick))
elif argv[0] == 'add' and len(argv) == 2:
f = open('files/laws.txt', 'a')
f.write("{}\n".format(argv[1]))
f.close()
say(channel, '{}: Laws updated.'.format(nick))
|
@command("laws")
def echo(nick, user, channel, message):
argv = message.split(maxsplit=1)
if len(argv) == 0:
+ try:
- f = open('files/laws.txt', 'r')
+ f = open('files/laws.txt', 'r')
? ++++
+ for i,line in enumerate(f):
- i = 1
- for line in f:
- say(channel, '{}. {}'.format(i, line))
+ say(channel, '{}. {}'.format(i+1, line))
? ++++ ++
- i = i + 1
- f.close()
+ f.close()
? ++++
+ except IOError:
+ say(channel,"Error: Coulh not open laws.txt!")
elif argv[0] == 'reset':
- f = open('files/laws.txt', 'r+')
? ^^
+ f = open('files/laws.txt', 'w')
? ^
- f.truncate()
f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n")
+ f.close()
say(channel, '{}: Laws updated.'.format(nick))
- f.close()
elif argv[0] == 'add' and len(argv) == 2:
f = open('files/laws.txt', 'a')
f.write("{}\n".format(argv[1]))
+ f.close()
say(channel, '{}: Laws updated.'.format(nick))
- f.close()
|
265ed91b7e7f204926e7c5f9d2fbe76f447f7955
|
gitfs/views/read_only.py
|
gitfs/views/read_only.py
|
import os
from errno import EROFS
from fuse import FuseOSError
from gitfs import FuseMethodNotImplemented
from .view import View
class ReadOnlyView(View):
def getxattr(self, path, fh):
raise FuseMethodNotImplemented
def open(self, path, flags):
return 0
def create(self, path, fh):
raise FuseOSError(EROFS)
def write(self, path, fh):
raise FuseOSError(EROFS)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
return 0
def flush(self, path, fh):
return 0
def release(self, path, fh):
return 0
def access(self, path, amode):
return 0
def mkdir(self, path, mode):
raise FuseOSError(EROFS)
def utimens(self, path, times=None):
raise FuseOSError(EROFS)
def chown(self, path, uid, gid):
raise FuseOSError(EROFS)
|
import os
from errno import EROFS
from fuse import FuseOSError
from gitfs import FuseMethodNotImplemented
from .view import View
class ReadOnlyView(View):
def getxattr(self, path, fh):
raise FuseMethodNotImplemented
def open(self, path, flags):
return 0
def create(self, path, fh):
raise FuseOSError(EROFS)
def write(self, path, fh):
raise FuseOSError(EROFS)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
return 0
def flush(self, path, fh):
return 0
def release(self, path, fh):
return 0
def access(self, path, amode):
return 0
def mkdir(self, path, mode):
raise FuseOSError(EROFS)
def utimens(self, path, times=None):
raise FuseOSError(EROFS)
def chown(self, path, uid, gid):
raise FuseOSError(EROFS)
def chmod(self, path, mode):
raise FuseOSError(EROFS)
|
Raise read-only filesystem when the user wants to chmod in /history.
|
Raise read-only filesystem when the user wants to chmod in /history.
|
Python
|
apache-2.0
|
PressLabs/gitfs,bussiere/gitfs,rowhit/gitfs,ksmaheshkumar/gitfs,PressLabs/gitfs
|
import os
from errno import EROFS
from fuse import FuseOSError
from gitfs import FuseMethodNotImplemented
from .view import View
class ReadOnlyView(View):
def getxattr(self, path, fh):
raise FuseMethodNotImplemented
def open(self, path, flags):
return 0
def create(self, path, fh):
raise FuseOSError(EROFS)
def write(self, path, fh):
raise FuseOSError(EROFS)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
return 0
def flush(self, path, fh):
return 0
def release(self, path, fh):
return 0
def access(self, path, amode):
return 0
def mkdir(self, path, mode):
raise FuseOSError(EROFS)
def utimens(self, path, times=None):
raise FuseOSError(EROFS)
def chown(self, path, uid, gid):
raise FuseOSError(EROFS)
+ def chmod(self, path, mode):
+ raise FuseOSError(EROFS)
+
|
Raise read-only filesystem when the user wants to chmod in /history.
|
## Code Before:
import os
from errno import EROFS
from fuse import FuseOSError
from gitfs import FuseMethodNotImplemented
from .view import View
class ReadOnlyView(View):
def getxattr(self, path, fh):
raise FuseMethodNotImplemented
def open(self, path, flags):
return 0
def create(self, path, fh):
raise FuseOSError(EROFS)
def write(self, path, fh):
raise FuseOSError(EROFS)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
return 0
def flush(self, path, fh):
return 0
def release(self, path, fh):
return 0
def access(self, path, amode):
return 0
def mkdir(self, path, mode):
raise FuseOSError(EROFS)
def utimens(self, path, times=None):
raise FuseOSError(EROFS)
def chown(self, path, uid, gid):
raise FuseOSError(EROFS)
## Instruction:
Raise read-only filesystem when the user wants to chmod in /history.
## Code After:
import os
from errno import EROFS
from fuse import FuseOSError
from gitfs import FuseMethodNotImplemented
from .view import View
class ReadOnlyView(View):
def getxattr(self, path, fh):
raise FuseMethodNotImplemented
def open(self, path, flags):
return 0
def create(self, path, fh):
raise FuseOSError(EROFS)
def write(self, path, fh):
raise FuseOSError(EROFS)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
return 0
def flush(self, path, fh):
return 0
def release(self, path, fh):
return 0
def access(self, path, amode):
return 0
def mkdir(self, path, mode):
raise FuseOSError(EROFS)
def utimens(self, path, times=None):
raise FuseOSError(EROFS)
def chown(self, path, uid, gid):
raise FuseOSError(EROFS)
def chmod(self, path, mode):
raise FuseOSError(EROFS)
|
import os
from errno import EROFS
from fuse import FuseOSError
from gitfs import FuseMethodNotImplemented
from .view import View
class ReadOnlyView(View):
def getxattr(self, path, fh):
raise FuseMethodNotImplemented
def open(self, path, flags):
return 0
def create(self, path, fh):
raise FuseOSError(EROFS)
def write(self, path, fh):
raise FuseOSError(EROFS)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
return 0
def flush(self, path, fh):
return 0
def release(self, path, fh):
return 0
def access(self, path, amode):
return 0
def mkdir(self, path, mode):
raise FuseOSError(EROFS)
def utimens(self, path, times=None):
raise FuseOSError(EROFS)
def chown(self, path, uid, gid):
raise FuseOSError(EROFS)
+
+ def chmod(self, path, mode):
+ raise FuseOSError(EROFS)
|
461f23a52569067a26c18dbf70a830c0494c0342
|
deepchem/models/torch_models/__init__.py
|
deepchem/models/torch_models/__init__.py
|
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
from deepchem.models.torch_models.layers import CNNModule
from deepchem.models.torch_models.cnn import CNN
|
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
from deepchem.models.torch_models.layers import CNNModule, NeighborList
from deepchem.models.torch_models.cnn import CNN
|
Add layer to module imports
|
Add layer to module imports
|
Python
|
mit
|
deepchem/deepchem,deepchem/deepchem
|
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
- from deepchem.models.torch_models.layers import CNNModule
+ from deepchem.models.torch_models.layers import CNNModule, NeighborList
from deepchem.models.torch_models.cnn import CNN
|
Add layer to module imports
|
## Code Before:
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
from deepchem.models.torch_models.layers import CNNModule
from deepchem.models.torch_models.cnn import CNN
## Instruction:
Add layer to module imports
## Code After:
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
from deepchem.models.torch_models.layers import CNNModule, NeighborList
from deepchem.models.torch_models.cnn import CNN
|
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
- from deepchem.models.torch_models.layers import CNNModule
+ from deepchem.models.torch_models.layers import CNNModule, NeighborList
? ++++++++++++++
from deepchem.models.torch_models.cnn import CNN
|
59057c28746220cd0c9d9c78d4fe18b6480e8dda
|
vertica_python/vertica/messages/backend_messages/empty_query_response.py
|
vertica_python/vertica/messages/backend_messages/empty_query_response.py
|
from vertica_python.vertica.messages.message import BackendMessage
class EmptyQueryResponse(BackendMessage):
pass
EmptyQueryResponse._message_id(b'I')
|
from vertica_python.vertica.messages.message import BackendMessage
class EmptyQueryResponse(BackendMessage):
def __init__(self, data=None):
self.data = data
EmptyQueryResponse._message_id(b'I')
|
Add init for empty query response
|
Add init for empty query response
|
Python
|
apache-2.0
|
uber/vertica-python
|
from vertica_python.vertica.messages.message import BackendMessage
class EmptyQueryResponse(BackendMessage):
- pass
+ def __init__(self, data=None):
+ self.data = data
EmptyQueryResponse._message_id(b'I')
|
Add init for empty query response
|
## Code Before:
from vertica_python.vertica.messages.message import BackendMessage
class EmptyQueryResponse(BackendMessage):
pass
EmptyQueryResponse._message_id(b'I')
## Instruction:
Add init for empty query response
## Code After:
from vertica_python.vertica.messages.message import BackendMessage
class EmptyQueryResponse(BackendMessage):
def __init__(self, data=None):
self.data = data
EmptyQueryResponse._message_id(b'I')
|
from vertica_python.vertica.messages.message import BackendMessage
class EmptyQueryResponse(BackendMessage):
- pass
+ def __init__(self, data=None):
+ self.data = data
EmptyQueryResponse._message_id(b'I')
|
7c1538c9991badf205214e9f4e567cc4f1879ce6
|
pasta/base/ast_constants.py
|
pasta/base/ast_constants.py
|
"""Constants relevant to ast code."""
import ast
NODE_TYPE_TO_TOKENS = {
ast.Add: ('+',),
ast.Sub: ('-',),
ast.Mult: ('*',),
ast.Div: ('/',),
ast.Mod: ('%',),
ast.BitAnd: ('&',),
ast.BitOr: ('|',),
ast.BitXor: ('^',),
ast.FloorDiv: ('//',),
ast.Pow: ('**',),
ast.LShift: ('<<',),
ast.RShift: ('>>',),
ast.BitAnd: ('&',),
ast.BitOr: ('|',),
ast.BitXor: ('^',),
ast.FloorDiv: ('//',),
ast.Invert: ('~',),
ast.Not: ('not',),
ast.UAdd: ('+',),
ast.USub: ('-',),
ast.And: ('and',),
ast.Or: ('or',),
ast.Eq: ('==',),
ast.NotEq: ('!=',),
ast.Lt: ('<',),
ast.LtE: ('<=',),
ast.Gt: ('>',),
ast.GtE: ('>=',),
ast.Is: ('is',),
ast.IsNot: ('is', 'not',),
ast.In: ('in',),
ast.NotIn: ('not', 'in',),
}
if hasattr(ast, 'MatMult'):
NODE_TYPE_TO_TOKENS[ast.MatMult] = ('@',)
|
"""Constants relevant to ast code."""
import ast
NODE_TYPE_TO_TOKENS = {
ast.Add: ('+',),
ast.And: ('and',),
ast.BitAnd: ('&',),
ast.BitOr: ('|',),
ast.BitXor: ('^',),
ast.Div: ('/',),
ast.Eq: ('==',),
ast.FloorDiv: ('//',),
ast.Gt: ('>',),
ast.GtE: ('>=',),
ast.In: ('in',),
ast.Invert: ('~',),
ast.Is: ('is',),
ast.IsNot: ('is', 'not',),
ast.LShift: ('<<',),
ast.Lt: ('<',),
ast.LtE: ('<=',),
ast.Mod: ('%',),
ast.Mult: ('*',),
ast.Not: ('not',),
ast.NotEq: ('!=',),
ast.NotIn: ('not', 'in',),
ast.Or: ('or',),
ast.Pow: ('**',),
ast.RShift: ('>>',),
ast.Sub: ('-',),
ast.UAdd: ('+',),
ast.USub: ('-',),
}
if hasattr(ast, 'MatMult'):
NODE_TYPE_TO_TOKENS[ast.MatMult] = ('@',)
|
Sort ast nodes in constants + remove duplicates
|
Sort ast nodes in constants + remove duplicates
|
Python
|
apache-2.0
|
google/pasta
|
"""Constants relevant to ast code."""
import ast
NODE_TYPE_TO_TOKENS = {
ast.Add: ('+',),
- ast.Sub: ('-',),
- ast.Mult: ('*',),
- ast.Div: ('/',),
- ast.Mod: ('%',),
+ ast.And: ('and',),
ast.BitAnd: ('&',),
ast.BitOr: ('|',),
ast.BitXor: ('^',),
+ ast.Div: ('/',),
+ ast.Eq: ('==',),
ast.FloorDiv: ('//',),
+ ast.Gt: ('>',),
+ ast.GtE: ('>=',),
+ ast.In: ('in',),
+ ast.Invert: ('~',),
+ ast.Is: ('is',),
+ ast.IsNot: ('is', 'not',),
+ ast.LShift: ('<<',),
+ ast.Lt: ('<',),
+ ast.LtE: ('<=',),
+ ast.Mod: ('%',),
+ ast.Mult: ('*',),
+ ast.Not: ('not',),
+ ast.NotEq: ('!=',),
+ ast.NotIn: ('not', 'in',),
+ ast.Or: ('or',),
ast.Pow: ('**',),
- ast.LShift: ('<<',),
ast.RShift: ('>>',),
- ast.BitAnd: ('&',),
- ast.BitOr: ('|',),
+ ast.Sub: ('-',),
- ast.BitXor: ('^',),
- ast.FloorDiv: ('//',),
- ast.Invert: ('~',),
- ast.Not: ('not',),
ast.UAdd: ('+',),
ast.USub: ('-',),
- ast.And: ('and',),
- ast.Or: ('or',),
- ast.Eq: ('==',),
- ast.NotEq: ('!=',),
- ast.Lt: ('<',),
- ast.LtE: ('<=',),
- ast.Gt: ('>',),
- ast.GtE: ('>=',),
- ast.Is: ('is',),
- ast.IsNot: ('is', 'not',),
- ast.In: ('in',),
- ast.NotIn: ('not', 'in',),
}
if hasattr(ast, 'MatMult'):
NODE_TYPE_TO_TOKENS[ast.MatMult] = ('@',)
|
Sort ast nodes in constants + remove duplicates
|
## Code Before:
"""Constants relevant to ast code."""
import ast
NODE_TYPE_TO_TOKENS = {
ast.Add: ('+',),
ast.Sub: ('-',),
ast.Mult: ('*',),
ast.Div: ('/',),
ast.Mod: ('%',),
ast.BitAnd: ('&',),
ast.BitOr: ('|',),
ast.BitXor: ('^',),
ast.FloorDiv: ('//',),
ast.Pow: ('**',),
ast.LShift: ('<<',),
ast.RShift: ('>>',),
ast.BitAnd: ('&',),
ast.BitOr: ('|',),
ast.BitXor: ('^',),
ast.FloorDiv: ('//',),
ast.Invert: ('~',),
ast.Not: ('not',),
ast.UAdd: ('+',),
ast.USub: ('-',),
ast.And: ('and',),
ast.Or: ('or',),
ast.Eq: ('==',),
ast.NotEq: ('!=',),
ast.Lt: ('<',),
ast.LtE: ('<=',),
ast.Gt: ('>',),
ast.GtE: ('>=',),
ast.Is: ('is',),
ast.IsNot: ('is', 'not',),
ast.In: ('in',),
ast.NotIn: ('not', 'in',),
}
if hasattr(ast, 'MatMult'):
NODE_TYPE_TO_TOKENS[ast.MatMult] = ('@',)
## Instruction:
Sort ast nodes in constants + remove duplicates
## Code After:
"""Constants relevant to ast code."""
import ast
NODE_TYPE_TO_TOKENS = {
ast.Add: ('+',),
ast.And: ('and',),
ast.BitAnd: ('&',),
ast.BitOr: ('|',),
ast.BitXor: ('^',),
ast.Div: ('/',),
ast.Eq: ('==',),
ast.FloorDiv: ('//',),
ast.Gt: ('>',),
ast.GtE: ('>=',),
ast.In: ('in',),
ast.Invert: ('~',),
ast.Is: ('is',),
ast.IsNot: ('is', 'not',),
ast.LShift: ('<<',),
ast.Lt: ('<',),
ast.LtE: ('<=',),
ast.Mod: ('%',),
ast.Mult: ('*',),
ast.Not: ('not',),
ast.NotEq: ('!=',),
ast.NotIn: ('not', 'in',),
ast.Or: ('or',),
ast.Pow: ('**',),
ast.RShift: ('>>',),
ast.Sub: ('-',),
ast.UAdd: ('+',),
ast.USub: ('-',),
}
if hasattr(ast, 'MatMult'):
NODE_TYPE_TO_TOKENS[ast.MatMult] = ('@',)
|
"""Constants relevant to ast code."""
import ast
NODE_TYPE_TO_TOKENS = {
ast.Add: ('+',),
- ast.Sub: ('-',),
- ast.Mult: ('*',),
- ast.Div: ('/',),
- ast.Mod: ('%',),
? ^^ ^
+ ast.And: ('and',),
? ^^ ^^^
ast.BitAnd: ('&',),
ast.BitOr: ('|',),
ast.BitXor: ('^',),
+ ast.Div: ('/',),
+ ast.Eq: ('==',),
ast.FloorDiv: ('//',),
+ ast.Gt: ('>',),
+ ast.GtE: ('>=',),
+ ast.In: ('in',),
+ ast.Invert: ('~',),
+ ast.Is: ('is',),
+ ast.IsNot: ('is', 'not',),
+ ast.LShift: ('<<',),
+ ast.Lt: ('<',),
+ ast.LtE: ('<=',),
+ ast.Mod: ('%',),
+ ast.Mult: ('*',),
+ ast.Not: ('not',),
+ ast.NotEq: ('!=',),
+ ast.NotIn: ('not', 'in',),
+ ast.Or: ('or',),
ast.Pow: ('**',),
- ast.LShift: ('<<',),
ast.RShift: ('>>',),
- ast.BitAnd: ('&',),
- ast.BitOr: ('|',),
? ^^^^^ ^
+ ast.Sub: ('-',),
? ^^^ ^
- ast.BitXor: ('^',),
- ast.FloorDiv: ('//',),
- ast.Invert: ('~',),
- ast.Not: ('not',),
ast.UAdd: ('+',),
ast.USub: ('-',),
- ast.And: ('and',),
- ast.Or: ('or',),
- ast.Eq: ('==',),
- ast.NotEq: ('!=',),
- ast.Lt: ('<',),
- ast.LtE: ('<=',),
- ast.Gt: ('>',),
- ast.GtE: ('>=',),
- ast.Is: ('is',),
- ast.IsNot: ('is', 'not',),
- ast.In: ('in',),
- ast.NotIn: ('not', 'in',),
}
if hasattr(ast, 'MatMult'):
NODE_TYPE_TO_TOKENS[ast.MatMult] = ('@',)
|
4c4b09e1bfbd60bfe1453c5a3b3e8f13d2eaa4ce
|
comet/tcp/test/test_voeventsubscriber.py
|
comet/tcp/test/test_voeventsubscriber.py
|
from twisted.trial import unittest
from twisted.test import proto_helpers
from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN
from ..protocol import VOEventSubscriber
from ..protocol import VOEventSubscriberFactory
class VOEventSubscriberFactoryTestCase(unittest.TestCase):
def setUp(self):
factory = VOEventSubscriberFactory(DUMMY_IVORN)
self.proto = factory.buildProtocol(('127.0.0.1', 0))
self.proto.makeConnection(proto_helpers.StringTransport())
def test_protocol(self):
self.assertIsInstance(self.proto, VOEventSubscriber)
def tearDown(self):
self.proto.connectionLost()
|
from twisted.internet import task
from twisted.trial import unittest
from twisted.test import proto_helpers
from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN
from ..protocol import VOEventSubscriber
from ..protocol import VOEventSubscriberFactory
class VOEventSubscriberFactoryTestCase(unittest.TestCase):
def setUp(self):
factory = VOEventSubscriberFactory(DUMMY_IVORN)
self.proto = factory.buildProtocol(('127.0.0.1', 0))
self.proto.makeConnection(proto_helpers.StringTransport())
def test_protocol(self):
self.assertIsInstance(self.proto, VOEventSubscriber)
def tearDown(self):
self.proto.connectionLost()
class VOEventSubscriberTimeoutTestCase(unittest.TestCase):
def setUp(self):
factory = VOEventSubscriberFactory(DUMMY_IVORN)
self.proto = factory.buildProtocol(('127.0.0.1', 0))
self.clock = task.Clock()
self.proto.callLater = self.clock.callLater
self.tr = proto_helpers.StringTransport()
self.proto.makeConnection(self.tr)
def test_timeout(self):
self.clock.advance(self.proto.ALIVE_INTERVAL)
self.assertEqual(self.tr.disconnecting, True)
|
Add test for subscriber timeout
|
Add test for subscriber timeout
|
Python
|
bsd-2-clause
|
jdswinbank/Comet,jdswinbank/Comet
|
+ from twisted.internet import task
from twisted.trial import unittest
from twisted.test import proto_helpers
from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN
from ..protocol import VOEventSubscriber
from ..protocol import VOEventSubscriberFactory
class VOEventSubscriberFactoryTestCase(unittest.TestCase):
def setUp(self):
factory = VOEventSubscriberFactory(DUMMY_IVORN)
self.proto = factory.buildProtocol(('127.0.0.1', 0))
self.proto.makeConnection(proto_helpers.StringTransport())
def test_protocol(self):
self.assertIsInstance(self.proto, VOEventSubscriber)
def tearDown(self):
self.proto.connectionLost()
+ class VOEventSubscriberTimeoutTestCase(unittest.TestCase):
+ def setUp(self):
+ factory = VOEventSubscriberFactory(DUMMY_IVORN)
+ self.proto = factory.buildProtocol(('127.0.0.1', 0))
+ self.clock = task.Clock()
+ self.proto.callLater = self.clock.callLater
+ self.tr = proto_helpers.StringTransport()
+ self.proto.makeConnection(self.tr)
+
+ def test_timeout(self):
+ self.clock.advance(self.proto.ALIVE_INTERVAL)
+ self.assertEqual(self.tr.disconnecting, True)
+
|
Add test for subscriber timeout
|
## Code Before:
from twisted.trial import unittest
from twisted.test import proto_helpers
from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN
from ..protocol import VOEventSubscriber
from ..protocol import VOEventSubscriberFactory
class VOEventSubscriberFactoryTestCase(unittest.TestCase):
def setUp(self):
factory = VOEventSubscriberFactory(DUMMY_IVORN)
self.proto = factory.buildProtocol(('127.0.0.1', 0))
self.proto.makeConnection(proto_helpers.StringTransport())
def test_protocol(self):
self.assertIsInstance(self.proto, VOEventSubscriber)
def tearDown(self):
self.proto.connectionLost()
## Instruction:
Add test for subscriber timeout
## Code After:
from twisted.internet import task
from twisted.trial import unittest
from twisted.test import proto_helpers
from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN
from ..protocol import VOEventSubscriber
from ..protocol import VOEventSubscriberFactory
class VOEventSubscriberFactoryTestCase(unittest.TestCase):
def setUp(self):
factory = VOEventSubscriberFactory(DUMMY_IVORN)
self.proto = factory.buildProtocol(('127.0.0.1', 0))
self.proto.makeConnection(proto_helpers.StringTransport())
def test_protocol(self):
self.assertIsInstance(self.proto, VOEventSubscriber)
def tearDown(self):
self.proto.connectionLost()
class VOEventSubscriberTimeoutTestCase(unittest.TestCase):
def setUp(self):
factory = VOEventSubscriberFactory(DUMMY_IVORN)
self.proto = factory.buildProtocol(('127.0.0.1', 0))
self.clock = task.Clock()
self.proto.callLater = self.clock.callLater
self.tr = proto_helpers.StringTransport()
self.proto.makeConnection(self.tr)
def test_timeout(self):
self.clock.advance(self.proto.ALIVE_INTERVAL)
self.assertEqual(self.tr.disconnecting, True)
|
+ from twisted.internet import task
from twisted.trial import unittest
from twisted.test import proto_helpers
from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN
from ..protocol import VOEventSubscriber
from ..protocol import VOEventSubscriberFactory
class VOEventSubscriberFactoryTestCase(unittest.TestCase):
def setUp(self):
factory = VOEventSubscriberFactory(DUMMY_IVORN)
self.proto = factory.buildProtocol(('127.0.0.1', 0))
self.proto.makeConnection(proto_helpers.StringTransport())
def test_protocol(self):
self.assertIsInstance(self.proto, VOEventSubscriber)
def tearDown(self):
self.proto.connectionLost()
+
+ class VOEventSubscriberTimeoutTestCase(unittest.TestCase):
+ def setUp(self):
+ factory = VOEventSubscriberFactory(DUMMY_IVORN)
+ self.proto = factory.buildProtocol(('127.0.0.1', 0))
+ self.clock = task.Clock()
+ self.proto.callLater = self.clock.callLater
+ self.tr = proto_helpers.StringTransport()
+ self.proto.makeConnection(self.tr)
+
+ def test_timeout(self):
+ self.clock.advance(self.proto.ALIVE_INTERVAL)
+ self.assertEqual(self.tr.disconnecting, True)
|
09c3c511687de8888180577fa66f4ca51f4bc237
|
taggit_autosuggest_select2/views.py
|
taggit_autosuggest_select2/views.py
|
from django.conf import settings
from django.http import HttpResponse
from django.utils import simplejson as json
from taggit.models import Tag
MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20)
def list_tags(request):
"""
Returns a list of JSON objects with a `name` and a `value` property that
all start like your query string `q` (not case sensitive).
"""
query = request.GET.get('q', '')
limit = request.GET.get('limit', MAX_SUGGESTIONS)
try:
request.GET.get('limit', MAX_SUGGESTIONS)
limit = min(int(limit), MAX_SUGGESTIONS) # max or less
except ValueError:
limit = MAX_SUGGESTIONS
tag_name_qs = Tag.objects.filter(name__istartswith=query).\
values_list('name', flat=True)
data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]]
return HttpResponse(json.dumps(data), mimetype='application/json')
def list_all_tags(request):
"""Returns all the tags in the database"""
all_tags = Tag.objects.all().values_list('name', flat=True)
return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
|
from django.conf import settings
from django.http import HttpResponse
import json
from taggit.models import Tag
MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20)
def list_tags(request):
"""
Returns a list of JSON objects with a `name` and a `value` property that
all start like your query string `q` (not case sensitive).
"""
query = request.GET.get('q', '')
limit = request.GET.get('limit', MAX_SUGGESTIONS)
try:
request.GET.get('limit', MAX_SUGGESTIONS)
limit = min(int(limit), MAX_SUGGESTIONS) # max or less
except ValueError:
limit = MAX_SUGGESTIONS
tag_name_qs = Tag.objects.filter(name__istartswith=query).\
values_list('name', flat=True)
data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]]
return HttpResponse(json.dumps(data), mimetype='application/json')
def list_all_tags(request):
"""Returns all the tags in the database"""
all_tags = Tag.objects.all().values_list('name', flat=True)
return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
|
Remove deprecated django json shim
|
Remove deprecated django json shim
|
Python
|
mit
|
iris-edu/django-taggit-autosuggest-select2,iris-edu-int/django-taggit-autosuggest-select2,adam-iris/django-taggit-autosuggest-select2,adam-iris/django-taggit-autosuggest-select2,iris-edu/django-taggit-autosuggest-select2,iris-edu-int/django-taggit-autosuggest-select2,iris-edu-int/django-taggit-autosuggest-select2,iris-edu/django-taggit-autosuggest-select2,adam-iris/django-taggit-autosuggest-select2
|
from django.conf import settings
from django.http import HttpResponse
- from django.utils import simplejson as json
+ import json
from taggit.models import Tag
MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20)
def list_tags(request):
"""
Returns a list of JSON objects with a `name` and a `value` property that
all start like your query string `q` (not case sensitive).
"""
query = request.GET.get('q', '')
limit = request.GET.get('limit', MAX_SUGGESTIONS)
try:
request.GET.get('limit', MAX_SUGGESTIONS)
limit = min(int(limit), MAX_SUGGESTIONS) # max or less
except ValueError:
limit = MAX_SUGGESTIONS
tag_name_qs = Tag.objects.filter(name__istartswith=query).\
values_list('name', flat=True)
data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]]
return HttpResponse(json.dumps(data), mimetype='application/json')
def list_all_tags(request):
"""Returns all the tags in the database"""
all_tags = Tag.objects.all().values_list('name', flat=True)
return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
|
Remove deprecated django json shim
|
## Code Before:
from django.conf import settings
from django.http import HttpResponse
from django.utils import simplejson as json
from taggit.models import Tag
MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20)
def list_tags(request):
"""
Returns a list of JSON objects with a `name` and a `value` property that
all start like your query string `q` (not case sensitive).
"""
query = request.GET.get('q', '')
limit = request.GET.get('limit', MAX_SUGGESTIONS)
try:
request.GET.get('limit', MAX_SUGGESTIONS)
limit = min(int(limit), MAX_SUGGESTIONS) # max or less
except ValueError:
limit = MAX_SUGGESTIONS
tag_name_qs = Tag.objects.filter(name__istartswith=query).\
values_list('name', flat=True)
data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]]
return HttpResponse(json.dumps(data), mimetype='application/json')
def list_all_tags(request):
"""Returns all the tags in the database"""
all_tags = Tag.objects.all().values_list('name', flat=True)
return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
## Instruction:
Remove deprecated django json shim
## Code After:
from django.conf import settings
from django.http import HttpResponse
import json
from taggit.models import Tag
MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20)
def list_tags(request):
"""
Returns a list of JSON objects with a `name` and a `value` property that
all start like your query string `q` (not case sensitive).
"""
query = request.GET.get('q', '')
limit = request.GET.get('limit', MAX_SUGGESTIONS)
try:
request.GET.get('limit', MAX_SUGGESTIONS)
limit = min(int(limit), MAX_SUGGESTIONS) # max or less
except ValueError:
limit = MAX_SUGGESTIONS
tag_name_qs = Tag.objects.filter(name__istartswith=query).\
values_list('name', flat=True)
data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]]
return HttpResponse(json.dumps(data), mimetype='application/json')
def list_all_tags(request):
"""Returns all the tags in the database"""
all_tags = Tag.objects.all().values_list('name', flat=True)
return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
|
from django.conf import settings
from django.http import HttpResponse
- from django.utils import simplejson as json
+ import json
from taggit.models import Tag
MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20)
def list_tags(request):
"""
Returns a list of JSON objects with a `name` and a `value` property that
all start like your query string `q` (not case sensitive).
"""
query = request.GET.get('q', '')
limit = request.GET.get('limit', MAX_SUGGESTIONS)
try:
request.GET.get('limit', MAX_SUGGESTIONS)
limit = min(int(limit), MAX_SUGGESTIONS) # max or less
except ValueError:
limit = MAX_SUGGESTIONS
tag_name_qs = Tag.objects.filter(name__istartswith=query).\
values_list('name', flat=True)
data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]]
return HttpResponse(json.dumps(data), mimetype='application/json')
def list_all_tags(request):
"""Returns all the tags in the database"""
all_tags = Tag.objects.all().values_list('name', flat=True)
return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
|
47b346404f29c89ddc7e85cd3833564593823449
|
zou/app/utils/query.py
|
zou/app/utils/query.py
|
def get_query_criterions_from_request(request):
"""
Turn request parameters into a dict where keys are attributes to filter and
values are values to filter.
"""
criterions = {}
for key, value in request.args.items():
if key not in ["page"]:
criterions[key] = value
return criterions
def get_page_from_request(request):
"""
Return page parameter value (given through request query or post body).
Default value is 1.
"""
return request.args.get("page", 1)
def apply_criterions_to_db_query(model, db_query, criterions):
"""
Apply criterions given in HTTP request to the sqlachemy db query object.
"""
if "name" in criterions and hasattr(model, "name"):
value = criterions["name"]
db_query = db_query.filter(model.name.ilike(value))
del criterions["name"]
return db_query.filter_by(**criterions)
|
import math
from zou.app import app
from zou.app.utils import fields
def get_query_criterions_from_request(request):
"""
Turn request parameters into a dict where keys are attributes to filter and
values are values to filter.
"""
criterions = {}
for key, value in request.args.items():
if key not in ["page"]:
criterions[key] = value
return criterions
def get_page_from_request(request):
"""
Return page parameter value (given through request query or post body).
Default value is 1.
"""
return request.args.get("page", 1)
def apply_criterions_to_db_query(model, db_query, criterions):
"""
Apply criterions given in HTTP request to the sqlachemy db query object.
"""
if "name" in criterions and hasattr(model, "name"):
value = criterions["name"]
db_query = db_query.filter(model.name.ilike(value))
del criterions["name"]
return db_query.filter_by(**criterions)
def get_paginated_results(query, page):
"""
Apply pagination to the query object.
"""
if page < 1:
entries = query.all()
return fields.serialize_list(entries)
else:
limit = app.config['NB_RECORDS_PER_PAGE']
total = query.count()
offset = (page - 1) * limit
nb_pages = int(math.ceil(total / float(limit)))
query = query.limit(limit)
query = query.offset(offset)
if (total < offset):
result = {
"data": [],
"total": 0,
"nb_pages": nb_pages,
"limit": limit,
"offset": offset,
"page": page
}
else:
result = {
"data": fields.serialize_list(query.all()),
"total": total,
"nb_pages": nb_pages,
"limit": limit,
"offset": offset,
"page": page
}
return result
|
Add helper to paginate results
|
Add helper to paginate results
|
Python
|
agpl-3.0
|
cgwire/zou
|
+ import math
+
+ from zou.app import app
+ from zou.app.utils import fields
+
+
def get_query_criterions_from_request(request):
"""
Turn request parameters into a dict where keys are attributes to filter and
values are values to filter.
"""
criterions = {}
for key, value in request.args.items():
if key not in ["page"]:
criterions[key] = value
return criterions
def get_page_from_request(request):
"""
Return page parameter value (given through request query or post body).
Default value is 1.
"""
return request.args.get("page", 1)
def apply_criterions_to_db_query(model, db_query, criterions):
"""
Apply criterions given in HTTP request to the sqlachemy db query object.
"""
if "name" in criterions and hasattr(model, "name"):
value = criterions["name"]
db_query = db_query.filter(model.name.ilike(value))
del criterions["name"]
return db_query.filter_by(**criterions)
+
+ def get_paginated_results(query, page):
+ """
+ Apply pagination to the query object.
+ """
+ if page < 1:
+ entries = query.all()
+ return fields.serialize_list(entries)
+ else:
+ limit = app.config['NB_RECORDS_PER_PAGE']
+ total = query.count()
+ offset = (page - 1) * limit
+
+ nb_pages = int(math.ceil(total / float(limit)))
+ query = query.limit(limit)
+ query = query.offset(offset)
+
+ if (total < offset):
+ result = {
+ "data": [],
+ "total": 0,
+ "nb_pages": nb_pages,
+ "limit": limit,
+ "offset": offset,
+ "page": page
+ }
+ else:
+ result = {
+ "data": fields.serialize_list(query.all()),
+ "total": total,
+ "nb_pages": nb_pages,
+ "limit": limit,
+ "offset": offset,
+ "page": page
+ }
+ return result
+
|
Add helper to paginate results
|
## Code Before:
def get_query_criterions_from_request(request):
"""
Turn request parameters into a dict where keys are attributes to filter and
values are values to filter.
"""
criterions = {}
for key, value in request.args.items():
if key not in ["page"]:
criterions[key] = value
return criterions
def get_page_from_request(request):
"""
Return page parameter value (given through request query or post body).
Default value is 1.
"""
return request.args.get("page", 1)
def apply_criterions_to_db_query(model, db_query, criterions):
"""
Apply criterions given in HTTP request to the sqlachemy db query object.
"""
if "name" in criterions and hasattr(model, "name"):
value = criterions["name"]
db_query = db_query.filter(model.name.ilike(value))
del criterions["name"]
return db_query.filter_by(**criterions)
## Instruction:
Add helper to paginate results
## Code After:
import math
from zou.app import app
from zou.app.utils import fields
def get_query_criterions_from_request(request):
"""
Turn request parameters into a dict where keys are attributes to filter and
values are values to filter.
"""
criterions = {}
for key, value in request.args.items():
if key not in ["page"]:
criterions[key] = value
return criterions
def get_page_from_request(request):
"""
Return page parameter value (given through request query or post body).
Default value is 1.
"""
return request.args.get("page", 1)
def apply_criterions_to_db_query(model, db_query, criterions):
"""
Apply criterions given in HTTP request to the sqlachemy db query object.
"""
if "name" in criterions and hasattr(model, "name"):
value = criterions["name"]
db_query = db_query.filter(model.name.ilike(value))
del criterions["name"]
return db_query.filter_by(**criterions)
def get_paginated_results(query, page):
"""
Apply pagination to the query object.
"""
if page < 1:
entries = query.all()
return fields.serialize_list(entries)
else:
limit = app.config['NB_RECORDS_PER_PAGE']
total = query.count()
offset = (page - 1) * limit
nb_pages = int(math.ceil(total / float(limit)))
query = query.limit(limit)
query = query.offset(offset)
if (total < offset):
result = {
"data": [],
"total": 0,
"nb_pages": nb_pages,
"limit": limit,
"offset": offset,
"page": page
}
else:
result = {
"data": fields.serialize_list(query.all()),
"total": total,
"nb_pages": nb_pages,
"limit": limit,
"offset": offset,
"page": page
}
return result
|
+ import math
+
+ from zou.app import app
+ from zou.app.utils import fields
+
+
def get_query_criterions_from_request(request):
"""
Turn request parameters into a dict where keys are attributes to filter and
values are values to filter.
"""
criterions = {}
for key, value in request.args.items():
if key not in ["page"]:
criterions[key] = value
return criterions
def get_page_from_request(request):
"""
Return page parameter value (given through request query or post body).
Default value is 1.
"""
return request.args.get("page", 1)
def apply_criterions_to_db_query(model, db_query, criterions):
"""
Apply criterions given in HTTP request to the sqlachemy db query object.
"""
if "name" in criterions and hasattr(model, "name"):
value = criterions["name"]
db_query = db_query.filter(model.name.ilike(value))
del criterions["name"]
return db_query.filter_by(**criterions)
+
+
+ def get_paginated_results(query, page):
+ """
+ Apply pagination to the query object.
+ """
+ if page < 1:
+ entries = query.all()
+ return fields.serialize_list(entries)
+ else:
+ limit = app.config['NB_RECORDS_PER_PAGE']
+ total = query.count()
+ offset = (page - 1) * limit
+
+ nb_pages = int(math.ceil(total / float(limit)))
+ query = query.limit(limit)
+ query = query.offset(offset)
+
+ if (total < offset):
+ result = {
+ "data": [],
+ "total": 0,
+ "nb_pages": nb_pages,
+ "limit": limit,
+ "offset": offset,
+ "page": page
+ }
+ else:
+ result = {
+ "data": fields.serialize_list(query.all()),
+ "total": total,
+ "nb_pages": nb_pages,
+ "limit": limit,
+ "offset": offset,
+ "page": page
+ }
+ return result
|
d9ab07c9c984d50ff93040d0220e4a3997e29f79
|
fluent_comments/email.py
|
fluent_comments/email.py
|
from django.conf import settings
from django.core.mail import send_mail
from django.template.loader import render_to_string
try:
from django.contrib.sites.shortcuts import get_current_site # Django 1.9+
except ImportError:
from django.contrib.sites.models import get_current_site
def send_comment_posted(comment, request):
"""
Send the email to staff that an comment was posted.
While the django_comments module has email support,
it doesn't pass the 'request' to the context.
This also changes the subject to show the page title.
"""
recipient_list = [manager_tuple[1] for manager_tuple in settings.MANAGERS]
site = get_current_site(request)
content_object = comment.content_object
content_title = str(content_object)
if comment.is_removed:
subject = u'[{0}] Spam comment on "{1}"'.format(site.name, content_title)
elif not comment.is_public:
subject = u'[{0}] Moderated comment on "{1}"'.format(site.name, content_title)
else:
subject = u'[{0}] New comment posted on "{1}"'.format(site.name, content_title)
context = {
'site': site,
'comment': comment,
'content_object': content_object
}
message = render_to_string("comments/comment_notification_email.txt", context, request=request)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, recipient_list, fail_silently=True)
|
from django.conf import settings
from django.core.mail import send_mail
from django.template.loader import render_to_string
from django.utils.encoding import force_text
try:
from django.contrib.sites.shortcuts import get_current_site # Django 1.9+
except ImportError:
from django.contrib.sites.models import get_current_site
def send_comment_posted(comment, request):
"""
Send the email to staff that an comment was posted.
While the django_comments module has email support,
it doesn't pass the 'request' to the context.
This also changes the subject to show the page title.
"""
recipient_list = [manager_tuple[1] for manager_tuple in settings.MANAGERS]
site = get_current_site(request)
content_object = comment.content_object
content_title = force_text(content_object)
if comment.is_removed:
subject = u'[{0}] Spam comment on "{1}"'.format(site.name, content_title)
elif not comment.is_public:
subject = u'[{0}] Moderated comment on "{1}"'.format(site.name, content_title)
else:
subject = u'[{0}] New comment posted on "{1}"'.format(site.name, content_title)
context = {
'site': site,
'comment': comment,
'content_object': content_object
}
message = render_to_string("comments/comment_notification_email.txt", context, request=request)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, recipient_list, fail_silently=True)
|
Use force_text() to get page title
|
Use force_text() to get page title
Some models might handle __unicode__/__str__ badly
|
Python
|
apache-2.0
|
edoburu/django-fluent-comments,django-fluent/django-fluent-comments,edoburu/django-fluent-comments,django-fluent/django-fluent-comments,django-fluent/django-fluent-comments,django-fluent/django-fluent-comments,edoburu/django-fluent-comments
|
from django.conf import settings
from django.core.mail import send_mail
from django.template.loader import render_to_string
+ from django.utils.encoding import force_text
try:
from django.contrib.sites.shortcuts import get_current_site # Django 1.9+
except ImportError:
from django.contrib.sites.models import get_current_site
def send_comment_posted(comment, request):
"""
Send the email to staff that an comment was posted.
While the django_comments module has email support,
it doesn't pass the 'request' to the context.
This also changes the subject to show the page title.
"""
recipient_list = [manager_tuple[1] for manager_tuple in settings.MANAGERS]
site = get_current_site(request)
content_object = comment.content_object
- content_title = str(content_object)
+ content_title = force_text(content_object)
if comment.is_removed:
subject = u'[{0}] Spam comment on "{1}"'.format(site.name, content_title)
elif not comment.is_public:
subject = u'[{0}] Moderated comment on "{1}"'.format(site.name, content_title)
else:
subject = u'[{0}] New comment posted on "{1}"'.format(site.name, content_title)
context = {
'site': site,
'comment': comment,
'content_object': content_object
}
message = render_to_string("comments/comment_notification_email.txt", context, request=request)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, recipient_list, fail_silently=True)
|
Use force_text() to get page title
|
## Code Before:
from django.conf import settings
from django.core.mail import send_mail
from django.template.loader import render_to_string
try:
from django.contrib.sites.shortcuts import get_current_site # Django 1.9+
except ImportError:
from django.contrib.sites.models import get_current_site
def send_comment_posted(comment, request):
"""
Send the email to staff that an comment was posted.
While the django_comments module has email support,
it doesn't pass the 'request' to the context.
This also changes the subject to show the page title.
"""
recipient_list = [manager_tuple[1] for manager_tuple in settings.MANAGERS]
site = get_current_site(request)
content_object = comment.content_object
content_title = str(content_object)
if comment.is_removed:
subject = u'[{0}] Spam comment on "{1}"'.format(site.name, content_title)
elif not comment.is_public:
subject = u'[{0}] Moderated comment on "{1}"'.format(site.name, content_title)
else:
subject = u'[{0}] New comment posted on "{1}"'.format(site.name, content_title)
context = {
'site': site,
'comment': comment,
'content_object': content_object
}
message = render_to_string("comments/comment_notification_email.txt", context, request=request)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, recipient_list, fail_silently=True)
## Instruction:
Use force_text() to get page title
## Code After:
from django.conf import settings
from django.core.mail import send_mail
from django.template.loader import render_to_string
from django.utils.encoding import force_text
try:
from django.contrib.sites.shortcuts import get_current_site # Django 1.9+
except ImportError:
from django.contrib.sites.models import get_current_site
def send_comment_posted(comment, request):
"""
Send the email to staff that an comment was posted.
While the django_comments module has email support,
it doesn't pass the 'request' to the context.
This also changes the subject to show the page title.
"""
recipient_list = [manager_tuple[1] for manager_tuple in settings.MANAGERS]
site = get_current_site(request)
content_object = comment.content_object
content_title = force_text(content_object)
if comment.is_removed:
subject = u'[{0}] Spam comment on "{1}"'.format(site.name, content_title)
elif not comment.is_public:
subject = u'[{0}] Moderated comment on "{1}"'.format(site.name, content_title)
else:
subject = u'[{0}] New comment posted on "{1}"'.format(site.name, content_title)
context = {
'site': site,
'comment': comment,
'content_object': content_object
}
message = render_to_string("comments/comment_notification_email.txt", context, request=request)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, recipient_list, fail_silently=True)
|
from django.conf import settings
from django.core.mail import send_mail
from django.template.loader import render_to_string
+ from django.utils.encoding import force_text
try:
from django.contrib.sites.shortcuts import get_current_site # Django 1.9+
except ImportError:
from django.contrib.sites.models import get_current_site
def send_comment_posted(comment, request):
"""
Send the email to staff that an comment was posted.
While the django_comments module has email support,
it doesn't pass the 'request' to the context.
This also changes the subject to show the page title.
"""
recipient_list = [manager_tuple[1] for manager_tuple in settings.MANAGERS]
site = get_current_site(request)
content_object = comment.content_object
- content_title = str(content_object)
? ^ ^
+ content_title = force_text(content_object)
? ^^^^^^ ^^^
if comment.is_removed:
subject = u'[{0}] Spam comment on "{1}"'.format(site.name, content_title)
elif not comment.is_public:
subject = u'[{0}] Moderated comment on "{1}"'.format(site.name, content_title)
else:
subject = u'[{0}] New comment posted on "{1}"'.format(site.name, content_title)
context = {
'site': site,
'comment': comment,
'content_object': content_object
}
message = render_to_string("comments/comment_notification_email.txt", context, request=request)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, recipient_list, fail_silently=True)
|
38845ecae177635b98a2e074355227f0c9f9834d
|
cartoframes/viz/widgets/__init__.py
|
cartoframes/viz/widgets/__init__.py
|
from __future__ import absolute_import
from .animation_widget import animation_widget
from .category_widget import category_widget
from .default_widget import default_widget
from .formula_widget import formula_widget
from .histogram_widget import histogram_widget
from .time_series_widget import time_series_widget
def _inspect(widget):
import inspect
lines = inspect.getsource(widget)
print(lines)
__all__ = [
'animation_widget',
'category_widget',
'default_widget',
'formula_widget',
'histogram_widget',
'time_series_widget',
]
|
from __future__ import absolute_import
from .animation_widget import animation_widget
from .category_widget import category_widget
from .default_widget import default_widget
from .formula_widget import formula_widget
from .histogram_widget import histogram_widget
from .time_series_widget import time_series_widget
from ..widget import Widget
from ..widget_list import WidgetList
def _inspect(widget):
import inspect
lines = inspect.getsource(widget)
print(lines)
__all__ = [
'Widget',
'WidgetList',
'animation_widget',
'category_widget',
'default_widget',
'formula_widget',
'histogram_widget',
'time_series_widget',
]
|
Add Widget and WidgetList to namespace
|
Add Widget and WidgetList to namespace
|
Python
|
bsd-3-clause
|
CartoDB/cartoframes,CartoDB/cartoframes
|
from __future__ import absolute_import
from .animation_widget import animation_widget
from .category_widget import category_widget
from .default_widget import default_widget
from .formula_widget import formula_widget
from .histogram_widget import histogram_widget
from .time_series_widget import time_series_widget
+ from ..widget import Widget
+ from ..widget_list import WidgetList
def _inspect(widget):
import inspect
lines = inspect.getsource(widget)
print(lines)
__all__ = [
+ 'Widget',
+ 'WidgetList',
'animation_widget',
'category_widget',
'default_widget',
'formula_widget',
'histogram_widget',
'time_series_widget',
]
|
Add Widget and WidgetList to namespace
|
## Code Before:
from __future__ import absolute_import
from .animation_widget import animation_widget
from .category_widget import category_widget
from .default_widget import default_widget
from .formula_widget import formula_widget
from .histogram_widget import histogram_widget
from .time_series_widget import time_series_widget
def _inspect(widget):
import inspect
lines = inspect.getsource(widget)
print(lines)
__all__ = [
'animation_widget',
'category_widget',
'default_widget',
'formula_widget',
'histogram_widget',
'time_series_widget',
]
## Instruction:
Add Widget and WidgetList to namespace
## Code After:
from __future__ import absolute_import
from .animation_widget import animation_widget
from .category_widget import category_widget
from .default_widget import default_widget
from .formula_widget import formula_widget
from .histogram_widget import histogram_widget
from .time_series_widget import time_series_widget
from ..widget import Widget
from ..widget_list import WidgetList
def _inspect(widget):
import inspect
lines = inspect.getsource(widget)
print(lines)
__all__ = [
'Widget',
'WidgetList',
'animation_widget',
'category_widget',
'default_widget',
'formula_widget',
'histogram_widget',
'time_series_widget',
]
|
from __future__ import absolute_import
from .animation_widget import animation_widget
from .category_widget import category_widget
from .default_widget import default_widget
from .formula_widget import formula_widget
from .histogram_widget import histogram_widget
from .time_series_widget import time_series_widget
+ from ..widget import Widget
+ from ..widget_list import WidgetList
def _inspect(widget):
import inspect
lines = inspect.getsource(widget)
print(lines)
__all__ = [
+ 'Widget',
+ 'WidgetList',
'animation_widget',
'category_widget',
'default_widget',
'formula_widget',
'histogram_widget',
'time_series_widget',
]
|
c197bf432655ca051ff4fb672cd41e876d539990
|
pipeline/api/api.py
|
pipeline/api/api.py
|
import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
|
import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
def json_load(data):
try:
return json.loads(data)
except json.decoder.JSONDecodeError:
raise falcon.HTTPBadRequest(None, 'invalid JSON')
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
def on_post(self, req, resp):
data = json_load(req.stream.read().decode('utf-8'))
data, errors = story_schema.load(data)
if errors:
raise falcon.HTTPBadRequest(None, errors)
story = models.Story.create(**data)
result = story_schema.dump(story)
resp.body = json_dump(result.data)
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
|
Allow creating and viewing stories
|
Allow creating and viewing stories
Closes #1
|
Python
|
mit
|
thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline
|
import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
+ def json_load(data):
+ try:
+ return json.loads(data)
+ except json.decoder.JSONDecodeError:
+ raise falcon.HTTPBadRequest(None, 'invalid JSON')
+
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
+ def on_post(self, req, resp):
+ data = json_load(req.stream.read().decode('utf-8'))
+ data, errors = story_schema.load(data)
+ if errors:
+ raise falcon.HTTPBadRequest(None, errors)
+
+ story = models.Story.create(**data)
+ result = story_schema.dump(story)
+
+ resp.body = json_dump(result.data)
+
+
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
|
Allow creating and viewing stories
|
## Code Before:
import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
## Instruction:
Allow creating and viewing stories
## Code After:
import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
def json_load(data):
try:
return json.loads(data)
except json.decoder.JSONDecodeError:
raise falcon.HTTPBadRequest(None, 'invalid JSON')
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
def on_post(self, req, resp):
data = json_load(req.stream.read().decode('utf-8'))
data, errors = story_schema.load(data)
if errors:
raise falcon.HTTPBadRequest(None, errors)
story = models.Story.create(**data)
result = story_schema.dump(story)
resp.body = json_dump(result.data)
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
|
import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
+ def json_load(data):
+ try:
+ return json.loads(data)
+ except json.decoder.JSONDecodeError:
+ raise falcon.HTTPBadRequest(None, 'invalid JSON')
+
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
+ def on_post(self, req, resp):
+ data = json_load(req.stream.read().decode('utf-8'))
+ data, errors = story_schema.load(data)
+ if errors:
+ raise falcon.HTTPBadRequest(None, errors)
+
+ story = models.Story.create(**data)
+ result = story_schema.dump(story)
+
+ resp.body = json_dump(result.data)
+
+
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
|
ab5edd504789e8fad3dcf0f30b0fbec8608e2abe
|
django_nyt/urls.py
|
django_nyt/urls.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
'', url('^json/get/$', 'django_nyt.views.get_notifications',
name='json_get'), url('^json/get/(?P<latest_id>\d+)/$',
'django_nyt.views.get_notifications', name='json_get'), url(
'^json/mark-read/$', 'django_nyt.views.mark_read',
name='json_mark_read_base'), url('^json/mark-read/(\d+)/$',
'django_nyt.views.mark_read', name='json_mark_read'), url(
'^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$',
'django_nyt.views.mark_read', name='json_mark_read'), url(
'^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
url('^goto/$', 'django_nyt.views.goto', name='goto_base'),)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
|
from __future__ import absolute_import
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.conf.urls import url
urlpatterns = [
url('^json/get/$', 'django_nyt.views.get_notifications', name='json_get'),
url('^json/get/(?P<latest_id>\d+)/$', 'django_nyt.views.get_notifications', name='json_get'),
url('^json/mark-read/$', 'django_nyt.views.mark_read', name='json_mark_read_base'),
url('^json/mark-read/(\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
url('^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
url('^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
url('^goto/$', 'django_nyt.views.goto', name='goto_base'),
]
if DJANGO_VERSION < (1, 8):
from django.conf.urls import patterns
urlpatterns = patterns('', *urlpatterns)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
|
Use list instead of patterns()
|
Use list instead of patterns()
|
Python
|
apache-2.0
|
benjaoming/django-nyt,benjaoming/django-nyt
|
from __future__ import absolute_import
from __future__ import unicode_literals
+ from django import VERSION as DJANGO_VERSION
- from django.conf.urls import patterns, url
+ from django.conf.urls import url
- urlpatterns = patterns(
+ urlpatterns = [
- '', url('^json/get/$', 'django_nyt.views.get_notifications',
+ url('^json/get/$', 'django_nyt.views.get_notifications', name='json_get'),
+ url('^json/get/(?P<latest_id>\d+)/$', 'django_nyt.views.get_notifications', name='json_get'),
- name='json_get'), url('^json/get/(?P<latest_id>\d+)/$',
- 'django_nyt.views.get_notifications', name='json_get'), url(
- '^json/mark-read/$', 'django_nyt.views.mark_read',
- name='json_mark_read_base'), url('^json/mark-read/(\d+)/$',
- 'django_nyt.views.mark_read', name='json_mark_read'), url(
- '^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$',
- 'django_nyt.views.mark_read', name='json_mark_read'), url(
+ url('^json/mark-read/$', 'django_nyt.views.mark_read', name='json_mark_read_base'),
+ url('^json/mark-read/(\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
+ url('^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
- '^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
+ url('^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
- url('^goto/$', 'django_nyt.views.goto', name='goto_base'),)
+ url('^goto/$', 'django_nyt.views.goto', name='goto_base'),
+ ]
+
+
+ if DJANGO_VERSION < (1, 8):
+ from django.conf.urls import patterns
+ urlpatterns = patterns('', *urlpatterns)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
|
Use list instead of patterns()
|
## Code Before:
from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
'', url('^json/get/$', 'django_nyt.views.get_notifications',
name='json_get'), url('^json/get/(?P<latest_id>\d+)/$',
'django_nyt.views.get_notifications', name='json_get'), url(
'^json/mark-read/$', 'django_nyt.views.mark_read',
name='json_mark_read_base'), url('^json/mark-read/(\d+)/$',
'django_nyt.views.mark_read', name='json_mark_read'), url(
'^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$',
'django_nyt.views.mark_read', name='json_mark_read'), url(
'^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
url('^goto/$', 'django_nyt.views.goto', name='goto_base'),)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
## Instruction:
Use list instead of patterns()
## Code After:
from __future__ import absolute_import
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.conf.urls import url
urlpatterns = [
url('^json/get/$', 'django_nyt.views.get_notifications', name='json_get'),
url('^json/get/(?P<latest_id>\d+)/$', 'django_nyt.views.get_notifications', name='json_get'),
url('^json/mark-read/$', 'django_nyt.views.mark_read', name='json_mark_read_base'),
url('^json/mark-read/(\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
url('^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
url('^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
url('^goto/$', 'django_nyt.views.goto', name='goto_base'),
]
if DJANGO_VERSION < (1, 8):
from django.conf.urls import patterns
urlpatterns = patterns('', *urlpatterns)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
|
from __future__ import absolute_import
from __future__ import unicode_literals
+ from django import VERSION as DJANGO_VERSION
- from django.conf.urls import patterns, url
? ----------
+ from django.conf.urls import url
- urlpatterns = patterns(
+ urlpatterns = [
- '', url('^json/get/$', 'django_nyt.views.get_notifications',
? ----
+ url('^json/get/$', 'django_nyt.views.get_notifications', name='json_get'),
? ++++++++++++++++++
+ url('^json/get/(?P<latest_id>\d+)/$', 'django_nyt.views.get_notifications', name='json_get'),
- name='json_get'), url('^json/get/(?P<latest_id>\d+)/$',
- 'django_nyt.views.get_notifications', name='json_get'), url(
- '^json/mark-read/$', 'django_nyt.views.mark_read',
- name='json_mark_read_base'), url('^json/mark-read/(\d+)/$',
- 'django_nyt.views.mark_read', name='json_mark_read'), url(
- '^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$',
- 'django_nyt.views.mark_read', name='json_mark_read'), url(
? ^^^ -----
+ url('^json/mark-read/$', 'django_nyt.views.mark_read', name='json_mark_read_base'),
? ^^^^^^^^^^^^^^^^^^^^^^^^ +++++
+ url('^json/mark-read/(\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
+ url('^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
- '^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
? ^^^^
+ url('^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
? ^^^^
- url('^goto/$', 'django_nyt.views.goto', name='goto_base'),)
? -
+ url('^goto/$', 'django_nyt.views.goto', name='goto_base'),
+ ]
+
+
+ if DJANGO_VERSION < (1, 8):
+ from django.conf.urls import patterns
+ urlpatterns = patterns('', *urlpatterns)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
|
00e9f7d239287896946511b81e2029a5db1f435c
|
scipy/fftpack/__init__.py
|
scipy/fftpack/__init__.py
|
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
from realtransforms import *
__all__.extend(['dct', 'idct'])
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
Add dct and idct in scipy.fftpack namespace.
|
Add dct and idct in scipy.fftpack namespace.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@5519 d6536bca-fef9-0310-8506-e4c0a848fbcf
|
Python
|
bsd-3-clause
|
scipy/scipy-svn,lesserwhirls/scipy-cwt,lesserwhirls/scipy-cwt,scipy/scipy-svn,scipy/scipy-svn,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,scipy/scipy-svn,jasonmccampbell/scipy-refactor
|
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
+ from realtransforms import *
+ __all__.extend(['dct', 'idct'])
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
Add dct and idct in scipy.fftpack namespace.
|
## Code Before:
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
## Instruction:
Add dct and idct in scipy.fftpack namespace.
## Code After:
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
from realtransforms import *
__all__.extend(['dct', 'idct'])
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
+ from realtransforms import *
+ __all__.extend(['dct', 'idct'])
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
a5b750b9800b60242e72d9d066a46f98b8a0325e
|
test/test_recordings.py
|
test/test_recordings.py
|
import pytest
import json
class TestRecordings:
def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
print("If a new user uploads a recording")
bob = helper.given_new_user(self, "bob_limit")
bobsGroup = helper.make_unique_group_name(self, "bobs_group")
bob.create_group(bobsGroup)
bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
recording = bobsDevice.upload_recording()
print("And then fetches it before it has been processed")
response = bob.get_recording_response(recording)
print(" The response should have a JWT for the raw file")
assert "downloadRawJWT" in response
print(" But the response should not have a JWT for the processed file")
assert "downloadFileJWT" not in response
|
import pytest
import json
class TestRecordings:
def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
print("If a new user uploads a recording")
bob = helper.given_new_user(self, "bob_limit")
bobsGroup = helper.make_unique_group_name(self, "bobs_group")
bob.create_group(bobsGroup)
bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
recording = bobsDevice.upload_recording()
print("And then fetches it before it has been processed")
response = bob.get_recording_response(recording)
print(" The response should have a JWT for the raw file")
assert "downloadRawJWT" in response
print(" But the response should not have a JWT for the processed file")
assert "downloadFileJWT" not in response
def test_recording_doesnt_include_file_key(self, helper):
print("If a new user uploads a recording")
bob = helper.given_new_user(self, "bob_limit")
bobsGroup = helper.make_unique_group_name(self, "bobs_group")
bob.create_group(bobsGroup)
bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
recording = bobsDevice.upload_recording()
print("And then fetches it")
recording_response = bob.get_recording(recording)
print(" The recording response should not contain the rawFileKey")
assert "rawFileKey" not in recording_response
|
Add test to make sure fileKey(s) aren't returned
|
Add test to make sure fileKey(s) aren't returned
|
Python
|
agpl-3.0
|
TheCacophonyProject/Full_Noise
|
import pytest
import json
class TestRecordings:
def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
print("If a new user uploads a recording")
bob = helper.given_new_user(self, "bob_limit")
bobsGroup = helper.make_unique_group_name(self, "bobs_group")
bob.create_group(bobsGroup)
bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
recording = bobsDevice.upload_recording()
print("And then fetches it before it has been processed")
response = bob.get_recording_response(recording)
print(" The response should have a JWT for the raw file")
assert "downloadRawJWT" in response
print(" But the response should not have a JWT for the processed file")
assert "downloadFileJWT" not in response
+ def test_recording_doesnt_include_file_key(self, helper):
+ print("If a new user uploads a recording")
+ bob = helper.given_new_user(self, "bob_limit")
+ bobsGroup = helper.make_unique_group_name(self, "bobs_group")
+ bob.create_group(bobsGroup)
+ bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
+ recording = bobsDevice.upload_recording()
+
+ print("And then fetches it")
+ recording_response = bob.get_recording(recording)
+
+ print(" The recording response should not contain the rawFileKey")
+ assert "rawFileKey" not in recording_response
+
|
Add test to make sure fileKey(s) aren't returned
|
## Code Before:
import pytest
import json
class TestRecordings:
def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
print("If a new user uploads a recording")
bob = helper.given_new_user(self, "bob_limit")
bobsGroup = helper.make_unique_group_name(self, "bobs_group")
bob.create_group(bobsGroup)
bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
recording = bobsDevice.upload_recording()
print("And then fetches it before it has been processed")
response = bob.get_recording_response(recording)
print(" The response should have a JWT for the raw file")
assert "downloadRawJWT" in response
print(" But the response should not have a JWT for the processed file")
assert "downloadFileJWT" not in response
## Instruction:
Add test to make sure fileKey(s) aren't returned
## Code After:
import pytest
import json
class TestRecordings:
def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
print("If a new user uploads a recording")
bob = helper.given_new_user(self, "bob_limit")
bobsGroup = helper.make_unique_group_name(self, "bobs_group")
bob.create_group(bobsGroup)
bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
recording = bobsDevice.upload_recording()
print("And then fetches it before it has been processed")
response = bob.get_recording_response(recording)
print(" The response should have a JWT for the raw file")
assert "downloadRawJWT" in response
print(" But the response should not have a JWT for the processed file")
assert "downloadFileJWT" not in response
def test_recording_doesnt_include_file_key(self, helper):
print("If a new user uploads a recording")
bob = helper.given_new_user(self, "bob_limit")
bobsGroup = helper.make_unique_group_name(self, "bobs_group")
bob.create_group(bobsGroup)
bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
recording = bobsDevice.upload_recording()
print("And then fetches it")
recording_response = bob.get_recording(recording)
print(" The recording response should not contain the rawFileKey")
assert "rawFileKey" not in recording_response
|
import pytest
import json
class TestRecordings:
def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
print("If a new user uploads a recording")
bob = helper.given_new_user(self, "bob_limit")
bobsGroup = helper.make_unique_group_name(self, "bobs_group")
bob.create_group(bobsGroup)
bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
recording = bobsDevice.upload_recording()
print("And then fetches it before it has been processed")
response = bob.get_recording_response(recording)
print(" The response should have a JWT for the raw file")
assert "downloadRawJWT" in response
print(" But the response should not have a JWT for the processed file")
assert "downloadFileJWT" not in response
+
+ def test_recording_doesnt_include_file_key(self, helper):
+ print("If a new user uploads a recording")
+ bob = helper.given_new_user(self, "bob_limit")
+ bobsGroup = helper.make_unique_group_name(self, "bobs_group")
+ bob.create_group(bobsGroup)
+ bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
+ recording = bobsDevice.upload_recording()
+
+ print("And then fetches it")
+ recording_response = bob.get_recording(recording)
+
+ print(" The recording response should not contain the rawFileKey")
+ assert "rawFileKey" not in recording_response
|
56327baa67d5f05551bc52a1c0466e8d8b905797
|
metrics.py
|
metrics.py
|
"""The metrics module implements functions assessing prediction error for specific purposes."""
import numpy as np
def trapz(x, y):
"""Trapezoidal rule for integrating
the curve defined by x-y pairs.
Assume x and y are in the range [0,1]
"""
assert len(x) == len(y), 'x and y need to be of same length'
x = np.concatenate([x, array([0.0, 1.0])])
y = np.concatenate([y, array([0.0, 1.0])])
sort_idx = np.argsort(x)
sx = x[sort_idx]
sy = y[sort_idx]
area = 0.0
for ix in range(len(x)-1):
area += 0.5*(sx[ix+1]-sx[ix])*(sy[ix+1]+sy[ix])
return area
|
"""The metrics module implements functions assessing prediction error for specific purposes."""
import numpy as np
def trapz(x, y):
"""Trapezoidal rule for integrating
the curve defined by x-y pairs.
Assume x and y are in the range [0,1]
"""
assert len(x) == len(y), 'x and y need to be of same length'
x = np.concatenate([x, np.array([0.0, 1.0])])
y = np.concatenate([y, np.array([0.0, 1.0])])
sort_idx = np.argsort(x)
sx = x[sort_idx]
sy = y[sort_idx]
area = 0.0
for ix in range(len(x) - 1):
area += 0.5 * (sx[ix + 1] - sx[ix]) * (sy[ix + 1] + sy[ix])
return area
|
Add the missing 'np.' before 'array'
|
Add the missing 'np.' before 'array'
|
Python
|
mit
|
ceshine/isml15-wed
|
"""The metrics module implements functions assessing prediction error for specific purposes."""
import numpy as np
+
def trapz(x, y):
"""Trapezoidal rule for integrating
the curve defined by x-y pairs.
Assume x and y are in the range [0,1]
"""
assert len(x) == len(y), 'x and y need to be of same length'
- x = np.concatenate([x, array([0.0, 1.0])])
+ x = np.concatenate([x, np.array([0.0, 1.0])])
- y = np.concatenate([y, array([0.0, 1.0])])
+ y = np.concatenate([y, np.array([0.0, 1.0])])
sort_idx = np.argsort(x)
sx = x[sort_idx]
sy = y[sort_idx]
area = 0.0
- for ix in range(len(x)-1):
+ for ix in range(len(x) - 1):
- area += 0.5*(sx[ix+1]-sx[ix])*(sy[ix+1]+sy[ix])
+ area += 0.5 * (sx[ix + 1] - sx[ix]) * (sy[ix + 1] + sy[ix])
return area
-
|
Add the missing 'np.' before 'array'
|
## Code Before:
"""The metrics module implements functions assessing prediction error for specific purposes."""
import numpy as np
def trapz(x, y):
"""Trapezoidal rule for integrating
the curve defined by x-y pairs.
Assume x and y are in the range [0,1]
"""
assert len(x) == len(y), 'x and y need to be of same length'
x = np.concatenate([x, array([0.0, 1.0])])
y = np.concatenate([y, array([0.0, 1.0])])
sort_idx = np.argsort(x)
sx = x[sort_idx]
sy = y[sort_idx]
area = 0.0
for ix in range(len(x)-1):
area += 0.5*(sx[ix+1]-sx[ix])*(sy[ix+1]+sy[ix])
return area
## Instruction:
Add the missing 'np.' before 'array'
## Code After:
"""The metrics module implements functions assessing prediction error for specific purposes."""
import numpy as np
def trapz(x, y):
"""Trapezoidal rule for integrating
the curve defined by x-y pairs.
Assume x and y are in the range [0,1]
"""
assert len(x) == len(y), 'x and y need to be of same length'
x = np.concatenate([x, np.array([0.0, 1.0])])
y = np.concatenate([y, np.array([0.0, 1.0])])
sort_idx = np.argsort(x)
sx = x[sort_idx]
sy = y[sort_idx]
area = 0.0
for ix in range(len(x) - 1):
area += 0.5 * (sx[ix + 1] - sx[ix]) * (sy[ix + 1] + sy[ix])
return area
|
"""The metrics module implements functions assessing prediction error for specific purposes."""
import numpy as np
+
def trapz(x, y):
"""Trapezoidal rule for integrating
the curve defined by x-y pairs.
Assume x and y are in the range [0,1]
"""
assert len(x) == len(y), 'x and y need to be of same length'
- x = np.concatenate([x, array([0.0, 1.0])])
+ x = np.concatenate([x, np.array([0.0, 1.0])])
? +++
- y = np.concatenate([y, array([0.0, 1.0])])
+ y = np.concatenate([y, np.array([0.0, 1.0])])
? +++
sort_idx = np.argsort(x)
sx = x[sort_idx]
sy = y[sort_idx]
area = 0.0
- for ix in range(len(x)-1):
+ for ix in range(len(x) - 1):
? + +
- area += 0.5*(sx[ix+1]-sx[ix])*(sy[ix+1]+sy[ix])
+ area += 0.5 * (sx[ix + 1] - sx[ix]) * (sy[ix + 1] + sy[ix])
? + + + + + + + + + + + +
return area
-
|
60352e8a3c41ec804ac1bd6b9f3af4bf611edc0b
|
profiles/views.py
|
profiles/views.py
|
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.views.generic import FormView, TemplateView
from django.utils.datastructures import MultiValueDictKeyError
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(FormView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
|
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.utils.datastructures import MultiValueDictKeyError
from django.views.generic import TemplateView, UpdateView
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
|
Use an update view instead of form view
|
Use an update view instead of form view
|
Python
|
bsd-2-clause
|
incuna/django-extensible-profiles
|
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
- from django.views.generic import FormView, TemplateView
from django.utils.datastructures import MultiValueDictKeyError
+ from django.views.generic import TemplateView, UpdateView
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
- class ProfileEdit(FormView):
+ class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
|
Use an update view instead of form view
|
## Code Before:
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.views.generic import FormView, TemplateView
from django.utils.datastructures import MultiValueDictKeyError
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(FormView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
## Instruction:
Use an update view instead of form view
## Code After:
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.utils.datastructures import MultiValueDictKeyError
from django.views.generic import TemplateView, UpdateView
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
|
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
- from django.views.generic import FormView, TemplateView
from django.utils.datastructures import MultiValueDictKeyError
+ from django.views.generic import TemplateView, UpdateView
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
- class ProfileEdit(FormView):
? ^^^^
+ class ProfileEdit(UpdateView):
? ^^^^^^
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
|
6d2d915d7bec4e4a8e733a073ec3dc79a1d06812
|
src/stop.py
|
src/stop.py
|
import os
import json
from flask import Flask
from flask import request
from flask import json
import services
app = Flask(__name__)
digitransitAPIService = services.DigitransitAPIService()
@app.route('/')
def hello_world():
return 'Hello World!'
@app.route('/test')
def digitransit_test():
return json.dumps(digitransitAPIService.get_stops(60.203978, 24.9633573))
@app.route('/stops', methods=['GET'])
def stops():
lat = float(request.args.get('lat'))
lon = float(request.args.get('lon'))
result = digitransitAPIService.get_stops(lat, lon)
print(result)
return json.dumps(result)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=os.getenv('PORT', '5000'))
|
import os
import json
from flask import Flask
from flask import make_response
from flask import request
from flask import json
import services
app = Flask(__name__)
digitransitAPIService = services.DigitransitAPIService()
@app.route('/')
def hello_world():
return 'Hello World!'
@app.route('/test')
def digitransit_test():
return json.dumps(digitransitAPIService.get_stops(60.203978, 24.9633573))
@app.route('/stops', methods=['GET'])
def stops():
lat = float(request.args.get('lat'))
lon = float(request.args.get('lon'))
result = digitransitAPIService.get_stops(lat, lon)
resp = make_response(json.dumps(result))
resp.mimetype = 'application/json'
return resp
if __name__ == '__main__':
app.run(host='0.0.0.0', port=os.getenv('PORT', '5000'))
|
Set response content type of a json response to application/json
|
Set response content type of a json response to application/json
|
Python
|
mit
|
STOP2/stop2.0-backend,STOP2/stop2.0-backend
|
import os
import json
from flask import Flask
+ from flask import make_response
from flask import request
from flask import json
import services
app = Flask(__name__)
digitransitAPIService = services.DigitransitAPIService()
@app.route('/')
def hello_world():
return 'Hello World!'
@app.route('/test')
def digitransit_test():
return json.dumps(digitransitAPIService.get_stops(60.203978, 24.9633573))
@app.route('/stops', methods=['GET'])
def stops():
lat = float(request.args.get('lat'))
lon = float(request.args.get('lon'))
result = digitransitAPIService.get_stops(lat, lon)
- print(result)
- return json.dumps(result)
+ resp = make_response(json.dumps(result))
+ resp.mimetype = 'application/json'
+ return resp
if __name__ == '__main__':
app.run(host='0.0.0.0', port=os.getenv('PORT', '5000'))
|
Set response content type of a json response to application/json
|
## Code Before:
import os
import json
from flask import Flask
from flask import request
from flask import json
import services
app = Flask(__name__)
digitransitAPIService = services.DigitransitAPIService()
@app.route('/')
def hello_world():
return 'Hello World!'
@app.route('/test')
def digitransit_test():
return json.dumps(digitransitAPIService.get_stops(60.203978, 24.9633573))
@app.route('/stops', methods=['GET'])
def stops():
lat = float(request.args.get('lat'))
lon = float(request.args.get('lon'))
result = digitransitAPIService.get_stops(lat, lon)
print(result)
return json.dumps(result)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=os.getenv('PORT', '5000'))
## Instruction:
Set response content type of a json response to application/json
## Code After:
import os
import json
from flask import Flask
from flask import make_response
from flask import request
from flask import json
import services
app = Flask(__name__)
digitransitAPIService = services.DigitransitAPIService()
@app.route('/')
def hello_world():
return 'Hello World!'
@app.route('/test')
def digitransit_test():
return json.dumps(digitransitAPIService.get_stops(60.203978, 24.9633573))
@app.route('/stops', methods=['GET'])
def stops():
lat = float(request.args.get('lat'))
lon = float(request.args.get('lon'))
result = digitransitAPIService.get_stops(lat, lon)
resp = make_response(json.dumps(result))
resp.mimetype = 'application/json'
return resp
if __name__ == '__main__':
app.run(host='0.0.0.0', port=os.getenv('PORT', '5000'))
|
import os
import json
from flask import Flask
+ from flask import make_response
from flask import request
from flask import json
import services
app = Flask(__name__)
digitransitAPIService = services.DigitransitAPIService()
@app.route('/')
def hello_world():
return 'Hello World!'
@app.route('/test')
def digitransit_test():
return json.dumps(digitransitAPIService.get_stops(60.203978, 24.9633573))
@app.route('/stops', methods=['GET'])
def stops():
lat = float(request.args.get('lat'))
lon = float(request.args.get('lon'))
result = digitransitAPIService.get_stops(lat, lon)
- print(result)
- return json.dumps(result)
+ resp = make_response(json.dumps(result))
+ resp.mimetype = 'application/json'
+ return resp
if __name__ == '__main__':
app.run(host='0.0.0.0', port=os.getenv('PORT', '5000'))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.