commit
stringlengths 40
40
| old_file
stringlengths 4
106
| new_file
stringlengths 4
106
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
2.95k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
43k
| ndiff
stringlengths 52
3.31k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| diff
stringlengths 49
3.61k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
59066fc1def071aa51a87a6393c8bdf34f081188
|
opps/core/__init__.py
|
opps/core/__init__.py
|
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Core')
settings.INSTALLED_APPS += (
'opps.article',
'opps.image',
'opps.channel',
'opps.source',
'django.contrib.redirects',
'django_thumbor',
'googl',
'redactor',
'static_sitemaps',
'tagging',)
settings.MIDDLEWARE_CLASSES += (
'django.contrib.redirects.middleware.RedirectFallbackMiddleware',)
# Opps
getattr(settings, 'OPPS_SHORT', 'googl')
getattr(settings, 'OPPS_SHORT_URL', 'googl.short.GooglUrlShort')
# Sitemap
if not hasattr(settings, 'STATICSITEMAPS_ROOT_SITEMAP'):
settings.STATICSITEMAPS_ROOT_SITEMAP = 'opps.sitemaps.feed.sitemaps'
# redactor
getattr(settings, 'REDACTOR_OPTIONS', {'lang': 'en'})
getattr(settings, 'REDACTOR_UPLOAD', 'uploads/')
# thumbor
getattr(settings, 'THUMBOR_SERVER', 'http://localhost:8888')
getattr(settings, 'THUMBOR_MEDIA_URL', 'http://localhost:8000/media')
getattr(settings, 'THUMBOR_SECURITY_KEY', '')
|
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Core')
settings.INSTALLED_APPS += (
'opps.article',
'opps.image',
'opps.channel',
'opps.source',
'django.contrib.redirects',
'django_thumbor',
'googl',
'redactor',
'static_sitemaps',
'tagging',)
settings.MIDDLEWARE_CLASSES += (
'django.contrib.redirects.middleware.RedirectFallbackMiddleware',)
# Opps
getattr(settings, 'OPPS_SHORT', 'googl')
getattr(settings, 'OPPS_SHORT_URL', 'googl.short.GooglUrlShort')
# Sitemap
if not hasattr(settings, 'STATICSITEMAPS_ROOT_SITEMAP'):
settings.STATICSITEMAPS_ROOT_SITEMAP = 'opps.sitemaps.feed.sitemaps'
# Haystack
getattr(settings, 'HAYSTACK_CONNECTIONS', {
'default': {'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'}})
# redactor
getattr(settings, 'REDACTOR_OPTIONS', {'lang': 'en'})
getattr(settings, 'REDACTOR_UPLOAD', 'uploads/')
# thumbor
getattr(settings, 'THUMBOR_SERVER', 'http://localhost:8888')
getattr(settings, 'THUMBOR_MEDIA_URL', 'http://localhost:8000/media')
getattr(settings, 'THUMBOR_SECURITY_KEY', '')
|
Add haystack connections simples engine om opps
|
Add haystack connections simples engine om opps
|
Python
|
mit
|
YACOWS/opps,jeanmask/opps,opps/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,opps/opps,opps/opps,YACOWS/opps,YACOWS/opps,opps/opps,williamroot/opps,YACOWS/opps,williamroot/opps,jeanmask/opps
|
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Core')
settings.INSTALLED_APPS += (
'opps.article',
'opps.image',
'opps.channel',
'opps.source',
'django.contrib.redirects',
'django_thumbor',
'googl',
'redactor',
'static_sitemaps',
'tagging',)
settings.MIDDLEWARE_CLASSES += (
'django.contrib.redirects.middleware.RedirectFallbackMiddleware',)
# Opps
getattr(settings, 'OPPS_SHORT', 'googl')
getattr(settings, 'OPPS_SHORT_URL', 'googl.short.GooglUrlShort')
# Sitemap
if not hasattr(settings, 'STATICSITEMAPS_ROOT_SITEMAP'):
settings.STATICSITEMAPS_ROOT_SITEMAP = 'opps.sitemaps.feed.sitemaps'
+ # Haystack
+ getattr(settings, 'HAYSTACK_CONNECTIONS', {
+ 'default': {'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'}})
+
# redactor
getattr(settings, 'REDACTOR_OPTIONS', {'lang': 'en'})
getattr(settings, 'REDACTOR_UPLOAD', 'uploads/')
# thumbor
getattr(settings, 'THUMBOR_SERVER', 'http://localhost:8888')
getattr(settings, 'THUMBOR_MEDIA_URL', 'http://localhost:8000/media')
getattr(settings, 'THUMBOR_SECURITY_KEY', '')
|
Add haystack connections simples engine om opps
|
## Code Before:
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Core')
settings.INSTALLED_APPS += (
'opps.article',
'opps.image',
'opps.channel',
'opps.source',
'django.contrib.redirects',
'django_thumbor',
'googl',
'redactor',
'static_sitemaps',
'tagging',)
settings.MIDDLEWARE_CLASSES += (
'django.contrib.redirects.middleware.RedirectFallbackMiddleware',)
# Opps
getattr(settings, 'OPPS_SHORT', 'googl')
getattr(settings, 'OPPS_SHORT_URL', 'googl.short.GooglUrlShort')
# Sitemap
if not hasattr(settings, 'STATICSITEMAPS_ROOT_SITEMAP'):
settings.STATICSITEMAPS_ROOT_SITEMAP = 'opps.sitemaps.feed.sitemaps'
# redactor
getattr(settings, 'REDACTOR_OPTIONS', {'lang': 'en'})
getattr(settings, 'REDACTOR_UPLOAD', 'uploads/')
# thumbor
getattr(settings, 'THUMBOR_SERVER', 'http://localhost:8888')
getattr(settings, 'THUMBOR_MEDIA_URL', 'http://localhost:8000/media')
getattr(settings, 'THUMBOR_SECURITY_KEY', '')
## Instruction:
Add haystack connections simples engine om opps
## Code After:
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Core')
settings.INSTALLED_APPS += (
'opps.article',
'opps.image',
'opps.channel',
'opps.source',
'django.contrib.redirects',
'django_thumbor',
'googl',
'redactor',
'static_sitemaps',
'tagging',)
settings.MIDDLEWARE_CLASSES += (
'django.contrib.redirects.middleware.RedirectFallbackMiddleware',)
# Opps
getattr(settings, 'OPPS_SHORT', 'googl')
getattr(settings, 'OPPS_SHORT_URL', 'googl.short.GooglUrlShort')
# Sitemap
if not hasattr(settings, 'STATICSITEMAPS_ROOT_SITEMAP'):
settings.STATICSITEMAPS_ROOT_SITEMAP = 'opps.sitemaps.feed.sitemaps'
# Haystack
getattr(settings, 'HAYSTACK_CONNECTIONS', {
'default': {'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'}})
# redactor
getattr(settings, 'REDACTOR_OPTIONS', {'lang': 'en'})
getattr(settings, 'REDACTOR_UPLOAD', 'uploads/')
# thumbor
getattr(settings, 'THUMBOR_SERVER', 'http://localhost:8888')
getattr(settings, 'THUMBOR_MEDIA_URL', 'http://localhost:8000/media')
getattr(settings, 'THUMBOR_SECURITY_KEY', '')
|
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Core')
settings.INSTALLED_APPS += (
'opps.article',
'opps.image',
'opps.channel',
'opps.source',
'django.contrib.redirects',
'django_thumbor',
'googl',
'redactor',
'static_sitemaps',
'tagging',)
settings.MIDDLEWARE_CLASSES += (
'django.contrib.redirects.middleware.RedirectFallbackMiddleware',)
# Opps
getattr(settings, 'OPPS_SHORT', 'googl')
getattr(settings, 'OPPS_SHORT_URL', 'googl.short.GooglUrlShort')
# Sitemap
if not hasattr(settings, 'STATICSITEMAPS_ROOT_SITEMAP'):
settings.STATICSITEMAPS_ROOT_SITEMAP = 'opps.sitemaps.feed.sitemaps'
+ # Haystack
+ getattr(settings, 'HAYSTACK_CONNECTIONS', {
+ 'default': {'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'}})
+
# redactor
getattr(settings, 'REDACTOR_OPTIONS', {'lang': 'en'})
getattr(settings, 'REDACTOR_UPLOAD', 'uploads/')
# thumbor
getattr(settings, 'THUMBOR_SERVER', 'http://localhost:8888')
getattr(settings, 'THUMBOR_MEDIA_URL', 'http://localhost:8000/media')
getattr(settings, 'THUMBOR_SECURITY_KEY', '')
|
366316b0ea20ae178670581b61c52c481682d2b0
|
cosmic_ray/operators/exception_replacer.py
|
cosmic_ray/operators/exception_replacer.py
|
import ast
import builtins
from .operator import Operator
class OutOfNoWhereException(Exception):
pass
setattr(builtins, OutOfNoWhereException.__name__, OutOfNoWhereException)
class ExceptionReplacer(Operator):
"""An operator that modifies exception handlers."""
def visit_ExceptHandler(self, node): # noqa
return self.visit_mutation_site(node)
def mutate(self, node, _):
"""Modify the exception handler with another exception type."""
except_id = OutOfNoWhereException.__name__
except_type = ast.Name(id=except_id, ctx=ast.Load())
new_node = ast.ExceptHandler(type=except_type, name=node.name,
body=node.body)
return new_node
|
import ast
import builtins
from .operator import Operator
class CosmicRayTestingException(Exception):
pass
setattr(builtins, CosmicRayTestingException.__name__, CosmicRayTestingException)
class ExceptionReplacer(Operator):
"""An operator that modifies exception handlers."""
def visit_ExceptHandler(self, node): # noqa
return self.visit_mutation_site(node)
def mutate(self, node, _):
"""Modify the exception handler with another exception type."""
except_id = CosmicRayTestingException.__name__
except_type = ast.Name(id=except_id, ctx=ast.Load())
new_node = ast.ExceptHandler(type=except_type, name=node.name,
body=node.body)
return new_node
|
Change exception name to CosmicRayTestingException
|
Change exception name to CosmicRayTestingException
|
Python
|
mit
|
sixty-north/cosmic-ray
|
import ast
import builtins
from .operator import Operator
- class OutOfNoWhereException(Exception):
+ class CosmicRayTestingException(Exception):
pass
- setattr(builtins, OutOfNoWhereException.__name__, OutOfNoWhereException)
+ setattr(builtins, CosmicRayTestingException.__name__, CosmicRayTestingException)
class ExceptionReplacer(Operator):
"""An operator that modifies exception handlers."""
def visit_ExceptHandler(self, node): # noqa
return self.visit_mutation_site(node)
def mutate(self, node, _):
"""Modify the exception handler with another exception type."""
- except_id = OutOfNoWhereException.__name__
+ except_id = CosmicRayTestingException.__name__
except_type = ast.Name(id=except_id, ctx=ast.Load())
new_node = ast.ExceptHandler(type=except_type, name=node.name,
body=node.body)
return new_node
|
Change exception name to CosmicRayTestingException
|
## Code Before:
import ast
import builtins
from .operator import Operator
class OutOfNoWhereException(Exception):
pass
setattr(builtins, OutOfNoWhereException.__name__, OutOfNoWhereException)
class ExceptionReplacer(Operator):
"""An operator that modifies exception handlers."""
def visit_ExceptHandler(self, node): # noqa
return self.visit_mutation_site(node)
def mutate(self, node, _):
"""Modify the exception handler with another exception type."""
except_id = OutOfNoWhereException.__name__
except_type = ast.Name(id=except_id, ctx=ast.Load())
new_node = ast.ExceptHandler(type=except_type, name=node.name,
body=node.body)
return new_node
## Instruction:
Change exception name to CosmicRayTestingException
## Code After:
import ast
import builtins
from .operator import Operator
class CosmicRayTestingException(Exception):
pass
setattr(builtins, CosmicRayTestingException.__name__, CosmicRayTestingException)
class ExceptionReplacer(Operator):
"""An operator that modifies exception handlers."""
def visit_ExceptHandler(self, node): # noqa
return self.visit_mutation_site(node)
def mutate(self, node, _):
"""Modify the exception handler with another exception type."""
except_id = CosmicRayTestingException.__name__
except_type = ast.Name(id=except_id, ctx=ast.Load())
new_node = ast.ExceptHandler(type=except_type, name=node.name,
body=node.body)
return new_node
|
import ast
import builtins
from .operator import Operator
- class OutOfNoWhereException(Exception):
+ class CosmicRayTestingException(Exception):
pass
- setattr(builtins, OutOfNoWhereException.__name__, OutOfNoWhereException)
+ setattr(builtins, CosmicRayTestingException.__name__, CosmicRayTestingException)
class ExceptionReplacer(Operator):
"""An operator that modifies exception handlers."""
def visit_ExceptHandler(self, node): # noqa
return self.visit_mutation_site(node)
def mutate(self, node, _):
"""Modify the exception handler with another exception type."""
- except_id = OutOfNoWhereException.__name__
? ^^ ^^^^^^^^^
+ except_id = CosmicRayTestingException.__name__
? ^^^^^^^^^^^^ ^^^
except_type = ast.Name(id=except_id, ctx=ast.Load())
new_node = ast.ExceptHandler(type=except_type, name=node.name,
body=node.body)
return new_node
|
fb792452d27be4c6015f417520c600a4b902b721
|
learning_journal/tests/test_views.py
|
learning_journal/tests/test_views.py
|
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
app = main({}, **settings)
return webtest.TestApp(app)
def test_access_to_view(app):
response = app.get('/login')
assert response.status_code == 200
|
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
app = main({}, **settings)
return webtest.TestApp(app)
def test_access_to_view(app):
response = app.get('/login')
assert response.status_code == 200
def test_no_access_to_view(app):
response = app.get('/login')
assert response.status_code == 403
|
Add test to assert no access to app
|
Add test to assert no access to app
|
Python
|
mit
|
DZwell/learning_journal,DZwell/learning_journal,DZwell/learning_journal
|
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
app = main({}, **settings)
return webtest.TestApp(app)
def test_access_to_view(app):
response = app.get('/login')
assert response.status_code == 200
+
+ def test_no_access_to_view(app):
+ response = app.get('/login')
+ assert response.status_code == 403
+
|
Add test to assert no access to app
|
## Code Before:
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
app = main({}, **settings)
return webtest.TestApp(app)
def test_access_to_view(app):
response = app.get('/login')
assert response.status_code == 200
## Instruction:
Add test to assert no access to app
## Code After:
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
app = main({}, **settings)
return webtest.TestApp(app)
def test_access_to_view(app):
response = app.get('/login')
assert response.status_code == 200
def test_no_access_to_view(app):
response = app.get('/login')
assert response.status_code == 403
|
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
app = main({}, **settings)
return webtest.TestApp(app)
def test_access_to_view(app):
response = app.get('/login')
assert response.status_code == 200
+
+
+ def test_no_access_to_view(app):
+ response = app.get('/login')
+ assert response.status_code == 403
|
5af9f2cd214f12e2d16b696a0c62856e389b1397
|
test/test_doc.py
|
test/test_doc.py
|
import types
from mpi4py import MPI
import mpiunittest as unittest
ModuleType = type(MPI)
ClassType = type(MPI.Comm)
FunctionType = type(MPI.Init)
MethodDescrType = type(MPI.Comm.Get_rank)
GetSetDescrType = type(MPI.Comm.rank)
def getdocstr(mc, docstrings):
if type(mc) in (ModuleType, ClassType):
name = getattr(mc, '__name__')
if name in ('__builtin__', 'builtin'): return
doc = getattr(mc, '__doc__', None)
docstrings[name] = doc
for k, v in vars(mc).items():
getdocstr(v, docstrings)
elif type(mc) in (FunctionType, MethodDescrType, GetSetDescrType):
name = getattr(mc, '__name__')
if name in ('__builtin__', 'builtin'): return
doc = getattr(mc, '__doc__', None)
docstrings[name] = doc
class TestDoc(unittest.TestCase):
def testDoc(self):
missing = False
docs = { }
getdocstr(MPI, docs)
for k in docs:
if not k.startswith('_'):
doc = docs[k]
if doc is None:
print ("'%s': missing docstring" % k)
missing = True
else:
doc = doc.strip()
if not doc:
print ("'%s': empty docstring" % k)
missing = True
self.assertFalse(missing)
if __name__ == '__main__':
unittest.main()
|
import types
from mpi4py import MPI
import mpiunittest as unittest
ModuleType = type(MPI)
ClassType = type(MPI.Comm)
FunctionType = type(MPI.Init)
MethodDescrType = type(MPI.Comm.Get_rank)
GetSetDescrType = type(MPI.Comm.rank)
def getdocstr(mc, docstrings, namespace=None):
name = getattr(mc, '__name__', None)
if name is None: return
if name in ('__builtin__', 'builtins'): return
if name.startswith('_'): return
if namespace: name = '%s.%s' % (namespace, name)
if type(mc) in (ModuleType, ClassType):
doc = getattr(mc, '__doc__', None)
docstrings[name] = doc
for k, v in vars(mc).items():
getdocstr(v, docstrings, name)
elif type(mc) in (FunctionType, MethodDescrType, GetSetDescrType):
doc = getattr(mc, '__doc__', None)
docstrings[name] = doc
class TestDoc(unittest.TestCase):
def testDoc(self):
missing = False
docs = { }
getdocstr(MPI, docs)
for k in docs:
if not k.startswith('_'):
doc = docs[k]
if doc is None:
print ("'%s': missing docstring" % k)
missing = True
else:
doc = doc.strip()
if not doc:
print ("'%s': empty docstring" % k)
missing = True
self.assertFalse(missing)
if __name__ == '__main__':
unittest.main()
|
Improve test script, report namespaces for stuff missing docstrings
|
Improve test script, report namespaces for stuff missing docstrings
|
Python
|
bsd-2-clause
|
pressel/mpi4py,pressel/mpi4py,pressel/mpi4py,mpi4py/mpi4py,pressel/mpi4py,mpi4py/mpi4py,mpi4py/mpi4py
|
import types
from mpi4py import MPI
import mpiunittest as unittest
ModuleType = type(MPI)
ClassType = type(MPI.Comm)
FunctionType = type(MPI.Init)
MethodDescrType = type(MPI.Comm.Get_rank)
GetSetDescrType = type(MPI.Comm.rank)
- def getdocstr(mc, docstrings):
+ def getdocstr(mc, docstrings, namespace=None):
+ name = getattr(mc, '__name__', None)
+ if name is None: return
+ if name in ('__builtin__', 'builtins'): return
+ if name.startswith('_'): return
+ if namespace: name = '%s.%s' % (namespace, name)
if type(mc) in (ModuleType, ClassType):
- name = getattr(mc, '__name__')
- if name in ('__builtin__', 'builtin'): return
doc = getattr(mc, '__doc__', None)
docstrings[name] = doc
for k, v in vars(mc).items():
- getdocstr(v, docstrings)
+ getdocstr(v, docstrings, name)
elif type(mc) in (FunctionType, MethodDescrType, GetSetDescrType):
- name = getattr(mc, '__name__')
- if name in ('__builtin__', 'builtin'): return
doc = getattr(mc, '__doc__', None)
docstrings[name] = doc
class TestDoc(unittest.TestCase):
def testDoc(self):
missing = False
docs = { }
getdocstr(MPI, docs)
for k in docs:
if not k.startswith('_'):
doc = docs[k]
if doc is None:
print ("'%s': missing docstring" % k)
missing = True
else:
doc = doc.strip()
if not doc:
print ("'%s': empty docstring" % k)
missing = True
self.assertFalse(missing)
if __name__ == '__main__':
unittest.main()
|
Improve test script, report namespaces for stuff missing docstrings
|
## Code Before:
import types
from mpi4py import MPI
import mpiunittest as unittest
ModuleType = type(MPI)
ClassType = type(MPI.Comm)
FunctionType = type(MPI.Init)
MethodDescrType = type(MPI.Comm.Get_rank)
GetSetDescrType = type(MPI.Comm.rank)
def getdocstr(mc, docstrings):
if type(mc) in (ModuleType, ClassType):
name = getattr(mc, '__name__')
if name in ('__builtin__', 'builtin'): return
doc = getattr(mc, '__doc__', None)
docstrings[name] = doc
for k, v in vars(mc).items():
getdocstr(v, docstrings)
elif type(mc) in (FunctionType, MethodDescrType, GetSetDescrType):
name = getattr(mc, '__name__')
if name in ('__builtin__', 'builtin'): return
doc = getattr(mc, '__doc__', None)
docstrings[name] = doc
class TestDoc(unittest.TestCase):
def testDoc(self):
missing = False
docs = { }
getdocstr(MPI, docs)
for k in docs:
if not k.startswith('_'):
doc = docs[k]
if doc is None:
print ("'%s': missing docstring" % k)
missing = True
else:
doc = doc.strip()
if not doc:
print ("'%s': empty docstring" % k)
missing = True
self.assertFalse(missing)
if __name__ == '__main__':
unittest.main()
## Instruction:
Improve test script, report namespaces for stuff missing docstrings
## Code After:
import types
from mpi4py import MPI
import mpiunittest as unittest
ModuleType = type(MPI)
ClassType = type(MPI.Comm)
FunctionType = type(MPI.Init)
MethodDescrType = type(MPI.Comm.Get_rank)
GetSetDescrType = type(MPI.Comm.rank)
def getdocstr(mc, docstrings, namespace=None):
name = getattr(mc, '__name__', None)
if name is None: return
if name in ('__builtin__', 'builtins'): return
if name.startswith('_'): return
if namespace: name = '%s.%s' % (namespace, name)
if type(mc) in (ModuleType, ClassType):
doc = getattr(mc, '__doc__', None)
docstrings[name] = doc
for k, v in vars(mc).items():
getdocstr(v, docstrings, name)
elif type(mc) in (FunctionType, MethodDescrType, GetSetDescrType):
doc = getattr(mc, '__doc__', None)
docstrings[name] = doc
class TestDoc(unittest.TestCase):
def testDoc(self):
missing = False
docs = { }
getdocstr(MPI, docs)
for k in docs:
if not k.startswith('_'):
doc = docs[k]
if doc is None:
print ("'%s': missing docstring" % k)
missing = True
else:
doc = doc.strip()
if not doc:
print ("'%s': empty docstring" % k)
missing = True
self.assertFalse(missing)
if __name__ == '__main__':
unittest.main()
|
import types
from mpi4py import MPI
import mpiunittest as unittest
ModuleType = type(MPI)
ClassType = type(MPI.Comm)
FunctionType = type(MPI.Init)
MethodDescrType = type(MPI.Comm.Get_rank)
GetSetDescrType = type(MPI.Comm.rank)
- def getdocstr(mc, docstrings):
+ def getdocstr(mc, docstrings, namespace=None):
? ++++++++++++++++
+ name = getattr(mc, '__name__', None)
+ if name is None: return
+ if name in ('__builtin__', 'builtins'): return
+ if name.startswith('_'): return
+ if namespace: name = '%s.%s' % (namespace, name)
if type(mc) in (ModuleType, ClassType):
- name = getattr(mc, '__name__')
- if name in ('__builtin__', 'builtin'): return
doc = getattr(mc, '__doc__', None)
docstrings[name] = doc
for k, v in vars(mc).items():
- getdocstr(v, docstrings)
+ getdocstr(v, docstrings, name)
? ++++++
elif type(mc) in (FunctionType, MethodDescrType, GetSetDescrType):
- name = getattr(mc, '__name__')
- if name in ('__builtin__', 'builtin'): return
doc = getattr(mc, '__doc__', None)
docstrings[name] = doc
class TestDoc(unittest.TestCase):
def testDoc(self):
missing = False
docs = { }
getdocstr(MPI, docs)
for k in docs:
if not k.startswith('_'):
doc = docs[k]
if doc is None:
print ("'%s': missing docstring" % k)
missing = True
else:
doc = doc.strip()
if not doc:
print ("'%s': empty docstring" % k)
missing = True
self.assertFalse(missing)
if __name__ == '__main__':
unittest.main()
|
af3f0b520a868832f708e7692736005e6aee9c4b
|
core/admin.py
|
core/admin.py
|
from django.contrib import admin
from .models import FileUpload
class FileUploadAdmin(admin.ModelAdmin):
readonly_fields = ['owner']
def get_fields(self, request, obj=None):
if request.user.is_superuser:
return ['file', 'url_name', 'owner']
else:
return ['file', 'url_name']
def get_readonly_fields(self, request, obj=None):
if obj:
return self.readonly_fields + ['file', 'url_name']
return self.readonly_fields
def has_change_permission(self, request, obj=None):
has_class_permission = super().has_change_permission(request, obj)
if not has_class_permission:
return False
if obj is None:
return True
owns_object = request.user.id == obj.owner.id
if request.user.is_superuser or owns_object:
return True
return False
def has_delete_permission(self, request, obj=None):
return self.has_change_permission(request, obj)
def get_queryset(self, request):
if request.user.is_superuser:
return FileUpload.objects.all()
return FileUpload.objects.filter(owner=request.user)
def save_model(self, request, obj, form, change):
if not change:
obj.owner = request.user
obj.save()
admin.site.register(FileUpload, FileUploadAdmin)
|
from django.contrib import admin
from .models import FileUpload
class FileUploadAdmin(admin.ModelAdmin):
def get_changeform_initial_data(self, request):
return {'owner': request.user}
def get_fields(self, request, obj=None):
if request.user.is_superuser:
return ['file', 'url_name', 'owner']
else:
return ['file', 'url_name']
def get_readonly_fields(self, request, obj=None):
if obj:
return self.readonly_fields + ('file', 'url_name')
return self.readonly_fields
def has_change_permission(self, request, obj=None):
has_class_permission = super().has_change_permission(request, obj)
if not has_class_permission:
return False
if obj is None:
return True
owns_object = request.user.id == obj.owner.id
if request.user.is_superuser or owns_object:
return True
return False
def has_delete_permission(self, request, obj=None):
return self.has_change_permission(request, obj)
def get_queryset(self, request):
if request.user.is_superuser:
return FileUpload.objects.all()
return FileUpload.objects.filter(owner=request.user)
def save_model(self, request, obj, form, change):
if not change:
obj.owner = request.user
obj.save()
admin.site.register(FileUpload, FileUploadAdmin)
|
Make owner visible to superusers
|
Make owner visible to superusers
|
Python
|
mit
|
swarmer/files,swarmer/files
|
from django.contrib import admin
from .models import FileUpload
class FileUploadAdmin(admin.ModelAdmin):
- readonly_fields = ['owner']
+ def get_changeform_initial_data(self, request):
+ return {'owner': request.user}
def get_fields(self, request, obj=None):
if request.user.is_superuser:
return ['file', 'url_name', 'owner']
else:
return ['file', 'url_name']
def get_readonly_fields(self, request, obj=None):
if obj:
- return self.readonly_fields + ['file', 'url_name']
+ return self.readonly_fields + ('file', 'url_name')
return self.readonly_fields
def has_change_permission(self, request, obj=None):
has_class_permission = super().has_change_permission(request, obj)
if not has_class_permission:
return False
if obj is None:
return True
owns_object = request.user.id == obj.owner.id
if request.user.is_superuser or owns_object:
return True
return False
def has_delete_permission(self, request, obj=None):
return self.has_change_permission(request, obj)
def get_queryset(self, request):
if request.user.is_superuser:
return FileUpload.objects.all()
return FileUpload.objects.filter(owner=request.user)
def save_model(self, request, obj, form, change):
if not change:
obj.owner = request.user
obj.save()
admin.site.register(FileUpload, FileUploadAdmin)
|
Make owner visible to superusers
|
## Code Before:
from django.contrib import admin
from .models import FileUpload
class FileUploadAdmin(admin.ModelAdmin):
readonly_fields = ['owner']
def get_fields(self, request, obj=None):
if request.user.is_superuser:
return ['file', 'url_name', 'owner']
else:
return ['file', 'url_name']
def get_readonly_fields(self, request, obj=None):
if obj:
return self.readonly_fields + ['file', 'url_name']
return self.readonly_fields
def has_change_permission(self, request, obj=None):
has_class_permission = super().has_change_permission(request, obj)
if not has_class_permission:
return False
if obj is None:
return True
owns_object = request.user.id == obj.owner.id
if request.user.is_superuser or owns_object:
return True
return False
def has_delete_permission(self, request, obj=None):
return self.has_change_permission(request, obj)
def get_queryset(self, request):
if request.user.is_superuser:
return FileUpload.objects.all()
return FileUpload.objects.filter(owner=request.user)
def save_model(self, request, obj, form, change):
if not change:
obj.owner = request.user
obj.save()
admin.site.register(FileUpload, FileUploadAdmin)
## Instruction:
Make owner visible to superusers
## Code After:
from django.contrib import admin
from .models import FileUpload
class FileUploadAdmin(admin.ModelAdmin):
def get_changeform_initial_data(self, request):
return {'owner': request.user}
def get_fields(self, request, obj=None):
if request.user.is_superuser:
return ['file', 'url_name', 'owner']
else:
return ['file', 'url_name']
def get_readonly_fields(self, request, obj=None):
if obj:
return self.readonly_fields + ('file', 'url_name')
return self.readonly_fields
def has_change_permission(self, request, obj=None):
has_class_permission = super().has_change_permission(request, obj)
if not has_class_permission:
return False
if obj is None:
return True
owns_object = request.user.id == obj.owner.id
if request.user.is_superuser or owns_object:
return True
return False
def has_delete_permission(self, request, obj=None):
return self.has_change_permission(request, obj)
def get_queryset(self, request):
if request.user.is_superuser:
return FileUpload.objects.all()
return FileUpload.objects.filter(owner=request.user)
def save_model(self, request, obj, form, change):
if not change:
obj.owner = request.user
obj.save()
admin.site.register(FileUpload, FileUploadAdmin)
|
from django.contrib import admin
from .models import FileUpload
class FileUploadAdmin(admin.ModelAdmin):
- readonly_fields = ['owner']
+ def get_changeform_initial_data(self, request):
+ return {'owner': request.user}
def get_fields(self, request, obj=None):
if request.user.is_superuser:
return ['file', 'url_name', 'owner']
else:
return ['file', 'url_name']
def get_readonly_fields(self, request, obj=None):
if obj:
- return self.readonly_fields + ['file', 'url_name']
? ^ ^
+ return self.readonly_fields + ('file', 'url_name')
? ^ ^
return self.readonly_fields
def has_change_permission(self, request, obj=None):
has_class_permission = super().has_change_permission(request, obj)
if not has_class_permission:
return False
if obj is None:
return True
owns_object = request.user.id == obj.owner.id
if request.user.is_superuser or owns_object:
return True
return False
def has_delete_permission(self, request, obj=None):
return self.has_change_permission(request, obj)
def get_queryset(self, request):
if request.user.is_superuser:
return FileUpload.objects.all()
return FileUpload.objects.filter(owner=request.user)
def save_model(self, request, obj, form, change):
if not change:
obj.owner = request.user
obj.save()
admin.site.register(FileUpload, FileUploadAdmin)
|
91c3f218bdd5a660568238daa16c217501d39d05
|
create_database.py
|
create_database.py
|
import author
import commit
import config
import os
import pygit2
import sqlalchemy
repo = pygit2.Repository(config.REPO_PATH)
# Probably want to completly reset the DB
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
engine = sqlalchemy.create_engine(config.DB_URL, echo=True)
config.BASE.metadata.create_all(engine)
|
from author import Author
from commit import Commit
import config
import os
import pygit2
import sqlalchemy
# If it exists and we want to reset the DB, remove the file
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
engine = sqlalchemy.create_engine(config.DB_URL, echo=False)
config.BASE.metadata.create_all(engine)
Session = sqlalchemy.orm.sessionmaker(bind=engine)
session = Session()
repo = pygit2.Repository(config.REPO_PATH)
for commit in repo.walk(repo.head.target, pygit2.GIT_SORT_TIME):
author = session.query(Author).filter(Author.email == commit.author.email).first()
if not author:
author = Author(commit.author.name, commit.author.email)
session.add(author)
committer = session.query(Author).filter(Author.email == commit.committer.email).first()
if not committer:
committer = Author(commit.committer.name, commit.committer.email)
session.add(committer)
session.add(Commit(commit.message, commit.commit_time, committer.email, author.email))
session.commit()
|
Create database now properly loads all authors and commits into the repository
|
Create database now properly loads all authors and commits into the repository
|
Python
|
mit
|
mglidden/git-analysis,mglidden/git-analysis
|
- import author
- import commit
+ from author import Author
+ from commit import Commit
import config
import os
import pygit2
import sqlalchemy
+ # If it exists and we want to reset the DB, remove the file
- repo = pygit2.Repository(config.REPO_PATH)
-
- # Probably want to completly reset the DB
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
- engine = sqlalchemy.create_engine(config.DB_URL, echo=True)
+ engine = sqlalchemy.create_engine(config.DB_URL, echo=False)
config.BASE.metadata.create_all(engine)
+ Session = sqlalchemy.orm.sessionmaker(bind=engine)
+ session = Session()
+ repo = pygit2.Repository(config.REPO_PATH)
+ for commit in repo.walk(repo.head.target, pygit2.GIT_SORT_TIME):
+ author = session.query(Author).filter(Author.email == commit.author.email).first()
+ if not author:
+ author = Author(commit.author.name, commit.author.email)
+ session.add(author)
+ committer = session.query(Author).filter(Author.email == commit.committer.email).first()
+ if not committer:
+ committer = Author(commit.committer.name, commit.committer.email)
+ session.add(committer)
+
+ session.add(Commit(commit.message, commit.commit_time, committer.email, author.email))
+
+ session.commit()
+
|
Create database now properly loads all authors and commits into the repository
|
## Code Before:
import author
import commit
import config
import os
import pygit2
import sqlalchemy
repo = pygit2.Repository(config.REPO_PATH)
# Probably want to completly reset the DB
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
engine = sqlalchemy.create_engine(config.DB_URL, echo=True)
config.BASE.metadata.create_all(engine)
## Instruction:
Create database now properly loads all authors and commits into the repository
## Code After:
from author import Author
from commit import Commit
import config
import os
import pygit2
import sqlalchemy
# If it exists and we want to reset the DB, remove the file
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
engine = sqlalchemy.create_engine(config.DB_URL, echo=False)
config.BASE.metadata.create_all(engine)
Session = sqlalchemy.orm.sessionmaker(bind=engine)
session = Session()
repo = pygit2.Repository(config.REPO_PATH)
for commit in repo.walk(repo.head.target, pygit2.GIT_SORT_TIME):
author = session.query(Author).filter(Author.email == commit.author.email).first()
if not author:
author = Author(commit.author.name, commit.author.email)
session.add(author)
committer = session.query(Author).filter(Author.email == commit.committer.email).first()
if not committer:
committer = Author(commit.committer.name, commit.committer.email)
session.add(committer)
session.add(Commit(commit.message, commit.commit_time, committer.email, author.email))
session.commit()
|
- import author
- import commit
+ from author import Author
+ from commit import Commit
import config
import os
import pygit2
import sqlalchemy
+ # If it exists and we want to reset the DB, remove the file
- repo = pygit2.Repository(config.REPO_PATH)
-
- # Probably want to completly reset the DB
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
- engine = sqlalchemy.create_engine(config.DB_URL, echo=True)
? ^^^
+ engine = sqlalchemy.create_engine(config.DB_URL, echo=False)
? ^^^^
config.BASE.metadata.create_all(engine)
+ Session = sqlalchemy.orm.sessionmaker(bind=engine)
+ session = Session()
+ repo = pygit2.Repository(config.REPO_PATH)
+ for commit in repo.walk(repo.head.target, pygit2.GIT_SORT_TIME):
+ author = session.query(Author).filter(Author.email == commit.author.email).first()
+ if not author:
+ author = Author(commit.author.name, commit.author.email)
+ session.add(author)
+
+ committer = session.query(Author).filter(Author.email == commit.committer.email).first()
+ if not committer:
+ committer = Author(commit.committer.name, commit.committer.email)
+ session.add(committer)
+
+ session.add(Commit(commit.message, commit.commit_time, committer.email, author.email))
+
+ session.commit()
|
fb3a0db023161fbf5b08147dfac1b56989918bf6
|
tvseries/core/models.py
|
tvseries/core/models.py
|
from tvseries.ext import db
class TVSerie(db.Model):
__table_args__ = {'sqlite_autoincrement': True}
id = db.Column(db.Integer(),
nullable=False, unique=True,
autoincrement=True, primary_key=True)
name = db.Column(db.String(50), unique=True, nullable=False)
description = db.Column(db.Text, nullable=True)
episodies_number = db.Column(db.Integer, nullable=False, default=1)
author = db.Column(db.String(50), nullable=False)
def __repr__(self):
if self.description:
self.description = "{0}...".format(self.description[0:10])
return ("TVSerie(id={!r}, name={!r}, "
"description={!r}, episodies_number={!r})").format(
self.id, self.name,
self.description,
self.episodies_number)
|
from tvseries.ext import db
class TVSerie(db.Model):
id = db.Column(db.Integer(),
nullable=False, unique=True,
autoincrement=True, primary_key=True)
name = db.Column(db.String(50), unique=True, nullable=False)
description = db.Column(db.Text, nullable=True)
episodies_number = db.Column(db.Integer, nullable=False, default=1)
author = db.Column(db.String(50), nullable=False)
def __repr__(self):
if self.description:
self.description = "{0}...".format(self.description[0:10])
return ("TVSerie(id={!r}, name={!r}, "
"description={!r}, episodies_number={!r})").format(
self.id, self.name,
self.description,
self.episodies_number)
|
Remove autoincrement sqlite paramether from model
|
Remove autoincrement sqlite paramether from model
|
Python
|
mit
|
rafaelhenrique/flask_tutorial,python-sorocaba/flask_tutorial,python-sorocaba/flask_tutorial,rafaelhenrique/flask_tutorial,python-sorocaba/flask_tutorial
|
from tvseries.ext import db
class TVSerie(db.Model):
- __table_args__ = {'sqlite_autoincrement': True}
id = db.Column(db.Integer(),
nullable=False, unique=True,
autoincrement=True, primary_key=True)
name = db.Column(db.String(50), unique=True, nullable=False)
description = db.Column(db.Text, nullable=True)
episodies_number = db.Column(db.Integer, nullable=False, default=1)
author = db.Column(db.String(50), nullable=False)
def __repr__(self):
if self.description:
self.description = "{0}...".format(self.description[0:10])
return ("TVSerie(id={!r}, name={!r}, "
"description={!r}, episodies_number={!r})").format(
self.id, self.name,
self.description,
self.episodies_number)
|
Remove autoincrement sqlite paramether from model
|
## Code Before:
from tvseries.ext import db
class TVSerie(db.Model):
__table_args__ = {'sqlite_autoincrement': True}
id = db.Column(db.Integer(),
nullable=False, unique=True,
autoincrement=True, primary_key=True)
name = db.Column(db.String(50), unique=True, nullable=False)
description = db.Column(db.Text, nullable=True)
episodies_number = db.Column(db.Integer, nullable=False, default=1)
author = db.Column(db.String(50), nullable=False)
def __repr__(self):
if self.description:
self.description = "{0}...".format(self.description[0:10])
return ("TVSerie(id={!r}, name={!r}, "
"description={!r}, episodies_number={!r})").format(
self.id, self.name,
self.description,
self.episodies_number)
## Instruction:
Remove autoincrement sqlite paramether from model
## Code After:
from tvseries.ext import db
class TVSerie(db.Model):
id = db.Column(db.Integer(),
nullable=False, unique=True,
autoincrement=True, primary_key=True)
name = db.Column(db.String(50), unique=True, nullable=False)
description = db.Column(db.Text, nullable=True)
episodies_number = db.Column(db.Integer, nullable=False, default=1)
author = db.Column(db.String(50), nullable=False)
def __repr__(self):
if self.description:
self.description = "{0}...".format(self.description[0:10])
return ("TVSerie(id={!r}, name={!r}, "
"description={!r}, episodies_number={!r})").format(
self.id, self.name,
self.description,
self.episodies_number)
|
from tvseries.ext import db
class TVSerie(db.Model):
- __table_args__ = {'sqlite_autoincrement': True}
id = db.Column(db.Integer(),
nullable=False, unique=True,
autoincrement=True, primary_key=True)
name = db.Column(db.String(50), unique=True, nullable=False)
description = db.Column(db.Text, nullable=True)
episodies_number = db.Column(db.Integer, nullable=False, default=1)
author = db.Column(db.String(50), nullable=False)
def __repr__(self):
if self.description:
self.description = "{0}...".format(self.description[0:10])
return ("TVSerie(id={!r}, name={!r}, "
"description={!r}, episodies_number={!r})").format(
self.id, self.name,
self.description,
self.episodies_number)
|
4b665bb2e85994e3df0324afacb2453b8f4998a1
|
contact_map/tests/test_dask_runner.py
|
contact_map/tests/test_dask_runner.py
|
from .utils import *
from contact_map.dask_runner import *
class TestDaskContactFrequency(object):
def test_dask_integration(self):
# this is an integration test to check that dask works
dask = pytest.importorskip('dask') # pylint: disable=W0612
distributed = pytest.importorskip('dask.distributed')
# Explicitly set only 4 workers on Travis instead of 31
# Fix copied from https://github.com/spencerahill/aospy/pull/220/files
cluster = distributed.LocalCluster(n_workers=4)
client = distributed.Client(cluster)
filename = find_testfile("trajectory.pdb")
dask_freq = DaskContactFrequency(client, filename, cutoff=0.075,
n_neighbors_ignored=0)
client.close()
assert dask_freq.n_frames == 5
|
from .utils import *
from contact_map.dask_runner import *
def dask_setup_test_cluster(distributed, n_workers=4, n_attempts=3):
"""Set up a test cluster using dask.distributed. Try up to n_attempts
times, and skip the test if all attempts fail.
"""
cluster = None
for _ in range(n_attempts):
try:
cluster = distributed.LocalCluster(n_workers=n_workers)
except distributed.TimeoutError:
continue
else:
return cluster
# only get here if all retries fail
pytest.skip("Failed to set up distributed LocalCluster")
class TestDaskContactFrequency(object):
def test_dask_integration(self):
# this is an integration test to check that dask works
dask = pytest.importorskip('dask') # pylint: disable=W0612
distributed = pytest.importorskip('dask.distributed')
# Explicitly set only 4 workers on Travis instead of 31
# Fix copied from https://github.com/spencerahill/aospy/pull/220/files
cluster = dask_setup_test_cluster(distributed)
client = distributed.Client(cluster)
filename = find_testfile("trajectory.pdb")
dask_freq = DaskContactFrequency(client, filename, cutoff=0.075,
n_neighbors_ignored=0)
client.close()
assert dask_freq.n_frames == 5
|
Handle dask TimeoutError exception in tests
|
Handle dask TimeoutError exception in tests
|
Python
|
lgpl-2.1
|
dwhswenson/contact_map,dwhswenson/contact_map
|
from .utils import *
from contact_map.dask_runner import *
+
+ def dask_setup_test_cluster(distributed, n_workers=4, n_attempts=3):
+ """Set up a test cluster using dask.distributed. Try up to n_attempts
+ times, and skip the test if all attempts fail.
+ """
+ cluster = None
+ for _ in range(n_attempts):
+ try:
+ cluster = distributed.LocalCluster(n_workers=n_workers)
+ except distributed.TimeoutError:
+ continue
+ else:
+ return cluster
+ # only get here if all retries fail
+ pytest.skip("Failed to set up distributed LocalCluster")
+
class TestDaskContactFrequency(object):
def test_dask_integration(self):
# this is an integration test to check that dask works
dask = pytest.importorskip('dask') # pylint: disable=W0612
distributed = pytest.importorskip('dask.distributed')
# Explicitly set only 4 workers on Travis instead of 31
# Fix copied from https://github.com/spencerahill/aospy/pull/220/files
- cluster = distributed.LocalCluster(n_workers=4)
+ cluster = dask_setup_test_cluster(distributed)
client = distributed.Client(cluster)
filename = find_testfile("trajectory.pdb")
dask_freq = DaskContactFrequency(client, filename, cutoff=0.075,
n_neighbors_ignored=0)
client.close()
assert dask_freq.n_frames == 5
|
Handle dask TimeoutError exception in tests
|
## Code Before:
from .utils import *
from contact_map.dask_runner import *
class TestDaskContactFrequency(object):
def test_dask_integration(self):
# this is an integration test to check that dask works
dask = pytest.importorskip('dask') # pylint: disable=W0612
distributed = pytest.importorskip('dask.distributed')
# Explicitly set only 4 workers on Travis instead of 31
# Fix copied from https://github.com/spencerahill/aospy/pull/220/files
cluster = distributed.LocalCluster(n_workers=4)
client = distributed.Client(cluster)
filename = find_testfile("trajectory.pdb")
dask_freq = DaskContactFrequency(client, filename, cutoff=0.075,
n_neighbors_ignored=0)
client.close()
assert dask_freq.n_frames == 5
## Instruction:
Handle dask TimeoutError exception in tests
## Code After:
from .utils import *
from contact_map.dask_runner import *
def dask_setup_test_cluster(distributed, n_workers=4, n_attempts=3):
"""Set up a test cluster using dask.distributed. Try up to n_attempts
times, and skip the test if all attempts fail.
"""
cluster = None
for _ in range(n_attempts):
try:
cluster = distributed.LocalCluster(n_workers=n_workers)
except distributed.TimeoutError:
continue
else:
return cluster
# only get here if all retries fail
pytest.skip("Failed to set up distributed LocalCluster")
class TestDaskContactFrequency(object):
def test_dask_integration(self):
# this is an integration test to check that dask works
dask = pytest.importorskip('dask') # pylint: disable=W0612
distributed = pytest.importorskip('dask.distributed')
# Explicitly set only 4 workers on Travis instead of 31
# Fix copied from https://github.com/spencerahill/aospy/pull/220/files
cluster = dask_setup_test_cluster(distributed)
client = distributed.Client(cluster)
filename = find_testfile("trajectory.pdb")
dask_freq = DaskContactFrequency(client, filename, cutoff=0.075,
n_neighbors_ignored=0)
client.close()
assert dask_freq.n_frames == 5
|
from .utils import *
from contact_map.dask_runner import *
+
+ def dask_setup_test_cluster(distributed, n_workers=4, n_attempts=3):
+ """Set up a test cluster using dask.distributed. Try up to n_attempts
+ times, and skip the test if all attempts fail.
+ """
+ cluster = None
+ for _ in range(n_attempts):
+ try:
+ cluster = distributed.LocalCluster(n_workers=n_workers)
+ except distributed.TimeoutError:
+ continue
+ else:
+ return cluster
+ # only get here if all retries fail
+ pytest.skip("Failed to set up distributed LocalCluster")
+
class TestDaskContactFrequency(object):
def test_dask_integration(self):
# this is an integration test to check that dask works
dask = pytest.importorskip('dask') # pylint: disable=W0612
distributed = pytest.importorskip('dask.distributed')
# Explicitly set only 4 workers on Travis instead of 31
# Fix copied from https://github.com/spencerahill/aospy/pull/220/files
- cluster = distributed.LocalCluster(n_workers=4)
+ cluster = dask_setup_test_cluster(distributed)
client = distributed.Client(cluster)
filename = find_testfile("trajectory.pdb")
dask_freq = DaskContactFrequency(client, filename, cutoff=0.075,
n_neighbors_ignored=0)
client.close()
assert dask_freq.n_frames == 5
|
e31790412c9e869841b448f3e7f8bb4a965da81d
|
mygpo/web/templatetags/devices.py
|
mygpo/web/templatetags/devices.py
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
icon = DEVICE_TYPE_ICONS.get(device, None)
caption = DEVICE_TYPES_DICT.get(device, None)
if icon is not None and caption is not None:
html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
'alt="%(caption)s" class="device_icon"/>') % locals()
return mark_safe(html)
return ''
@register.filter
def device_list(devices):
return mark_safe(', '.join([ '<a href="/device/%s">%s %s</a>' % (d.id, device_icon(d), d.name) for d in devices]))
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
icon = DEVICE_TYPE_ICONS.get(device.type, None)
caption = DEVICE_TYPES_DICT.get(device.type, None)
if icon is not None and caption is not None:
html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
'alt="%(caption)s" class="device_icon"/>') % locals()
return mark_safe(html)
return ''
@register.filter
def device_list(devices):
return mark_safe(', '.join([ '<a href="/device/%s">%s %s</a>' % (d.id, device_icon(d), d.name) for d in devices]))
|
Fix problem with device icons
|
Fix problem with device icons
|
Python
|
agpl-3.0
|
gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
- icon = DEVICE_TYPE_ICONS.get(device, None)
+ icon = DEVICE_TYPE_ICONS.get(device.type, None)
- caption = DEVICE_TYPES_DICT.get(device, None)
+ caption = DEVICE_TYPES_DICT.get(device.type, None)
if icon is not None and caption is not None:
html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
'alt="%(caption)s" class="device_icon"/>') % locals()
return mark_safe(html)
return ''
@register.filter
def device_list(devices):
return mark_safe(', '.join([ '<a href="/device/%s">%s %s</a>' % (d.id, device_icon(d), d.name) for d in devices]))
|
Fix problem with device icons
|
## Code Before:
from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
icon = DEVICE_TYPE_ICONS.get(device, None)
caption = DEVICE_TYPES_DICT.get(device, None)
if icon is not None and caption is not None:
html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
'alt="%(caption)s" class="device_icon"/>') % locals()
return mark_safe(html)
return ''
@register.filter
def device_list(devices):
return mark_safe(', '.join([ '<a href="/device/%s">%s %s</a>' % (d.id, device_icon(d), d.name) for d in devices]))
## Instruction:
Fix problem with device icons
## Code After:
from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
icon = DEVICE_TYPE_ICONS.get(device.type, None)
caption = DEVICE_TYPES_DICT.get(device.type, None)
if icon is not None and caption is not None:
html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
'alt="%(caption)s" class="device_icon"/>') % locals()
return mark_safe(html)
return ''
@register.filter
def device_list(devices):
return mark_safe(', '.join([ '<a href="/device/%s">%s %s</a>' % (d.id, device_icon(d), d.name) for d in devices]))
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
- icon = DEVICE_TYPE_ICONS.get(device, None)
+ icon = DEVICE_TYPE_ICONS.get(device.type, None)
? +++++
- caption = DEVICE_TYPES_DICT.get(device, None)
+ caption = DEVICE_TYPES_DICT.get(device.type, None)
? +++++
if icon is not None and caption is not None:
html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
'alt="%(caption)s" class="device_icon"/>') % locals()
return mark_safe(html)
return ''
@register.filter
def device_list(devices):
return mark_safe(', '.join([ '<a href="/device/%s">%s %s</a>' % (d.id, device_icon(d), d.name) for d in devices]))
|
a1b47d442290ea9ce19e25cd03c1aa5e39ad2ec5
|
scikits/learn/tests/test_pca.py
|
scikits/learn/tests/test_pca.py
|
from nose.tools import assert_equals
from .. import datasets
from ..pca import PCA
iris = datasets.load_iris()
X = iris.data
def test_pca():
"""
PCA
"""
pca = PCA(k=2)
X_r = pca.fit(X).transform(X)
assert_equals(X_r.shape[1], 2)
pca = PCA()
pca.fit(X)
assert_equals(pca.explained_variance_.sum(), 1.0)
|
import numpy as np
from .. import datasets
from ..pca import PCA
iris = datasets.load_iris()
X = iris.data
def test_pca():
"""
PCA
"""
pca = PCA(k=2)
X_r = pca.fit(X).transform(X)
np.testing.assert_equal(X_r.shape[1], 2)
pca = PCA()
pca.fit(X)
np.testing.assert_almost_equal(pca.explained_variance_.sum(), 1.0, 3)
|
Fix tests to be moroe robust
|
BUG: Fix tests to be moroe robust
|
Python
|
bsd-3-clause
|
nvoron23/scikit-learn,B3AU/waveTree,sumspr/scikit-learn,frank-tancf/scikit-learn,madjelan/scikit-learn,mattilyra/scikit-learn,xzh86/scikit-learn,mwv/scikit-learn,yunfeilu/scikit-learn,JsNoNo/scikit-learn,scikit-learn/scikit-learn,Fireblend/scikit-learn,btabibian/scikit-learn,davidgbe/scikit-learn,arabenjamin/scikit-learn,kaichogami/scikit-learn,r-mart/scikit-learn,Titan-C/scikit-learn,JPFrancoia/scikit-learn,sanketloke/scikit-learn,lbishal/scikit-learn,ogrisel/scikit-learn,cdegroc/scikit-learn,andaag/scikit-learn,AlexanderFabisch/scikit-learn,aetilley/scikit-learn,sgenoud/scikit-learn,costypetrisor/scikit-learn,themrmax/scikit-learn,russel1237/scikit-learn,dsullivan7/scikit-learn,ZENGXH/scikit-learn,jereze/scikit-learn,zorojean/scikit-learn,Myasuka/scikit-learn,florian-f/sklearn,NunoEdgarGub1/scikit-learn,abimannans/scikit-learn,pianomania/scikit-learn,ankurankan/scikit-learn,kaichogami/scikit-learn,rahuldhote/scikit-learn,davidgbe/scikit-learn,giorgiop/scikit-learn,Obus/scikit-learn,kagayakidan/scikit-learn,PatrickOReilly/scikit-learn,jmschrei/scikit-learn,PatrickChrist/scikit-learn,ssaeger/scikit-learn,spallavolu/scikit-learn,liberatorqjw/scikit-learn,aflaxman/scikit-learn,jjx02230808/project0223,robbymeals/scikit-learn,wazeerzulfikar/scikit-learn,vivekmishra1991/scikit-learn,B3AU/waveTree,bigdataelephants/scikit-learn,sonnyhu/scikit-learn,ilo10/scikit-learn,cwu2011/scikit-learn,maheshakya/scikit-learn,mlyundin/scikit-learn,depet/scikit-learn,ChanderG/scikit-learn,imaculate/scikit-learn,lenovor/scikit-learn,pnedunuri/scikit-learn,ominux/scikit-learn,roxyboy/scikit-learn,Akshay0724/scikit-learn,scikit-learn/scikit-learn,alexsavio/scikit-learn,eickenberg/scikit-learn,ashhher3/scikit-learn,hsuantien/scikit-learn,chrsrds/scikit-learn,Garrett-R/scikit-learn,loli/sklearn-ensembletrees,voxlol/scikit-learn,tawsifkhan/scikit-learn,ChanChiChoi/scikit-learn,qifeigit/scikit-learn,DonBeo/scikit-learn,Nyker510/scikit-learn,tawsifkhan/scikit-learn,iismd17/scikit-learn,ilo10/scik
it-learn,UNR-AERIAL/scikit-learn,pv/scikit-learn,pianomania/scikit-learn,marcocaccin/scikit-learn,q1ang/scikit-learn,jkarnows/scikit-learn,procoder317/scikit-learn,dsullivan7/scikit-learn,tosolveit/scikit-learn,abimannans/scikit-learn,PrashntS/scikit-learn,ahoyosid/scikit-learn,pv/scikit-learn,ndingwall/scikit-learn,manashmndl/scikit-learn,mlyundin/scikit-learn,joernhees/scikit-learn,glennq/scikit-learn,hainm/scikit-learn,stylianos-kampakis/scikit-learn,yonglehou/scikit-learn,aminert/scikit-learn,vshtanko/scikit-learn,aetilley/scikit-learn,Garrett-R/scikit-learn,Adai0808/scikit-learn,manhhomienbienthuy/scikit-learn,kylerbrown/scikit-learn,vinayak-mehta/scikit-learn,fabioticconi/scikit-learn,glemaitre/scikit-learn,madjelan/scikit-learn,Lawrence-Liu/scikit-learn,evgchz/scikit-learn,jakobworldpeace/scikit-learn,IshankGulati/scikit-learn,jorik041/scikit-learn,hugobowne/scikit-learn,mlyundin/scikit-learn,elkingtonmcb/scikit-learn,vinayak-mehta/scikit-learn,anurag313/scikit-learn,dingocuster/scikit-learn,ky822/scikit-learn,ZENGXH/scikit-learn,devanshdalal/scikit-learn,xubenben/scikit-learn,ZENGXH/scikit-learn,TomDLT/scikit-learn,arabenjamin/scikit-learn,victorbergelin/scikit-learn,hlin117/scikit-learn,elkingtonmcb/scikit-learn,devanshdalal/scikit-learn,shusenl/scikit-learn,luo66/scikit-learn,MartinDelzant/scikit-learn,wlamond/scikit-learn,huzq/scikit-learn,victorbergelin/scikit-learn,Lawrence-Liu/scikit-learn,scikit-learn/scikit-learn,pompiduskus/scikit-learn,alvarofierroclavero/scikit-learn,q1ang/scikit-learn,massmutual/scikit-learn,lenovor/scikit-learn,depet/scikit-learn,ivannz/scikit-learn,larsmans/scikit-learn,MartinSavc/scikit-learn,ogrisel/scikit-learn,LohithBlaze/scikit-learn,AlexandreAbraham/scikit-learn,JPFrancoia/scikit-learn,UNR-AERIAL/scikit-learn,YinongLong/scikit-learn,qifeigit/scikit-learn,0x0all/scikit-learn,Vimos/scikit-learn,spallavolu/scikit-learn,Fireblend/scikit-learn,ZENGXH/scikit-learn,3manuek/scikit-learn,MechCoder/scikit-learn,nvoron23/scikit-lear
n,massmutual/scikit-learn,IshankGulati/scikit-learn,petosegan/scikit-learn,tdhopper/scikit-learn,evgchz/scikit-learn,xwolf12/scikit-learn,mattgiguere/scikit-learn,Obus/scikit-learn,glennq/scikit-learn,vybstat/scikit-learn,evgchz/scikit-learn,JsNoNo/scikit-learn,kmike/scikit-learn,jmschrei/scikit-learn,abhishekgahlot/scikit-learn,mhdella/scikit-learn,DSLituiev/scikit-learn,thilbern/scikit-learn,untom/scikit-learn,CVML/scikit-learn,wanggang3333/scikit-learn,arabenjamin/scikit-learn,nomadcube/scikit-learn,xubenben/scikit-learn,mjudsp/Tsallis,nikitasingh981/scikit-learn,arahuja/scikit-learn,samuel1208/scikit-learn,glouppe/scikit-learn,hdmetor/scikit-learn,walterreade/scikit-learn,ClimbsRocks/scikit-learn,maheshakya/scikit-learn,mikebenfield/scikit-learn,macks22/scikit-learn,henrykironde/scikit-learn,f3r/scikit-learn,pratapvardhan/scikit-learn,khkaminska/scikit-learn,cybernet14/scikit-learn,hsuantien/scikit-learn,vigilv/scikit-learn,fbagirov/scikit-learn,abhishekgahlot/scikit-learn,jaidevd/scikit-learn,3manuek/scikit-learn,sarahgrogan/scikit-learn,ltiao/scikit-learn,florian-f/sklearn,idlead/scikit-learn,rohanp/scikit-learn,altairpearl/scikit-learn,rrohan/scikit-learn,btabibian/scikit-learn,lucidfrontier45/scikit-learn,Nyker510/scikit-learn,shyamalschandra/scikit-learn,giorgiop/scikit-learn,OshynSong/scikit-learn,xuewei4d/scikit-learn,glemaitre/scikit-learn,samuel1208/scikit-learn,terkkila/scikit-learn,YinongLong/scikit-learn,icdishb/scikit-learn,mhdella/scikit-learn,imaculate/scikit-learn,kjung/scikit-learn,NunoEdgarGub1/scikit-learn,clemkoa/scikit-learn,davidgbe/scikit-learn,yunfeilu/scikit-learn,lbishal/scikit-learn,petosegan/scikit-learn,rahul-c1/scikit-learn,ZenDevelopmentSystems/scikit-learn,xavierwu/scikit-learn,zorroblue/scikit-learn,mikebenfield/scikit-learn,vortex-ape/scikit-learn,vinayak-mehta/scikit-learn,shusenl/scikit-learn,saiwing-yeung/scikit-learn,bigdataelephants/scikit-learn,voxlol/scikit-learn,CVML/scikit-learn,adamgreenhall/scikit-learn,clemkoa/scikit
-learn,etkirsch/scikit-learn,DSLituiev/scikit-learn,rajat1994/scikit-learn,jzt5132/scikit-learn,ningchi/scikit-learn,xubenben/scikit-learn,anntzer/scikit-learn,Sentient07/scikit-learn,sanketloke/scikit-learn,zhenv5/scikit-learn,0asa/scikit-learn,imaculate/scikit-learn,yask123/scikit-learn,justincassidy/scikit-learn,heli522/scikit-learn,petosegan/scikit-learn,cainiaocome/scikit-learn,jakirkham/scikit-learn,lazywei/scikit-learn,pnedunuri/scikit-learn,kashif/scikit-learn,tawsifkhan/scikit-learn,frank-tancf/scikit-learn,cl4rke/scikit-learn,abhishekkrthakur/scikit-learn,Titan-C/scikit-learn,florian-f/sklearn,pratapvardhan/scikit-learn,zorojean/scikit-learn,henrykironde/scikit-learn,hrjn/scikit-learn,poryfly/scikit-learn,appapantula/scikit-learn,aewhatley/scikit-learn,manashmndl/scikit-learn,marcocaccin/scikit-learn,mjgrav2001/scikit-learn,ilyes14/scikit-learn,MechCoder/scikit-learn,Djabbz/scikit-learn,robin-lai/scikit-learn,MechCoder/scikit-learn,trankmichael/scikit-learn,appapantula/scikit-learn,JeanKossaifi/scikit-learn,mfjb/scikit-learn,Windy-Ground/scikit-learn,BiaDarkia/scikit-learn,ClimbsRocks/scikit-learn,0x0all/scikit-learn,MartinSavc/scikit-learn,eickenberg/scikit-learn,murali-munna/scikit-learn,murali-munna/scikit-learn,gclenaghan/scikit-learn,IndraVikas/scikit-learn,shenzebang/scikit-learn,hugobowne/scikit-learn,pythonvietnam/scikit-learn,fyffyt/scikit-learn,kevin-intel/scikit-learn,yunfeilu/scikit-learn,Srisai85/scikit-learn,evgchz/scikit-learn,Adai0808/scikit-learn,nesterione/scikit-learn,elkingtonmcb/scikit-learn,zhenv5/scikit-learn,lesteve/scikit-learn,ngoix/OCRF,fengzhyuan/scikit-learn,kagayakidan/scikit-learn,BiaDarkia/scikit-learn,ogrisel/scikit-learn,dhruv13J/scikit-learn,vigilv/scikit-learn,jpautom/scikit-learn,adamgreenhall/scikit-learn,alexeyum/scikit-learn,ElDeveloper/scikit-learn,espg/scikit-learn,russel1237/scikit-learn,ldirer/scikit-learn,PrashntS/scikit-learn,IshankGulati/scikit-learn,xavierwu/scikit-learn,LohithBlaze/scikit-learn,tdhopper/scik
it-learn,trankmichael/scikit-learn,icdishb/scikit-learn,PatrickChrist/scikit-learn,LiaoPan/scikit-learn,pypot/scikit-learn,deepesch/scikit-learn,walterreade/scikit-learn,vibhorag/scikit-learn,mojoboss/scikit-learn,ephes/scikit-learn,jjx02230808/project0223,wlamond/scikit-learn,yonglehou/scikit-learn,shangwuhencc/scikit-learn,icdishb/scikit-learn,dsquareindia/scikit-learn,jm-begon/scikit-learn,pianomania/scikit-learn,ankurankan/scikit-learn,PatrickOReilly/scikit-learn,nrhine1/scikit-learn,adamgreenhall/scikit-learn,plissonf/scikit-learn,imaculate/scikit-learn,CforED/Machine-Learning,liberatorqjw/scikit-learn,0asa/scikit-learn,ishanic/scikit-learn,potash/scikit-learn,nmayorov/scikit-learn,treycausey/scikit-learn,idlead/scikit-learn,Aasmi/scikit-learn,aabadie/scikit-learn,joernhees/scikit-learn,wazeerzulfikar/scikit-learn,ashhher3/scikit-learn,saiwing-yeung/scikit-learn,rahuldhote/scikit-learn,chrsrds/scikit-learn,nomadcube/scikit-learn,loli/semisupervisedforests,toastedcornflakes/scikit-learn,herilalaina/scikit-learn,nvoron23/scikit-learn,olologin/scikit-learn,Fireblend/scikit-learn,zihua/scikit-learn,0x0all/scikit-learn,costypetrisor/scikit-learn,jpautom/scikit-learn,hitszxp/scikit-learn,eg-zhang/scikit-learn,Achuth17/scikit-learn,fyffyt/scikit-learn,beepee14/scikit-learn,kaichogami/scikit-learn,vivekmishra1991/scikit-learn,cybernet14/scikit-learn,themrmax/scikit-learn,kjung/scikit-learn,depet/scikit-learn,jereze/scikit-learn,DSLituiev/scikit-learn,LiaoPan/scikit-learn,eg-zhang/scikit-learn,phdowling/scikit-learn,robin-lai/scikit-learn,justincassidy/scikit-learn,PatrickOReilly/scikit-learn,hsiaoyi0504/scikit-learn,themrmax/scikit-learn,sarahgrogan/scikit-learn,fabioticconi/scikit-learn,liyu1990/sklearn,ChanChiChoi/scikit-learn,mwv/scikit-learn,nhejazi/scikit-learn,henrykironde/scikit-learn,vibhorag/scikit-learn,mikebenfield/scikit-learn,JosmanPS/scikit-learn,altairpearl/scikit-learn,xzh86/scikit-learn,billy-inn/scikit-learn,aewhatley/scikit-learn,mfjb/scikit-learn,gl
ennq/scikit-learn,mattilyra/scikit-learn,cwu2011/scikit-learn,fredhusser/scikit-learn,arjoly/scikit-learn,jkarnows/scikit-learn,tosolveit/scikit-learn,shikhardb/scikit-learn,fabianp/scikit-learn,ilyes14/scikit-learn,equialgo/scikit-learn,robbymeals/scikit-learn,MechCoder/scikit-learn,bikong2/scikit-learn,IndraVikas/scikit-learn,glennq/scikit-learn,ltiao/scikit-learn,samzhang111/scikit-learn,alvarofierroclavero/scikit-learn,gclenaghan/scikit-learn,manashmndl/scikit-learn,harshaneelhg/scikit-learn,poryfly/scikit-learn,Aasmi/scikit-learn,loli/semisupervisedforests,LohithBlaze/scikit-learn,bigdataelephants/scikit-learn,eickenberg/scikit-learn,olologin/scikit-learn,quheng/scikit-learn,wzbozon/scikit-learn,thientu/scikit-learn,shenzebang/scikit-learn,ivannz/scikit-learn,tmhm/scikit-learn,espg/scikit-learn,bthirion/scikit-learn,larsmans/scikit-learn,madjelan/scikit-learn,wanggang3333/scikit-learn,wazeerzulfikar/scikit-learn,loli/sklearn-ensembletrees,kylerbrown/scikit-learn,ahoyosid/scikit-learn,bnaul/scikit-learn,jayflo/scikit-learn,mjudsp/Tsallis,CforED/Machine-Learning,eg-zhang/scikit-learn,wzbozon/scikit-learn,mojoboss/scikit-learn,jblackburne/scikit-learn,yonglehou/scikit-learn,hitszxp/scikit-learn,marcocaccin/scikit-learn,lin-credible/scikit-learn,rvraghav93/scikit-learn,wzbozon/scikit-learn,shyamalschandra/scikit-learn,michigraber/scikit-learn,marcocaccin/scikit-learn,Adai0808/scikit-learn,sumspr/scikit-learn,h2educ/scikit-learn,arabenjamin/scikit-learn,nmayorov/scikit-learn,AIML/scikit-learn,loli/sklearn-ensembletrees,ycaihua/scikit-learn,manhhomienbienthuy/scikit-learn,rishikksh20/scikit-learn,rahul-c1/scikit-learn,RachitKansal/scikit-learn,lenovor/scikit-learn,rexshihaoren/scikit-learn,nrhine1/scikit-learn,0x0all/scikit-learn,potash/scikit-learn,samzhang111/scikit-learn,zorroblue/scikit-learn,rohanp/scikit-learn,jzt5132/scikit-learn,jaidevd/scikit-learn,sarahgrogan/scikit-learn,macks22/scikit-learn,Windy-Ground/scikit-learn,dhruv13J/scikit-learn,Sentient07/scikit
-learn,amueller/scikit-learn,waterponey/scikit-learn,zuku1985/scikit-learn,AIML/scikit-learn,mblondel/scikit-learn,mlyundin/scikit-learn,betatim/scikit-learn,henridwyer/scikit-learn,hsiaoyi0504/scikit-learn,hdmetor/scikit-learn,NelisVerhoef/scikit-learn,kjung/scikit-learn,chrsrds/scikit-learn,AnasGhrab/scikit-learn,bthirion/scikit-learn,hainm/scikit-learn,sinhrks/scikit-learn,akionakamura/scikit-learn,sinhrks/scikit-learn,lucidfrontier45/scikit-learn,rexshihaoren/scikit-learn,RPGOne/scikit-learn,shangwuhencc/scikit-learn,liangz0707/scikit-learn,olologin/scikit-learn,henridwyer/scikit-learn,Lawrence-Liu/scikit-learn,anntzer/scikit-learn,moutai/scikit-learn,sgenoud/scikit-learn,mhue/scikit-learn,Jimmy-Morzaria/scikit-learn,ElDeveloper/scikit-learn,fengzhyuan/scikit-learn,zorroblue/scikit-learn,gotomypc/scikit-learn,ycaihua/scikit-learn,tosolveit/scikit-learn,sumspr/scikit-learn,ldirer/scikit-learn,rajat1994/scikit-learn,RayMick/scikit-learn,andaag/scikit-learn,themrmax/scikit-learn,btabibian/scikit-learn,hsiaoyi0504/scikit-learn,mwv/scikit-learn,vshtanko/scikit-learn,OshynSong/scikit-learn,alvarofierroclavero/scikit-learn,rexshihaoren/scikit-learn,0asa/scikit-learn,smartscheduling/scikit-learn-categorical-tree,abhishekgahlot/scikit-learn,henridwyer/scikit-learn,aabadie/scikit-learn,MohammedWasim/scikit-learn,samzhang111/scikit-learn,idlead/scikit-learn,simon-pepin/scikit-learn,rishikksh20/scikit-learn,vibhorag/scikit-learn,murali-munna/scikit-learn,liangz0707/scikit-learn,RPGOne/scikit-learn,AlexRobson/scikit-learn,nrhine1/scikit-learn,larsmans/scikit-learn,vermouthmjl/scikit-learn,xavierwu/scikit-learn,abhishekkrthakur/scikit-learn,roxyboy/scikit-learn,Vimos/scikit-learn,rexshihaoren/scikit-learn,ivannz/scikit-learn,aewhatley/scikit-learn,ycaihua/scikit-learn,nikitasingh981/scikit-learn,loli/semisupervisedforests,meduz/scikit-learn,tdhopper/scikit-learn,dsullivan7/scikit-learn,hitszxp/scikit-learn,HolgerPeters/scikit-learn,fyffyt/scikit-learn,pratapvardhan/scikit-lea
rn,rsivapr/scikit-learn,rsivapr/scikit-learn,jseabold/scikit-learn,Barmaley-exe/scikit-learn,smartscheduling/scikit-learn-categorical-tree,sanketloke/scikit-learn,Windy-Ground/scikit-learn,abhishekgahlot/scikit-learn,Barmaley-exe/scikit-learn,vermouthmjl/scikit-learn,shangwuhencc/scikit-learn,ominux/scikit-learn,ankurankan/scikit-learn,deepesch/scikit-learn,voxlol/scikit-learn,andrewnc/scikit-learn,akionakamura/scikit-learn,thilbern/scikit-learn,betatim/scikit-learn,Clyde-fare/scikit-learn,ningchi/scikit-learn,mjudsp/Tsallis,vshtanko/scikit-learn,RomainBrault/scikit-learn,JeanKossaifi/scikit-learn,lbishal/scikit-learn,JeanKossaifi/scikit-learn,eg-zhang/scikit-learn,thientu/scikit-learn,qifeigit/scikit-learn,mblondel/scikit-learn,JsNoNo/scikit-learn,samuel1208/scikit-learn,cwu2011/scikit-learn,larsmans/scikit-learn,fzalkow/scikit-learn,TomDLT/scikit-learn,ningchi/scikit-learn,ngoix/OCRF,trungnt13/scikit-learn,fabioticconi/scikit-learn,nesterione/scikit-learn,maheshakya/scikit-learn,quheng/scikit-learn,mhdella/scikit-learn,maheshakya/scikit-learn,herilalaina/scikit-learn,massmutual/scikit-learn,macks22/scikit-learn,thilbern/scikit-learn,xzh86/scikit-learn,dsullivan7/scikit-learn,glouppe/scikit-learn,shahankhatch/scikit-learn,ycaihua/scikit-learn,untom/scikit-learn,yask123/scikit-learn,smartscheduling/scikit-learn-categorical-tree,akionakamura/scikit-learn,BiaDarkia/scikit-learn,yask123/scikit-learn,schets/scikit-learn,IssamLaradji/scikit-learn,simon-pepin/scikit-learn,shahankhatch/scikit-learn,rahul-c1/scikit-learn,466152112/scikit-learn,macks22/scikit-learn,jpautom/scikit-learn,frank-tancf/scikit-learn,harshaneelhg/scikit-learn,ChanChiChoi/scikit-learn,cauchycui/scikit-learn,mehdidc/scikit-learn,jakirkham/scikit-learn,nelson-liu/scikit-learn,quheng/scikit-learn,bnaul/scikit-learn,moutai/scikit-learn,herilalaina/scikit-learn,anntzer/scikit-learn,ilyes14/scikit-learn,gotomypc/scikit-learn,LiaoPan/scikit-learn,vybstat/scikit-learn,RachitKansal/scikit-learn,ilo10/scikit-
learn,thientu/scikit-learn,rajat1994/scikit-learn,Clyde-fare/scikit-learn,Jimmy-Morzaria/scikit-learn,adamgreenhall/scikit-learn,billy-inn/scikit-learn,RPGOne/scikit-learn,poryfly/scikit-learn,ndingwall/scikit-learn,yunfeilu/scikit-learn,depet/scikit-learn,jorge2703/scikit-learn,petosegan/scikit-learn,mehdidc/scikit-learn,zhenv5/scikit-learn,kmike/scikit-learn,alexsavio/scikit-learn,jblackburne/scikit-learn,RomainBrault/scikit-learn,Windy-Ground/scikit-learn,alvarofierroclavero/scikit-learn,spallavolu/scikit-learn,AnasGhrab/scikit-learn,fredhusser/scikit-learn,shangwuhencc/scikit-learn,Djabbz/scikit-learn,potash/scikit-learn,luo66/scikit-learn,aminert/scikit-learn,xyguo/scikit-learn,Jimmy-Morzaria/scikit-learn,florian-f/sklearn,pypot/scikit-learn,cdegroc/scikit-learn,nesterione/scikit-learn,billy-inn/scikit-learn,robin-lai/scikit-learn,mattilyra/scikit-learn,dsquareindia/scikit-learn,liangz0707/scikit-learn,rrohan/scikit-learn,ky822/scikit-learn,theoryno3/scikit-learn,lucidfrontier45/scikit-learn,xiaoxiamii/scikit-learn,victorbergelin/scikit-learn,jkarnows/scikit-learn,pythonvietnam/scikit-learn,carrillo/scikit-learn,massmutual/scikit-learn,jlegendary/scikit-learn,wlamond/scikit-learn,nelson-liu/scikit-learn,moutai/scikit-learn,vermouthmjl/scikit-learn,jereze/scikit-learn,fredhusser/scikit-learn,dingocuster/scikit-learn,tmhm/scikit-learn,alexsavio/scikit-learn,AlexRobson/scikit-learn,clemkoa/scikit-learn,RachitKansal/scikit-learn,krez13/scikit-learn,JosmanPS/scikit-learn,kashif/scikit-learn,sgenoud/scikit-learn,MartinSavc/scikit-learn,mxjl620/scikit-learn,bthirion/scikit-learn,f3r/scikit-learn,evgchz/scikit-learn,nrhine1/scikit-learn,manhhomienbienthuy/scikit-learn,meduz/scikit-learn,carrillo/scikit-learn,xiaoxiamii/scikit-learn,meduz/scikit-learn,466152112/scikit-learn,wzbozon/scikit-learn,abimannans/scikit-learn,chrisburr/scikit-learn,mjgrav2001/scikit-learn,waterponey/scikit-learn,IssamLaradji/scikit-learn,cauchycui/scikit-learn,nesterione/scikit-learn,ankurankan
/scikit-learn,Nyker510/scikit-learn,jjx02230808/project0223,hrjn/scikit-learn,trungnt13/scikit-learn,mehdidc/scikit-learn,nelson-liu/scikit-learn,wlamond/scikit-learn,henrykironde/scikit-learn,JPFrancoia/scikit-learn,joshloyal/scikit-learn,equialgo/scikit-learn,belltailjp/scikit-learn,belltailjp/scikit-learn,vigilv/scikit-learn,kagayakidan/scikit-learn,jayflo/scikit-learn,TomDLT/scikit-learn,pypot/scikit-learn,mjgrav2001/scikit-learn,jmschrei/scikit-learn,xavierwu/scikit-learn,djgagne/scikit-learn,cauchycui/scikit-learn,ChanderG/scikit-learn,mattgiguere/scikit-learn,anntzer/scikit-learn,ephes/scikit-learn,YinongLong/scikit-learn,belltailjp/scikit-learn,ndingwall/scikit-learn,etkirsch/scikit-learn,yanlend/scikit-learn,jmetzen/scikit-learn,sergeyf/scikit-learn,eickenberg/scikit-learn,kevin-intel/scikit-learn,mugizico/scikit-learn,sanketloke/scikit-learn,bikong2/scikit-learn,MartinDelzant/scikit-learn,russel1237/scikit-learn,HolgerPeters/scikit-learn,equialgo/scikit-learn,liberatorqjw/scikit-learn,RayMick/scikit-learn,espg/scikit-learn,poryfly/scikit-learn,shyamalschandra/scikit-learn,vermouthmjl/scikit-learn,joshloyal/scikit-learn,samuel1208/scikit-learn,arjoly/scikit-learn,arjoly/scikit-learn,IndraVikas/scikit-learn,tomlof/scikit-learn,ssaeger/scikit-learn,yyjiang/scikit-learn,cl4rke/scikit-learn,ChanderG/scikit-learn,clemkoa/scikit-learn,NelisVerhoef/scikit-learn,466152112/scikit-learn,dsquareindia/scikit-learn,appapantula/scikit-learn,loli/semisupervisedforests,zorojean/scikit-learn,alexeyum/scikit-learn,hsuantien/scikit-learn,RayMick/scikit-learn,ssaeger/scikit-learn,Obus/scikit-learn,hugobowne/scikit-learn,aabadie/scikit-learn,kjung/scikit-learn,sumspr/scikit-learn,robbymeals/scikit-learn,hdmetor/scikit-learn,andrewnc/scikit-learn,thilbern/scikit-learn,ogrisel/scikit-learn,shahankhatch/scikit-learn,elkingtonmcb/scikit-learn,rvraghav93/scikit-learn,rahuldhote/scikit-learn,ZenDevelopmentSystems/scikit-learn,fyffyt/scikit-learn,mblondel/scikit-learn,joshloyal/scikit
-learn,TomDLT/scikit-learn,NelisVerhoef/scikit-learn,abhishekkrthakur/scikit-learn,khkaminska/scikit-learn,NunoEdgarGub1/scikit-learn,fabianp/scikit-learn,sonnyhu/scikit-learn,liyu1990/sklearn,anurag313/scikit-learn,anirudhjayaraman/scikit-learn,aetilley/scikit-learn,yyjiang/scikit-learn,ky822/scikit-learn,CVML/scikit-learn,Jimmy-Morzaria/scikit-learn,nmayorov/scikit-learn,UNR-AERIAL/scikit-learn,thientu/scikit-learn,MartinSavc/scikit-learn,rsivapr/scikit-learn,khkaminska/scikit-learn,RPGOne/scikit-learn,JosmanPS/scikit-learn,pompiduskus/scikit-learn,mattilyra/scikit-learn,jorik041/scikit-learn,jakobworldpeace/scikit-learn,hlin117/scikit-learn,mxjl620/scikit-learn,sgenoud/scikit-learn,sergeyf/scikit-learn,waterponey/scikit-learn,aetilley/scikit-learn,pompiduskus/scikit-learn,olologin/scikit-learn,anurag313/scikit-learn,yanlend/scikit-learn,siutanwong/scikit-learn,zihua/scikit-learn,untom/scikit-learn,amueller/scikit-learn,jakirkham/scikit-learn,shahankhatch/scikit-learn,davidgbe/scikit-learn,Achuth17/scikit-learn,MatthieuBizien/scikit-learn,fbagirov/scikit-learn,phdowling/scikit-learn,hrjn/scikit-learn,AlexandreAbraham/scikit-learn,appapantula/scikit-learn,IshankGulati/scikit-learn,siutanwong/scikit-learn,shikhardb/scikit-learn,zihua/scikit-learn,jmschrei/scikit-learn,lin-credible/scikit-learn,ZenDevelopmentSystems/scikit-learn,djgagne/scikit-learn,3manuek/scikit-learn,equialgo/scikit-learn,lucidfrontier45/scikit-learn,ndingwall/scikit-learn,fengzhyuan/scikit-learn,jorge2703/scikit-learn,mhue/scikit-learn,jseabold/scikit-learn,jorik041/scikit-learn,Barmaley-exe/scikit-learn,pv/scikit-learn,zaxtax/scikit-learn,arahuja/scikit-learn,jorik041/scikit-learn,ankurankan/scikit-learn,bnaul/scikit-learn,jakobworldpeace/scikit-learn,mhdella/scikit-learn,Titan-C/scikit-learn,AIML/scikit-learn,cl4rke/scikit-learn,anirudhjayaraman/scikit-learn,manhhomienbienthuy/scikit-learn,toastedcornflakes/scikit-learn,hdmetor/scikit-learn,terkkila/scikit-learn,HolgerPeters/scikit-learn,joernh
ees/scikit-learn,idlead/scikit-learn,terkkila/scikit-learn,vortex-ape/scikit-learn,quheng/scikit-learn,stylianos-kampakis/scikit-learn,yask123/scikit-learn,etkirsch/scikit-learn,zhenv5/scikit-learn,sergeyf/scikit-learn,mrshu/scikit-learn,nvoron23/scikit-learn,vivekmishra1991/scikit-learn,ivannz/scikit-learn,ahoyosid/scikit-learn,zaxtax/scikit-learn,pkruskal/scikit-learn,pnedunuri/scikit-learn,abhishekkrthakur/scikit-learn,xiaoxiamii/scikit-learn,ngoix/OCRF,lesteve/scikit-learn,Sentient07/scikit-learn,kylerbrown/scikit-learn,krez13/scikit-learn,trungnt13/scikit-learn,gotomypc/scikit-learn,mayblue9/scikit-learn,pythonvietnam/scikit-learn,DSLituiev/scikit-learn,HolgerPeters/scikit-learn,rsivapr/scikit-learn,maheshakya/scikit-learn,pkruskal/scikit-learn,shyamalschandra/scikit-learn,PatrickChrist/scikit-learn,belltailjp/scikit-learn,beepee14/scikit-learn,glouppe/scikit-learn,toastedcornflakes/scikit-learn,rishikksh20/scikit-learn,f3r/scikit-learn,akionakamura/scikit-learn,anirudhjayaraman/scikit-learn,carrillo/scikit-learn,devanshdalal/scikit-learn,vortex-ape/scikit-learn,mojoboss/scikit-learn,theoryno3/scikit-learn,kevin-intel/scikit-learn,gclenaghan/scikit-learn,wanggang3333/scikit-learn,lazywei/scikit-learn,justincassidy/scikit-learn,zuku1985/scikit-learn,yyjiang/scikit-learn,JosmanPS/scikit-learn,sarahgrogan/scikit-learn,0asa/scikit-learn,Fireblend/scikit-learn,PatrickOReilly/scikit-learn,voxlol/scikit-learn,jm-begon/scikit-learn,cl4rke/scikit-learn,kaichogami/scikit-learn,altairpearl/scikit-learn,AlexRobson/scikit-learn,sinhrks/scikit-learn,RomainBrault/scikit-learn,mhue/scikit-learn,Aasmi/scikit-learn,mayblue9/scikit-learn,liyu1990/sklearn,fabioticconi/scikit-learn,andaag/scikit-learn,ishanic/scikit-learn,heli522/scikit-learn,tosolveit/scikit-learn,rishikksh20/scikit-learn,mayblue9/scikit-learn,murali-munna/scikit-learn,mwv/scikit-learn,rvraghav93/scikit-learn,zuku1985/scikit-learn,raghavrv/scikit-learn,Myasuka/scikit-learn,huzq/scikit-learn,mfjb/scikit-learn,Lohit
hBlaze/scikit-learn,chrsrds/scikit-learn,jseabold/scikit-learn,yanlend/scikit-learn,Adai0808/scikit-learn,tmhm/scikit-learn,pythonvietnam/scikit-learn,huobaowangxi/scikit-learn,theoryno3/scikit-learn,hugobowne/scikit-learn,NunoEdgarGub1/scikit-learn,jayflo/scikit-learn,alexeyum/scikit-learn,terkkila/scikit-learn,ky822/scikit-learn,espg/scikit-learn,ZenDevelopmentSystems/scikit-learn,Achuth17/scikit-learn,jlegendary/scikit-learn,bigdataelephants/scikit-learn,mugizico/scikit-learn,JeanKossaifi/scikit-learn,krez13/scikit-learn,fabianp/scikit-learn,shusenl/scikit-learn,icdishb/scikit-learn,mjudsp/Tsallis,etkirsch/scikit-learn,carrillo/scikit-learn,Nyker510/scikit-learn,moutai/scikit-learn,mattgiguere/scikit-learn,pratapvardhan/scikit-learn,bnaul/scikit-learn,arahuja/scikit-learn,466152112/scikit-learn,rrohan/scikit-learn,ngoix/OCRF,shenzebang/scikit-learn,kagayakidan/scikit-learn,PrashntS/scikit-learn,xwolf12/scikit-learn,Myasuka/scikit-learn,yanlend/scikit-learn,vybstat/scikit-learn,Akshay0724/scikit-learn,plissonf/scikit-learn,devanshdalal/scikit-learn,meduz/scikit-learn,stylianos-kampakis/scikit-learn,shenzebang/scikit-learn,fzalkow/scikit-learn,wazeerzulfikar/scikit-learn,Lawrence-Liu/scikit-learn,ldirer/scikit-learn,liangz0707/scikit-learn,sonnyhu/scikit-learn,Vimos/scikit-learn,plissonf/scikit-learn,ominux/scikit-learn,henridwyer/scikit-learn,huzq/scikit-learn,nikitasingh981/scikit-learn,AlexanderFabisch/scikit-learn,xiaoxiamii/scikit-learn,pianomania/scikit-learn,spallavolu/scikit-learn,tomlof/scikit-learn,trankmichael/scikit-learn,xyguo/scikit-learn,waterponey/scikit-learn,B3AU/waveTree,jseabold/scikit-learn,pkruskal/scikit-learn,roxyboy/scikit-learn,kashif/scikit-learn,jereze/scikit-learn,altairpearl/scikit-learn,cwu2011/scikit-learn,jorge2703/scikit-learn,fabianp/scikit-learn,JPFrancoia/scikit-learn,aflaxman/scikit-learn,andrewnc/scikit-learn,schets/scikit-learn,pompiduskus/scikit-learn,phdowling/scikit-learn,fengzhyuan/scikit-learn,Barmaley-exe/scikit-learn,a
shhher3/scikit-learn,roxyboy/scikit-learn,treycausey/scikit-learn,khkaminska/scikit-learn,zaxtax/scikit-learn,q1ang/scikit-learn,justincassidy/scikit-learn,chrisburr/scikit-learn,joernhees/scikit-learn,cdegroc/scikit-learn,vibhorag/scikit-learn,jjx02230808/project0223,manashmndl/scikit-learn,q1ang/scikit-learn,krez13/scikit-learn,sinhrks/scikit-learn,kmike/scikit-learn,CVML/scikit-learn,Garrett-R/scikit-learn,florian-f/sklearn,huobaowangxi/scikit-learn,cainiaocome/scikit-learn,dingocuster/scikit-learn,CforED/Machine-Learning,AlexandreAbraham/scikit-learn,DonBeo/scikit-learn,mattilyra/scikit-learn,pypot/scikit-learn,jm-begon/scikit-learn,gclenaghan/scikit-learn,Djabbz/scikit-learn,Aasmi/scikit-learn,mblondel/scikit-learn,rahul-c1/scikit-learn,jm-begon/scikit-learn,zaxtax/scikit-learn,ngoix/OCRF,tomlof/scikit-learn,aewhatley/scikit-learn,jaidevd/scikit-learn,abimannans/scikit-learn,cauchycui/scikit-learn,toastedcornflakes/scikit-learn,victorbergelin/scikit-learn,h2educ/scikit-learn,russel1237/scikit-learn,treycausey/scikit-learn,xzh86/scikit-learn,Djabbz/scikit-learn,rvraghav93/scikit-learn,kylerbrown/scikit-learn,trungnt13/scikit-learn,ishanic/scikit-learn,phdowling/scikit-learn,vortex-ape/scikit-learn,mugizico/scikit-learn,AlexRobson/scikit-learn,lenovor/scikit-learn,PrashntS/scikit-learn,jmetzen/scikit-learn,cainiaocome/scikit-learn,btabibian/scikit-learn,ldirer/scikit-learn,B3AU/waveTree,aminert/scikit-learn,hlin117/scikit-learn,lin-credible/scikit-learn,depet/scikit-learn,anirudhjayaraman/scikit-learn,h2educ/scikit-learn,chrisburr/scikit-learn,schets/scikit-learn,mfjb/scikit-learn,MartinDelzant/scikit-learn,giorgiop/scikit-learn,heli522/scikit-learn,jayflo/scikit-learn,xuewei4d/scikit-learn,ClimbsRocks/scikit-learn,tmhm/scikit-learn,MatthieuBizien/scikit-learn,vinayak-mehta/scikit-learn,arjoly/scikit-learn,Srisai85/scikit-learn,bhargav/scikit-learn,mjudsp/Tsallis,billy-inn/scikit-learn,herilalaina/scikit-learn,hsiaoyi0504/scikit-learn,jaidevd/scikit-learn,rrohan/
scikit-learn,Srisai85/scikit-learn,schets/scikit-learn,MartinDelzant/scikit-learn,aminert/scikit-learn,lucidfrontier45/scikit-learn,vshtanko/scikit-learn,nelson-liu/scikit-learn,BiaDarkia/scikit-learn,larsmans/scikit-learn,cybernet14/scikit-learn,rohanp/scikit-learn,hrjn/scikit-learn,mxjl620/scikit-learn,RomainBrault/scikit-learn,lin-credible/scikit-learn,michigraber/scikit-learn,zihua/scikit-learn,walterreade/scikit-learn,luo66/scikit-learn,hitszxp/scikit-learn,hlin117/scikit-learn,JsNoNo/scikit-learn,untom/scikit-learn,f3r/scikit-learn,beepee14/scikit-learn,andrewnc/scikit-learn,RachitKansal/scikit-learn,kmike/scikit-learn,fzalkow/scikit-learn,ElDeveloper/scikit-learn,RayMick/scikit-learn,chrisburr/scikit-learn,Titan-C/scikit-learn,ominux/scikit-learn,rsivapr/scikit-learn,trankmichael/scikit-learn,kevin-intel/scikit-learn,lbishal/scikit-learn,mehdidc/scikit-learn,Garrett-R/scikit-learn,kashif/scikit-learn,dhruv13J/scikit-learn,jkarnows/scikit-learn,LiaoPan/scikit-learn,simon-pepin/scikit-learn,nikitasingh981/scikit-learn,shusenl/scikit-learn,fzalkow/scikit-learn,jzt5132/scikit-learn,nmayorov/scikit-learn,rajat1994/scikit-learn,h2educ/scikit-learn,huobaowangxi/scikit-learn,ycaihua/scikit-learn,sergeyf/scikit-learn,xubenben/scikit-learn,glemaitre/scikit-learn,nhejazi/scikit-learn,Garrett-R/scikit-learn,abhishekgahlot/scikit-learn,jlegendary/scikit-learn,yyjiang/scikit-learn,raghavrv/scikit-learn,mjgrav2001/scikit-learn,nomadcube/scikit-learn,loli/sklearn-ensembletrees,madjelan/scikit-learn,tomlof/scikit-learn,fredhusser/scikit-learn,MohammedWasim/scikit-learn,betatim/scikit-learn,deepesch/scikit-learn,mrshu/scikit-learn,mayblue9/scikit-learn,deepesch/scikit-learn,bthirion/scikit-learn,0asa/scikit-learn,michigraber/scikit-learn,IssamLaradji/scikit-learn,iismd17/scikit-learn,liberatorqjw/scikit-learn,pkruskal/scikit-learn,mattgiguere/scikit-learn,YinongLong/scikit-learn,djgagne/scikit-learn,Srisai85/scikit-learn,dingocuster/scikit-learn,DonBeo/scikit-learn,alexeyum/sc
ikit-learn,saiwing-yeung/scikit-learn,bhargav/scikit-learn,OshynSong/scikit-learn,mxjl620/scikit-learn,jpautom/scikit-learn,nomadcube/scikit-learn,scikit-learn/scikit-learn,jmetzen/scikit-learn,beepee14/scikit-learn,mikebenfield/scikit-learn,r-mart/scikit-learn,Sentient07/scikit-learn,AIML/scikit-learn,ahoyosid/scikit-learn,frank-tancf/scikit-learn,aflaxman/scikit-learn,xuewei4d/scikit-learn,giorgiop/scikit-learn,IssamLaradji/scikit-learn,0x0all/scikit-learn,ClimbsRocks/scikit-learn,arahuja/scikit-learn,anurag313/scikit-learn,lazywei/scikit-learn,samzhang111/scikit-learn,ElDeveloper/scikit-learn,AlexanderFabisch/scikit-learn,ningchi/scikit-learn,aflaxman/scikit-learn,amueller/scikit-learn,ilyes14/scikit-learn,hsuantien/scikit-learn,Achuth17/scikit-learn,theoryno3/scikit-learn,B3AU/waveTree,procoder317/scikit-learn,CforED/Machine-Learning,huzq/scikit-learn,procoder317/scikit-learn,UNR-AERIAL/scikit-learn,Vimos/scikit-learn,r-mart/scikit-learn,jlegendary/scikit-learn,costypetrisor/scikit-learn,jzt5132/scikit-learn,xwolf12/scikit-learn,iismd17/scikit-learn,smartscheduling/scikit-learn-categorical-tree,DonBeo/scikit-learn,stylianos-kampakis/scikit-learn,jorge2703/scikit-learn,nhejazi/scikit-learn,huobaowangxi/scikit-learn,mrshu/scikit-learn,AlexandreAbraham/scikit-learn,potash/scikit-learn,treycausey/scikit-learn,alexsavio/scikit-learn,mojoboss/scikit-learn,jakobworldpeace/scikit-learn,dhruv13J/scikit-learn,siutanwong/scikit-learn,ChanderG/scikit-learn,Akshay0724/scikit-learn,ephes/scikit-learn,MohammedWasim/scikit-learn,MohammedWasim/scikit-learn,yonglehou/scikit-learn,ltiao/scikit-learn,simon-pepin/scikit-learn,IndraVikas/scikit-learn,shikhardb/scikit-learn,siutanwong/scikit-learn,AnasGhrab/scikit-learn,kmike/scikit-learn,Clyde-fare/scikit-learn,saiwing-yeung/scikit-learn,xwolf12/scikit-learn,Clyde-fare/scikit-learn,bikong2/scikit-learn,cdegroc/scikit-learn,lesteve/scikit-learn,eickenberg/scikit-learn,jblackburne/scikit-learn,xuewei4d/scikit-learn,amueller/scikit-lear
n,michigraber/scikit-learn,mugizico/scikit-learn,luo66/scikit-learn,procoder317/scikit-learn,AlexanderFabisch/scikit-learn,r-mart/scikit-learn,joshloyal/scikit-learn,ilo10/scikit-learn,jmetzen/scikit-learn,cybernet14/scikit-learn,hainm/scikit-learn,rahuldhote/scikit-learn,lesteve/scikit-learn,ephes/scikit-learn,zorojean/scikit-learn,mrshu/scikit-learn,MatthieuBizien/scikit-learn,gotomypc/scikit-learn,PatrickChrist/scikit-learn,ltiao/scikit-learn,shikhardb/scikit-learn,xyguo/scikit-learn,betatim/scikit-learn,hainm/scikit-learn,fbagirov/scikit-learn,aabadie/scikit-learn,Obus/scikit-learn,zuku1985/scikit-learn,ngoix/OCRF,harshaneelhg/scikit-learn,jblackburne/scikit-learn,NelisVerhoef/scikit-learn,tdhopper/scikit-learn,hitszxp/scikit-learn,bikong2/scikit-learn,Myasuka/scikit-learn,qifeigit/scikit-learn,costypetrisor/scikit-learn,robin-lai/scikit-learn,ssaeger/scikit-learn,tawsifkhan/scikit-learn,liyu1990/sklearn,bhargav/scikit-learn,walterreade/scikit-learn,vivekmishra1991/scikit-learn,ashhher3/scikit-learn,pnedunuri/scikit-learn,heli522/scikit-learn,dsquareindia/scikit-learn,robbymeals/scikit-learn,nhejazi/scikit-learn,iismd17/scikit-learn,MatthieuBizien/scikit-learn,andaag/scikit-learn,sgenoud/scikit-learn,raghavrv/scikit-learn,pv/scikit-learn,djgagne/scikit-learn,ishanic/scikit-learn,loli/sklearn-ensembletrees,sonnyhu/scikit-learn,vigilv/scikit-learn,bhargav/scikit-learn,plissonf/scikit-learn,zorroblue/scikit-learn,jakirkham/scikit-learn,lazywei/scikit-learn,glouppe/scikit-learn,fbagirov/scikit-learn,xyguo/scikit-learn,ChanChiChoi/scikit-learn,3manuek/scikit-learn,glemaitre/scikit-learn,Akshay0724/scikit-learn,mrshu/scikit-learn,wanggang3333/scikit-learn,raghavrv/scikit-learn,harshaneelhg/scikit-learn,cainiaocome/scikit-learn,vybstat/scikit-learn,AnasGhrab/scikit-learn,treycausey/scikit-learn,mhue/scikit-learn,OshynSong/scikit-learn,rohanp/scikit-learn
|
- from nose.tools import assert_equals
+ import numpy as np
from .. import datasets
from ..pca import PCA
iris = datasets.load_iris()
X = iris.data
def test_pca():
"""
PCA
"""
pca = PCA(k=2)
X_r = pca.fit(X).transform(X)
- assert_equals(X_r.shape[1], 2)
+ np.testing.assert_equal(X_r.shape[1], 2)
pca = PCA()
pca.fit(X)
- assert_equals(pca.explained_variance_.sum(), 1.0)
+ np.testing.assert_almost_equal(pca.explained_variance_.sum(), 1.0, 3)
|
Fix tests to be moroe robust
|
## Code Before:
from nose.tools import assert_equals
from .. import datasets
from ..pca import PCA
iris = datasets.load_iris()
X = iris.data
def test_pca():
"""
PCA
"""
pca = PCA(k=2)
X_r = pca.fit(X).transform(X)
assert_equals(X_r.shape[1], 2)
pca = PCA()
pca.fit(X)
assert_equals(pca.explained_variance_.sum(), 1.0)
## Instruction:
Fix tests to be moroe robust
## Code After:
import numpy as np
from .. import datasets
from ..pca import PCA
iris = datasets.load_iris()
X = iris.data
def test_pca():
"""
PCA
"""
pca = PCA(k=2)
X_r = pca.fit(X).transform(X)
np.testing.assert_equal(X_r.shape[1], 2)
pca = PCA()
pca.fit(X)
np.testing.assert_almost_equal(pca.explained_variance_.sum(), 1.0, 3)
|
- from nose.tools import assert_equals
+ import numpy as np
from .. import datasets
from ..pca import PCA
iris = datasets.load_iris()
X = iris.data
def test_pca():
"""
PCA
"""
pca = PCA(k=2)
X_r = pca.fit(X).transform(X)
- assert_equals(X_r.shape[1], 2)
? -
+ np.testing.assert_equal(X_r.shape[1], 2)
? +++++++++++
pca = PCA()
pca.fit(X)
- assert_equals(pca.explained_variance_.sum(), 1.0)
? -
+ np.testing.assert_almost_equal(pca.explained_variance_.sum(), 1.0, 3)
? +++++++++++ +++++++ + +++
|
e4c79021e21c050e7dcd769f80e2457d97e67506
|
future/builtins/backports/newopen.py
|
future/builtins/backports/newopen.py
|
class open(object):
"""Wrapper providing key part of Python 3 open() interface.
From IPython's py3compat.py module. License: BSD.
"""
def __init__(self, fname, mode="r", encoding="utf-8"):
self.f = orig_open(fname, mode)
self.enc = encoding
def write(self, s):
return self.f.write(s.encode(self.enc))
def read(self, size=-1):
return self.f.read(size).decode(self.enc)
def close(self):
return self.f.close()
def __enter__(self):
return self
def __exit__(self, etype, value, traceback):
self.f.close()
|
open_ = open
class open(object):
"""Wrapper providing key part of Python 3 open() interface.
From IPython's py3compat.py module. License: BSD.
"""
def __init__(self, fname, mode="r", encoding="utf-8"):
self.f = open_(fname, mode)
self.enc = encoding
def write(self, s):
return self.f.write(s.encode(self.enc))
def read(self, size=-1):
return self.f.read(size).decode(self.enc)
def close(self):
return self.f.close()
def __enter__(self):
return self
def __exit__(self, etype, value, traceback):
self.f.close()
|
Update new open() function docs (and fix bug)
|
Update new open() function docs (and fix bug)
|
Python
|
mit
|
michaelpacer/python-future,PythonCharmers/python-future,krischer/python-future,PythonCharmers/python-future,krischer/python-future,QuLogic/python-future,QuLogic/python-future,michaelpacer/python-future
|
+
+ open_ = open
+
class open(object):
"""Wrapper providing key part of Python 3 open() interface.
From IPython's py3compat.py module. License: BSD.
"""
def __init__(self, fname, mode="r", encoding="utf-8"):
- self.f = orig_open(fname, mode)
+ self.f = open_(fname, mode)
self.enc = encoding
def write(self, s):
return self.f.write(s.encode(self.enc))
def read(self, size=-1):
return self.f.read(size).decode(self.enc)
def close(self):
return self.f.close()
def __enter__(self):
return self
def __exit__(self, etype, value, traceback):
self.f.close()
|
Update new open() function docs (and fix bug)
|
## Code Before:
class open(object):
"""Wrapper providing key part of Python 3 open() interface.
From IPython's py3compat.py module. License: BSD.
"""
def __init__(self, fname, mode="r", encoding="utf-8"):
self.f = orig_open(fname, mode)
self.enc = encoding
def write(self, s):
return self.f.write(s.encode(self.enc))
def read(self, size=-1):
return self.f.read(size).decode(self.enc)
def close(self):
return self.f.close()
def __enter__(self):
return self
def __exit__(self, etype, value, traceback):
self.f.close()
## Instruction:
Update new open() function docs (and fix bug)
## Code After:
open_ = open
class open(object):
"""Wrapper providing key part of Python 3 open() interface.
From IPython's py3compat.py module. License: BSD.
"""
def __init__(self, fname, mode="r", encoding="utf-8"):
self.f = open_(fname, mode)
self.enc = encoding
def write(self, s):
return self.f.write(s.encode(self.enc))
def read(self, size=-1):
return self.f.read(size).decode(self.enc)
def close(self):
return self.f.close()
def __enter__(self):
return self
def __exit__(self, etype, value, traceback):
self.f.close()
|
+
+ open_ = open
+
class open(object):
"""Wrapper providing key part of Python 3 open() interface.
From IPython's py3compat.py module. License: BSD.
"""
def __init__(self, fname, mode="r", encoding="utf-8"):
- self.f = orig_open(fname, mode)
? -----
+ self.f = open_(fname, mode)
? +
self.enc = encoding
def write(self, s):
return self.f.write(s.encode(self.enc))
def read(self, size=-1):
return self.f.read(size).decode(self.enc)
def close(self):
return self.f.close()
def __enter__(self):
return self
def __exit__(self, etype, value, traceback):
self.f.close()
|
1daf5825580d31e3f2825b5b5edfaa2aed8146fe
|
mopidy/internal/gi.py
|
mopidy/internal/gi.py
|
from __future__ import absolute_import, unicode_literals
import textwrap
try:
import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstPbutils', '1.0')
from gi.repository import GLib, GObject, Gst, GstPbutils
except ImportError:
print(textwrap.dedent("""
ERROR: A GObject Python package was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
else:
Gst.is_initialized() or Gst.init()
__all__ = [
'GLib',
'GObject',
'Gst',
'GstPbutils',
'gi',
]
|
from __future__ import absolute_import, unicode_literals
import sys
import textwrap
try:
import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstPbutils', '1.0')
from gi.repository import GLib, GObject, Gst, GstPbutils
except ImportError:
print(textwrap.dedent("""
ERROR: A GObject Python package was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
else:
Gst.is_initialized() or Gst.init()
REQUIRED_GST_VERSION = (1, 2)
if Gst.version() < REQUIRED_GST_VERSION:
sys.exit(
'ERROR: Mopidy requires GStreamer >= %s, but found %s.' % (
'.'.join(map(str, REQUIRED_GST_VERSION)), Gst.version_string()))
__all__ = [
'GLib',
'GObject',
'Gst',
'GstPbutils',
'gi',
]
|
Check GStreamer version on start
|
gst1: Check GStreamer version on start
If GStreamer is too old, it fails like this:
$ mopidy
ERROR: Mopidy requires GStreamer >= 1.2, but found GStreamer 1.0.0.
|
Python
|
apache-2.0
|
kingosticks/mopidy,jodal/mopidy,mokieyue/mopidy,tkem/mopidy,kingosticks/mopidy,tkem/mopidy,mokieyue/mopidy,adamcik/mopidy,adamcik/mopidy,jodal/mopidy,mopidy/mopidy,vrs01/mopidy,ZenithDK/mopidy,ZenithDK/mopidy,tkem/mopidy,jodal/mopidy,jcass77/mopidy,mopidy/mopidy,adamcik/mopidy,tkem/mopidy,mokieyue/mopidy,jcass77/mopidy,ZenithDK/mopidy,vrs01/mopidy,kingosticks/mopidy,mopidy/mopidy,ZenithDK/mopidy,vrs01/mopidy,jcass77/mopidy,vrs01/mopidy,mokieyue/mopidy
|
from __future__ import absolute_import, unicode_literals
+ import sys
import textwrap
try:
import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstPbutils', '1.0')
from gi.repository import GLib, GObject, Gst, GstPbutils
except ImportError:
print(textwrap.dedent("""
ERROR: A GObject Python package was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
else:
Gst.is_initialized() or Gst.init()
+ REQUIRED_GST_VERSION = (1, 2)
+
+ if Gst.version() < REQUIRED_GST_VERSION:
+ sys.exit(
+ 'ERROR: Mopidy requires GStreamer >= %s, but found %s.' % (
+ '.'.join(map(str, REQUIRED_GST_VERSION)), Gst.version_string()))
+
+
__all__ = [
'GLib',
'GObject',
'Gst',
'GstPbutils',
'gi',
]
|
Check GStreamer version on start
|
## Code Before:
from __future__ import absolute_import, unicode_literals
import textwrap
try:
import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstPbutils', '1.0')
from gi.repository import GLib, GObject, Gst, GstPbutils
except ImportError:
print(textwrap.dedent("""
ERROR: A GObject Python package was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
else:
Gst.is_initialized() or Gst.init()
__all__ = [
'GLib',
'GObject',
'Gst',
'GstPbutils',
'gi',
]
## Instruction:
Check GStreamer version on start
## Code After:
# Bootstrap the GObject/GStreamer bindings for Mopidy and fail fast with a
# helpful message when they are missing or too old.
from __future__ import absolute_import, unicode_literals

import sys
import textwrap

try:
    import gi

    # Pin the introspection API versions before importing from gi.repository.
    gi.require_version('Gst', '1.0')
    gi.require_version('GstPbutils', '1.0')

    from gi.repository import GLib, GObject, Gst, GstPbutils
except ImportError:
    # gi is a C-backed package that cannot be installed with pip; explain
    # how to obtain it, then re-raise the original ImportError.
    print(textwrap.dedent("""
        ERROR: A GObject Python package was not found.

        Mopidy requires GStreamer to work. GStreamer is a C library with a
        number of dependencies itself, and cannot be installed with the regular
        Python tools like pip.

        Please see http://docs.mopidy.com/en/latest/installation/ for
        instructions on how to install the required dependencies.
    """))
    raise
else:
    # Initialize GStreamer exactly once; Gst.init() is skipped if already done.
    Gst.is_initialized() or Gst.init()

# Minimum GStreamer (major, minor) version that Mopidy supports.
REQUIRED_GST_VERSION = (1, 2)

if Gst.version() < REQUIRED_GST_VERSION:
    # Abort start-up with a clear message rather than failing obscurely later.
    sys.exit(
    'ERROR: Mopidy requires GStreamer >= %s, but found %s.' % (
    '.'.join(map(str, REQUIRED_GST_VERSION)), Gst.version_string()))

# Names re-exported for the rest of Mopidy.
__all__ = [
    'GLib',
    'GObject',
    'Gst',
    'GstPbutils',
    'gi',
]
|
from __future__ import absolute_import, unicode_literals
+ import sys
import textwrap
try:
import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstPbutils', '1.0')
from gi.repository import GLib, GObject, Gst, GstPbutils
except ImportError:
print(textwrap.dedent("""
ERROR: A GObject Python package was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
else:
Gst.is_initialized() or Gst.init()
+ REQUIRED_GST_VERSION = (1, 2)
+
+ if Gst.version() < REQUIRED_GST_VERSION:
+ sys.exit(
+ 'ERROR: Mopidy requires GStreamer >= %s, but found %s.' % (
+ '.'.join(map(str, REQUIRED_GST_VERSION)), Gst.version_string()))
+
+
__all__ = [
'GLib',
'GObject',
'Gst',
'GstPbutils',
'gi',
]
|
1312dc95d9c25897c11c8e818edcb9cd2b6a32f7
|
ecommerce/extensions/app.py
|
ecommerce/extensions/app.py
|
from oscar import app
class EdxShop(app.Shop):
# URLs are only visible to users with staff permissions
default_permissions = 'is_staff'
application = EdxShop()
|
from oscar import app
from oscar.core.application import Application
class EdxShop(app.Shop):
# URLs are only visible to users with staff permissions
default_permissions = 'is_staff'
# Override core app instances with blank application instances to exclude their URLs.
promotions_app = Application()
catalogue_app = Application()
offer_app = Application()
search_app = Application()
application = EdxShop()
|
Move the security fix into Eucalyptus
|
Move the security fix into Eucalyptus
|
Python
|
agpl-3.0
|
mferenca/HMS-ecommerce,mferenca/HMS-ecommerce,mferenca/HMS-ecommerce
|
from oscar import app
+ from oscar.core.application import Application
class EdxShop(app.Shop):
# URLs are only visible to users with staff permissions
default_permissions = 'is_staff'
+ # Override core app instances with blank application instances to exclude their URLs.
+ promotions_app = Application()
+ catalogue_app = Application()
+ offer_app = Application()
+ search_app = Application()
+
application = EdxShop()
|
Move the security fix into Eucalyptus
|
## Code Before:
from oscar import app
class EdxShop(app.Shop):
# URLs are only visible to users with staff permissions
default_permissions = 'is_staff'
application = EdxShop()
## Instruction:
Move the security fix into Eucalyptus
## Code After:
from oscar import app
from oscar.core.application import Application


class EdxShop(app.Shop):
    """django-oscar Shop subclass with a locked-down URL surface."""

    # URLs are only visible to users with staff permissions
    default_permissions = 'is_staff'

    # Override core app instances with blank application instances to exclude their URLs.
    promotions_app = Application()
    catalogue_app = Application()
    offer_app = Application()
    search_app = Application()


# Module-level singleton consumed by the URL configuration.
application = EdxShop()
|
from oscar import app
+ from oscar.core.application import Application
class EdxShop(app.Shop):
# URLs are only visible to users with staff permissions
default_permissions = 'is_staff'
+ # Override core app instances with blank application instances to exclude their URLs.
+ promotions_app = Application()
+ catalogue_app = Application()
+ offer_app = Application()
+ search_app = Application()
+
application = EdxShop()
|
b0202e8882f792feb041070baff7370cacf73751
|
tests/test_api.py
|
tests/test_api.py
|
import subprocess
import time
from unittest import TestCase
from nose.tools import assert_equal
class TestOldApi(TestCase):
def setUp(self):
self.process = subprocess.Popen("openfisca-serve")
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:2000', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:2000 after 10s")
|
import subprocess
import time
from unittest import TestCase
from nose.tools import assert_equal
class TestOldApi(TestCase):
def setUp(self):
self.process = subprocess.Popen("openfisca-serve")
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:2000', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:2000 after 10s")
class TestNewApi(TestCase):
def setUp(self):
self.process = subprocess.Popen(['openfisca', 'serve'])
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:6000/parameters', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:6000 after 10s")
|
Test france compatibility with the new API
|
Test france compatibility with the new API
|
Python
|
agpl-3.0
|
antoinearnoud/openfisca-france,sgmap/openfisca-france,sgmap/openfisca-france,antoinearnoud/openfisca-france
|
import subprocess
import time
from unittest import TestCase
from nose.tools import assert_equal
class TestOldApi(TestCase):
def setUp(self):
self.process = subprocess.Popen("openfisca-serve")
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:2000', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:2000 after 10s")
+
+ class TestNewApi(TestCase):
+
+ def setUp(self):
+ self.process = subprocess.Popen(['openfisca', 'serve'])
+
+ def tearDown(self):
+ self.process.terminate()
+
+ def test_response(self):
+ try:
+ subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:6000/parameters', '--output-document=/dev/null'])
+ except subprocess.CalledProcessError:
+ raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:6000 after 10s")
+
|
Test france compatibility with the new API
|
## Code Before:
import subprocess
import time
from unittest import TestCase
from nose.tools import assert_equal
class TestOldApi(TestCase):
def setUp(self):
self.process = subprocess.Popen("openfisca-serve")
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:2000', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:2000 after 10s")
## Instruction:
Test france compatibility with the new API
## Code After:
import subprocess
import time

from unittest import TestCase

from nose.tools import assert_equal


def _assert_url_reachable(test_case, url, failure_message):
    """Poll *url* with wget (up to 10 tries, 1 s apart) and fail *test_case* if it never answers."""
    try:
        subprocess.check_call(
            ['wget', '--quiet', '--retry-connrefused', '--waitretry=1',
             '--tries=10', url, '--output-document=/dev/null'])
    except subprocess.CalledProcessError:
        # The previous code re-raised subprocess.CalledProcessError with a
        # single string argument, which is not a valid constructor call
        # (it requires returncode and cmd) and would mask the real failure
        # with a TypeError.  Fail the test with the intended message instead.
        test_case.fail(failure_message)


class TestOldApi(TestCase):
    """Smoke test for the legacy ``openfisca-serve`` entry point."""

    def setUp(self):
        # Launch the Web API server as a child process.
        self.process = subprocess.Popen("openfisca-serve")

    def tearDown(self):
        self.process.terminate()

    def test_response(self):
        _assert_url_reachable(
            self, 'http://localhost:2000',
            "Could not reach OpenFisca Web API at localhost:2000 after 10s")


class TestNewApi(TestCase):
    """Smoke test for the new ``openfisca serve`` entry point."""

    def setUp(self):
        # Launch the Web API server as a child process.
        self.process = subprocess.Popen(['openfisca', 'serve'])

    def tearDown(self):
        self.process.terminate()

    def test_response(self):
        _assert_url_reachable(
            self, 'http://localhost:6000/parameters',
            "Could not reach OpenFisca Web API at localhost:6000 after 10s")
|
import subprocess
import time
from unittest import TestCase
from nose.tools import assert_equal
class TestOldApi(TestCase):
def setUp(self):
self.process = subprocess.Popen("openfisca-serve")
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:2000', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:2000 after 10s")
+
+
+ class TestNewApi(TestCase):
+
+ def setUp(self):
+ self.process = subprocess.Popen(['openfisca', 'serve'])
+
+ def tearDown(self):
+ self.process.terminate()
+
+ def test_response(self):
+ try:
+ subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:6000/parameters', '--output-document=/dev/null'])
+ except subprocess.CalledProcessError:
+ raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:6000 after 10s")
|
f28716fba7f3b351b37fdfbb6e6cd1225592da57
|
example/app/templatetags/sqlformat.py
|
example/app/templatetags/sqlformat.py
|
from __future__ import unicode_literals
import sqlparse
from django import template
register = template.Library()
@register.filter
def sqlformat(sql):
return sqlparse.format(str(sql), reindent=True)
|
from __future__ import unicode_literals
import sqlparse
from django import template
register = template.Library()
@register.filter
def sqlformat(sql):
'''
Format SQL queries.
'''
return sqlparse.format(str(sql), reindent=True, wrap_after=120)
|
Use less vertical space in query formatting
|
Use less vertical space in query formatting
|
Python
|
bsd-3-clause
|
zostera/django-modeltrans,zostera/django-modeltrans
|
from __future__ import unicode_literals
import sqlparse
from django import template
register = template.Library()
@register.filter
def sqlformat(sql):
+ '''
+ Format SQL queries.
+ '''
- return sqlparse.format(str(sql), reindent=True)
+ return sqlparse.format(str(sql), reindent=True, wrap_after=120)
|
Use less vertical space in query formatting
|
## Code Before:
from __future__ import unicode_literals
import sqlparse
from django import template
register = template.Library()
@register.filter
def sqlformat(sql):
return sqlparse.format(str(sql), reindent=True)
## Instruction:
Use less vertical space in query formatting
## Code After:
from __future__ import unicode_literals

import sqlparse

from django import template

register = template.Library()


@register.filter
def sqlformat(sql):
    """Template filter: pretty-print a SQL query with sqlparse."""
    formatted = sqlparse.format(str(sql), reindent=True, wrap_after=120)
    return formatted
|
from __future__ import unicode_literals
import sqlparse
from django import template
register = template.Library()
@register.filter
def sqlformat(sql):
+ '''
+ Format SQL queries.
+ '''
- return sqlparse.format(str(sql), reindent=True)
+ return sqlparse.format(str(sql), reindent=True, wrap_after=120)
? ++++++++++++++++
|
f2fa55c8d2f94bd186fc6c47b8ce00fb87c22aaf
|
tensorflow/contrib/autograph/converters/__init__.py
|
tensorflow/contrib/autograph/converters/__init__.py
|
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# TODO(mdan): Define a base transformer class that can recognize skip_processing
# TODO(mdan): All converters are incomplete, especially those that change blocks
|
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Naming conventions:
# * each converter should specialize on a single idiom; be consistent with
# the Python reference for naming
# * all converters inherit core.converter.Base
# * module names describe the idiom that the converter covers, plural
# * the converter class is named consistent with the module, singular and
# includes the word Transformer
#
# Example:
#
# lists.py
# class ListTransformer(converter.Base)
|
Add a few naming guidelines for the converter library.
|
Add a few naming guidelines for the converter library.
PiperOrigin-RevId: 204199604
|
Python
|
apache-2.0
|
alsrgv/tensorflow,annarev/tensorflow,sarvex/tensorflow,ppwwyyxx/tensorflow,chemelnucfin/tensorflow,jalexvig/tensorflow,ageron/tensorflow,gunan/tensorflow,gautam1858/tensorflow,seanli9jan/tensorflow,girving/tensorflow,jart/tensorflow,tensorflow/tensorflow-pywrap_saved_model,sarvex/tensorflow,xzturn/tensorflow,aldian/tensorflow,jhseu/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,ageron/tensorflow,brchiu/tensorflow,freedomtan/tensorflow,freedomtan/tensorflow,xzturn/tensorflow,jalexvig/tensorflow,dancingdan/tensorflow,aldian/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,aam-at/tensorflow,girving/tensorflow,Intel-tensorflow/tensorflow,renyi533/tensorflow,alsrgv/tensorflow,chemelnucfin/tensorflow,kobejean/tensorflow,adit-chandra/tensorflow,theflofly/tensorflow,gautam1858/tensorflow,manipopopo/tensorflow,brchiu/tensorflow,jbedorf/tensorflow,ageron/tensorflow,gunan/tensorflow,hfp/tensorflow-xsmm,aam-at/tensorflow,AnishShah/tensorflow,AnishShah/tensorflow,kobejean/tensorflow,alshedivat/tensorflow,davidzchen/tensorflow,adit-chandra/tensorflow,jhseu/tensorflow,kobejean/tensorflow,freedomtan/tensorflow,theflofly/tensorflow,renyi533/tensorflow,freedomtan/tensorflow,kobejean/tensorflow,dancingdan/tensorflow,frreiss/tensorflow-fred,xodus7/tensorflow,alsrgv/tensorflow,manipopopo/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,asimshankar/tensorflow,renyi533/tensorflow,alsrgv/tensorflow,gunan/tensorflow,manipopopo/tensorflow,xodus7/tensorflow,xzturn/tensorflow,karllessard/tensorflow,hfp/tensorflow-xsmm,ageron/tensorflow,adit-chandra/tensorflow,gautam1858/tensorflow,cxxgtxy/tensorflow,jendap/tensorflow,karllessard/tensorflow,petewarden/tensorflow,DavidNorman/tensorflow,yongtang/tensorflow,gunan/tensorflow,apark263/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,xodus7/tensorflow,sarvex/tensorflow,yongtang/tensorflow,cxxgtxy/tensorflow,ppwwyyxx/
tensorflow,apark263/tensorflow,xzturn/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,theflofly/tensorflow,gautam1858/tensorflow,hehongliang/tensorflow,kobejean/tensorflow,chemelnucfin/tensorflow,jbedorf/tensorflow,hehongliang/tensorflow,manipopopo/tensorflow,cxxgtxy/tensorflow,seanli9jan/tensorflow,AnishShah/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,Bismarrck/tensorflow,Bismarrck/tensorflow,apark263/tensorflow,DavidNorman/tensorflow,brchiu/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,renyi533/tensorflow,jbedorf/tensorflow,aselle/tensorflow,annarev/tensorflow,ZhangXinNan/tensorflow,AnishShah/tensorflow,Intel-Corporation/tensorflow,aam-at/tensorflow,snnn/tensorflow,ghchinoy/tensorflow,jalexvig/tensorflow,davidzchen/tensorflow,chemelnucfin/tensorflow,ZhangXinNan/tensorflow,apark263/tensorflow,snnn/tensorflow,annarev/tensorflow,jhseu/tensorflow,asimshankar/tensorflow,tensorflow/tensorflow,adit-chandra/tensorflow,snnn/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,theflofly/tensorflow,snnn/tensorflow,ageron/tensorflow,brchiu/tensorflow,adit-chandra/tensorflow,alshedivat/tensorflow,apark263/tensorflow,seanli9jan/tensorflow,asimshankar/tensorflow,annarev/tensorflow,aam-at/tensorflow,AnishShah/tensorflow,petewarden/tensorflow,jbedorf/tensorflow,ZhangXinNan/tensorflow,cxxgtxy/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,ageron/tensorflow,manipopopo/tensorflow,annarev/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,sarvex/tensorflow,alshedivat/tensorflow,karllessard/tensorflow,dongjoon-hyun/tensorflow,alsrgv/tensorflow,kevin-coder/tensorflow-fork,frreiss/tensorflow-fred,gautam1858/tensorflow,manipopopo/tensorflow,karllessard/tensorflow,jart/tensorflow,aselle/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,kevin-coder/tensorflow-fork,jale
xvig/tensorflow,jart/tensorflow,tensorflow/tensorflow,aselle/tensorflow,tensorflow/tensorflow,jbedorf/tensorflow,jendap/tensorflow,ppwwyyxx/tensorflow,apark263/tensorflow,arborh/tensorflow,hfp/tensorflow-xsmm,xodus7/tensorflow,gunan/tensorflow,karllessard/tensorflow,ppwwyyxx/tensorflow,hfp/tensorflow-xsmm,jendap/tensorflow,kevin-coder/tensorflow-fork,ageron/tensorflow,asimshankar/tensorflow,alsrgv/tensorflow,girving/tensorflow,aselle/tensorflow,arborh/tensorflow,renyi533/tensorflow,paolodedios/tensorflow,manipopopo/tensorflow,jhseu/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,cxxgtxy/tensorflow,aldian/tensorflow,ppwwyyxx/tensorflow,ZhangXinNan/tensorflow,chemelnucfin/tensorflow,ghchinoy/tensorflow,renyi533/tensorflow,brchiu/tensorflow,Bismarrck/tensorflow,kevin-coder/tensorflow-fork,dancingdan/tensorflow,xzturn/tensorflow,aselle/tensorflow,xodus7/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jart/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jbedorf/tensorflow,annarev/tensorflow,ZhangXinNan/tensorflow,gunan/tensorflow,xzturn/tensorflow,aselle/tensorflow,aselle/tensorflow,AnishShah/tensorflow,dongjoon-hyun/tensorflow,aam-at/tensorflow,Bismarrck/tensorflow,gunan/tensorflow,adit-chandra/tensorflow,chemelnucfin/tensorflow,seanli9jan/tensorflow,frreiss/tensorflow-fred,xzturn/tensorflow,freedomtan/tensorflow,freedomtan/tensorflow,davidzchen/tensorflow,renyi533/tensorflow,ghchinoy/tensorflow,DavidNorman/tensorflow,kobejean/tensorflow,jalexvig/tensorflow,DavidNorman/tensorflow,manipopopo/tensorflow,Bismarrck/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,dancingdan/tensorflow,paolodedios/tensorflow,hehongliang/tensorflow,ghchinoy/tensorflow,alsrgv/tensorflow,renyi533/tensorflow,jalexvig/tensorflow,davidzchen/tensorflow,jbedorf/tensorflow,alshedivat/tensorflow,aldian/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,jart/tensorflow,
hfp/tensorflow-xsmm,petewarden/tensorflow,adit-chandra/tensorflow,freedomtan/tensorflow,jhseu/tensorflow,renyi533/tensorflow,DavidNorman/tensorflow,AnishShah/tensorflow,cxxgtxy/tensorflow,dancingdan/tensorflow,xodus7/tensorflow,jhseu/tensorflow,manipopopo/tensorflow,annarev/tensorflow,yongtang/tensorflow,aam-at/tensorflow,dancingdan/tensorflow,jendap/tensorflow,tensorflow/tensorflow,petewarden/tensorflow,petewarden/tensorflow,ageron/tensorflow,theflofly/tensorflow,DavidNorman/tensorflow,aam-at/tensorflow,arborh/tensorflow,brchiu/tensorflow,annarev/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,ppwwyyxx/tensorflow,jart/tensorflow,jbedorf/tensorflow,renyi533/tensorflow,jart/tensorflow,ppwwyyxx/tensorflow,theflofly/tensorflow,Bismarrck/tensorflow,dongjoon-hyun/tensorflow,aldian/tensorflow,karllessard/tensorflow,apark263/tensorflow,Intel-Corporation/tensorflow,Bismarrck/tensorflow,ghchinoy/tensorflow,frreiss/tensorflow-fred,jendap/tensorflow,ghchinoy/tensorflow,aam-at/tensorflow,aldian/tensorflow,alshedivat/tensorflow,jart/tensorflow,snnn/tensorflow,ZhangXinNan/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,gunan/tensorflow,arborh/tensorflow,aselle/tensorflow,seanli9jan/tensorflow,jart/tensorflow,kobejean/tensorflow,arborh/tensorflow,manipopopo/tensorflow,ppwwyyxx/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,jalexvig/tensorflow,DavidNorman/tensorflow,adit-chandra/tensorflow,dancingdan/tensorflow,alshedivat/tensorflow,gautam1858/tensorflow,ppwwyyxx/tensorflow,jendap/tensorflow,freedomtan/tensorflow,asimshankar/tensorflow,jhseu/tensorflow,xzturn/tensorflow,ageron/tensorflow,kevin-coder/tensorflow-fork,yongtang/tensorflow,gautam1858/tensorflow,cxxgtxy/tensorflow,seanli9jan/tensorflow,ageron/tensorflow,asimshankar/tensorflow,frreiss/tensorflow-fred,gunan/tensorflow,alshedivat/tensorflow,ZhangXinNan/tensorflow,paolodedios/tensorflow,dongjoon-hyun/tensorflow,jhseu/tensorflow,theflofly/
tensorflow,brchiu/tensorflow,chemelnucfin/tensorflow,ghchinoy/tensorflow,girving/tensorflow,ghchinoy/tensorflow,davidzchen/tensorflow,karllessard/tensorflow,apark263/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,jbedorf/tensorflow,snnn/tensorflow,kevin-coder/tensorflow-fork,girving/tensorflow,tensorflow/tensorflow-pywrap_saved_model,kevin-coder/tensorflow-fork,alshedivat/tensorflow,jbedorf/tensorflow,AnishShah/tensorflow,arborh/tensorflow,petewarden/tensorflow,dancingdan/tensorflow,dongjoon-hyun/tensorflow,petewarden/tensorflow,AnishShah/tensorflow,dongjoon-hyun/tensorflow,kobejean/tensorflow,snnn/tensorflow,arborh/tensorflow,theflofly/tensorflow,freedomtan/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,theflofly/tensorflow,kevin-coder/tensorflow-fork,DavidNorman/tensorflow,dancingdan/tensorflow,jbedorf/tensorflow,girving/tensorflow,gautam1858/tensorflow,gunan/tensorflow,apark263/tensorflow,snnn/tensorflow,seanli9jan/tensorflow,hfp/tensorflow-xsmm,hfp/tensorflow-xsmm,asimshankar/tensorflow,ZhangXinNan/tensorflow,hehongliang/tensorflow,ghchinoy/tensorflow,aam-at/tensorflow,annarev/tensorflow,davidzchen/tensorflow,theflofly/tensorflow,davidzchen/tensorflow,ppwwyyxx/tensorflow,chemelnucfin/tensorflow,girving/tensorflow,dancingdan/tensorflow,jendap/tensorflow,ghchinoy/tensorflow,tensorflow/tensorflow-pywrap_saved_model,alsrgv/tensorflow,Intel-Corporation/tensorflow,chemelnucfin/tensorflow,cxxgtxy/tensorflow,asimshankar/tensorflow,renyi533/tensorflow,paolodedios/tensorflow,hfp/tensorflow-xsmm,petewarden/tensorflow,Intel-tensorflow/tensorflow,seanli9jan/tensorflow,paolodedios/tensorflow,xodus7/tensorflow,DavidNorman/tensorflow,petewarden/tensorflow,Intel-tensorflow/tensorflow,brchiu/tensorflow,jalexvig/tensorflow,dongjoon-hyun/tensorflow,girving/tensorflow,ghchinoy/tensorflow,adit-chandra/tensorflow,chemelnucfin/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,hehongliang/tensorflow,adit-chandra/tensorflow,karllessar
d/tensorflow,xzturn/tensorflow,dongjoon-hyun/tensorflow,adit-chandra/tensorflow,tensorflow/tensorflow-pywrap_saved_model,brchiu/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,gunan/tensorflow,petewarden/tensorflow,aam-at/tensorflow,petewarden/tensorflow,seanli9jan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,alsrgv/tensorflow,aselle/tensorflow,ppwwyyxx/tensorflow,alsrgv/tensorflow,ZhangXinNan/tensorflow,sarvex/tensorflow,tensorflow/tensorflow,theflofly/tensorflow,arborh/tensorflow,seanli9jan/tensorflow,aldian/tensorflow,paolodedios/tensorflow,alshedivat/tensorflow,Bismarrck/tensorflow,AnishShah/tensorflow,hehongliang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,aam-at/tensorflow,kevin-coder/tensorflow-fork,sarvex/tensorflow,xodus7/tensorflow,gautam1858/tensorflow,ZhangXinNan/tensorflow,aam-at/tensorflow,girving/tensorflow,Bismarrck/tensorflow,jendap/tensorflow,kevin-coder/tensorflow-fork,DavidNorman/tensorflow,freedomtan/tensorflow,jhseu/tensorflow,arborh/tensorflow,renyi533/tensorflow,xzturn/tensorflow,jendap/tensorflow,kevin-coder/tensorflow-fork,hfp/tensorflow-xsmm,tensorflow/tensorflow-pywrap_tf_optimizer,DavidNorman/tensorflow,kobejean/tensorflow,adit-chandra/tensorflow,Intel-tensorflow/tensorflow,dongjoon-hyun/tensorflow,Intel-tensorflow/tensorflow,hfp/tensorflow-xsmm,jendap/tensorflow,frreiss/tensorflow-fred,seanli9jan/tensorflow,asimshankar/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,aldian/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,dongjoon-hyun/tensorflow,jart/tensorflow,petewarden/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,snnn/tensorflow,girving/tensorflow,jalexvig/tensorflow,manipopopo/tensorflow,hfp/tensorflow-xsmm,sarvex/tensorflow,jalexvig/tensorflow,ZhangXinNan/tensorflow,jhseu/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_o
nce,dancingdan/tensorflow,Bismarrck/tensorflow,yongtang/tensorflow,freedomtan/tensorflow,freedomtan/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,snnn/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow,brchiu/tensorflow,xzturn/tensorflow,sarvex/tensorflow,apark263/tensorflow,yongtang/tensorflow,xodus7/tensorflow,jhseu/tensorflow,kobejean/tensorflow,apark263/tensorflow,yongtang/tensorflow,arborh/tensorflow,alsrgv/tensorflow,jalexvig/tensorflow,ageron/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,alsrgv/tensorflow,asimshankar/tensorflow,davidzchen/tensorflow,arborh/tensorflow,yongtang/tensorflow,ghchinoy/tensorflow,yongtang/tensorflow,davidzchen/tensorflow,Intel-Corporation/tensorflow,xodus7/tensorflow,kobejean/tensorflow,girving/tensorflow,Bismarrck/tensorflow,tensorflow/tensorflow-pywrap_saved_model,xodus7/tensorflow,jhseu/tensorflow,brchiu/tensorflow,alshedivat/tensorflow,AnishShah/tensorflow,arborh/tensorflow,xzturn/tensorflow,frreiss/tensorflow-fred,annarev/tensorflow,paolodedios/tensorflow,theflofly/tensorflow,jbedorf/tensorflow,frreiss/tensorflow-fred,hehongliang/tensorflow,annarev/tensorflow,snnn/tensorflow,gunan/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,aselle/tensorflow,alshedivat/tensorflow,ageron/tensorflow,frreiss/tensorflow-fred,asimshankar/tensorflow,dongjoon-hyun/tensorflow,gautam1858/tensorflow,Intel-Corporation/tensorflow,chemelnucfin/tensorflow,aselle/tensorflow,chemelnucfin/tensorflow,jendap/tensorflow
|
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
- # TODO(mdan): Define a base transformer class that can recognize skip_processing
- # TODO(mdan): All converters are incomplete, especially those that change blocks
+ # Naming conventions:
+ # * each converter should specialize on a single idiom; be consistent with
+ # the Python reference for naming
+ # * all converters inherit core.converter.Base
+ # * module names describe the idiom that the converter covers, plural
+ # * the converter class is named consistent with the module, singular and
+ # includes the word Transformer
+ #
+ # Example:
+ #
+ # lists.py
+ # class ListTransformer(converter.Base)
|
Add a few naming guidelines for the converter library.
|
## Code Before:
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# TODO(mdan): Define a base transformer class that can recognize skip_processing
# TODO(mdan): All converters are incomplete, especially those that change blocks
## Instruction:
Add a few naming guidelines for the converter library.
## Code After:
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Naming conventions:
# * each converter should specialize on a single idiom; be consistent with
# the Python reference for naming
# * all converters inherit core.converter.Base
# * module names describe the idiom that the converter covers, plural
# * the converter class is named consistent with the module, singular and
# includes the word Transformer
#
# Example:
#
# lists.py
# class ListTransformer(converter.Base)
|
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
- # TODO(mdan): Define a base transformer class that can recognize skip_processing
- # TODO(mdan): All converters are incomplete, especially those that change blocks
+ # Naming conventions:
+ # * each converter should specialize on a single idiom; be consistent with
+ # the Python reference for naming
+ # * all converters inherit core.converter.Base
+ # * module names describe the idiom that the converter covers, plural
+ # * the converter class is named consistent with the module, singular and
+ # includes the word Transformer
+ #
+ # Example:
+ #
+ # lists.py
+ # class ListTransformer(converter.Base)
|
668f175fcff4414c6c01de31b8f8d703e9588c5f
|
Optimization.py
|
Optimization.py
|
import copy
import sys
import scipy
import SloppyCell.KeyedList_mod as KeyedList_mod
KeyedList = KeyedList_mod.KeyedList
def fmin_powell_log_params(m, params, *args, **kwargs):
func = m.cost_log_params
pmin = scipy.optimize.fmin_powell(func, scipy.log(params),
*args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
return pout
def fmin_log_params(m, params, *args, **kwargs):
func = m.cost_log_params
pmin = scipy.optimize.fmin(func, scipy.log(params),
*args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
return pout
def leastsq_log_params(m, params, *args, **kwargs):
func = m.res_log_params
pmin, msg = scipy.optimize.leastsq(func, scipy.log(params), *args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
return pout
|
import copy
import sys
import scipy
import SloppyCell.KeyedList_mod as KeyedList_mod
KeyedList = KeyedList_mod.KeyedList
def fmin_powell_log_params(m, params, *args, **kwargs):
    """Minimize the cost of model *m* with Powell's method in log-parameter space.

    Returns the optimum mapped back to linear space; the result is a
    KeyedList when *params* is one, otherwise a bare array.
    """
    cost = m.cost_log_params
    log_optimum = scipy.optimize.fmin_powell(cost, scipy.log(params),
                                             *args, **kwargs)
    optimum = scipy.exp(log_optimum)
    if not isinstance(params, KeyedList):
        return optimum
    result = params.copy()
    result.update(optimum)
    return result
def fmin_log_params(m, params, *args, **kwargs):
func = m.cost_log_params
pmin = scipy.optimize.fmin(func, scipy.log(params),
*args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
return pout
else:
return scipy.exp(pmin)
def leastsq_log_params(m, params, *args, **kwargs):
func = m.res_log_params
pmin, msg = scipy.optimize.leastsq(func, scipy.log(params), *args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
return pout
else:
return scipy.exp(pmin)
|
Fix to handle case where parameters are not passed-in as a KL
|
Fix to handle case where parameters are not passed-in as a KL
|
Python
|
bsd-3-clause
|
GutenkunstLab/SloppyCell,GutenkunstLab/SloppyCell
|
import copy
import sys
import scipy
import SloppyCell.KeyedList_mod as KeyedList_mod
KeyedList = KeyedList_mod.KeyedList
def fmin_powell_log_params(m, params, *args, **kwargs):
func = m.cost_log_params
pmin = scipy.optimize.fmin_powell(func, scipy.log(params),
*args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
-
- return pout
+ return pout
+ else:
+ return scipy.exp(pmin)
def fmin_log_params(m, params, *args, **kwargs):
func = m.cost_log_params
pmin = scipy.optimize.fmin(func, scipy.log(params),
*args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
- return pout
+ return pout
+ else:
+ return scipy.exp(pmin)
def leastsq_log_params(m, params, *args, **kwargs):
func = m.res_log_params
pmin, msg = scipy.optimize.leastsq(func, scipy.log(params), *args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
- return pout
+ return pout
+ else:
+ return scipy.exp(pmin)
|
Fix to handle case where parameters are not passed-in as a KL
|
## Code Before:
import copy
import sys
import scipy
import SloppyCell.KeyedList_mod as KeyedList_mod
KeyedList = KeyedList_mod.KeyedList
def fmin_powell_log_params(m, params, *args, **kwargs):
func = m.cost_log_params
pmin = scipy.optimize.fmin_powell(func, scipy.log(params),
*args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
return pout
def fmin_log_params(m, params, *args, **kwargs):
func = m.cost_log_params
pmin = scipy.optimize.fmin(func, scipy.log(params),
*args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
return pout
def leastsq_log_params(m, params, *args, **kwargs):
func = m.res_log_params
pmin, msg = scipy.optimize.leastsq(func, scipy.log(params), *args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
return pout
## Instruction:
Fix to handle case where parameters are not passed-in as a KL
## Code After:
import copy
import sys
import scipy
import SloppyCell.KeyedList_mod as KeyedList_mod
KeyedList = KeyedList_mod.KeyedList
def fmin_powell_log_params(m, params, *args, **kwargs):
func = m.cost_log_params
pmin = scipy.optimize.fmin_powell(func, scipy.log(params),
*args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
return pout
else:
return scipy.exp(pmin)
def fmin_log_params(m, params, *args, **kwargs):
func = m.cost_log_params
pmin = scipy.optimize.fmin(func, scipy.log(params),
*args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
return pout
else:
return scipy.exp(pmin)
def leastsq_log_params(m, params, *args, **kwargs):
func = m.res_log_params
pmin, msg = scipy.optimize.leastsq(func, scipy.log(params), *args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
return pout
else:
return scipy.exp(pmin)
|
import copy
import sys
import scipy
import SloppyCell.KeyedList_mod as KeyedList_mod
KeyedList = KeyedList_mod.KeyedList
def fmin_powell_log_params(m, params, *args, **kwargs):
func = m.cost_log_params
pmin = scipy.optimize.fmin_powell(func, scipy.log(params),
*args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
-
- return pout
+ return pout
? ++++
+ else:
+ return scipy.exp(pmin)
def fmin_log_params(m, params, *args, **kwargs):
func = m.cost_log_params
pmin = scipy.optimize.fmin(func, scipy.log(params),
*args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
- return pout
+ return pout
? ++++
+ else:
+ return scipy.exp(pmin)
def leastsq_log_params(m, params, *args, **kwargs):
func = m.res_log_params
pmin, msg = scipy.optimize.leastsq(func, scipy.log(params), *args, **kwargs)
if isinstance(params, KeyedList):
pout = params.copy()
pout.update(scipy.exp(pmin))
- return pout
+ return pout
? ++++
+ else:
+ return scipy.exp(pmin)
|
b825ee12fd6abc91b80b8a62886b9c53b82cdeeb
|
test/task_test.py
|
test/task_test.py
|
import doctest
import unittest
import luigi.task
class TaskTest(unittest.TestCase):
def test_tasks_doctest(self):
doctest.testmod(luigi.task)
|
import doctest
import unittest
import luigi.task
import luigi
from datetime import datetime, timedelta
class DummyTask(luigi.Task):
param = luigi.Parameter()
bool_param = luigi.BooleanParameter()
int_param = luigi.IntParameter()
float_param = luigi.FloatParameter()
date_param = luigi.DateParameter()
datehour_param = luigi.DateHourParameter()
timedelta_param = luigi.TimeDeltaParameter()
list_param = luigi.Parameter(is_list=True)
class TaskTest(unittest.TestCase):
def test_tasks_doctest(self):
doctest.testmod(luigi.task)
def test_task_to_str_to_task(self):
params = dict(
param='test',
bool_param=True,
int_param=666,
float_param=123.456,
date_param=datetime(2014, 9, 13).date(),
datehour_param=datetime(2014, 9, 13, 9),
timedelta_param=timedelta(44), # doesn't support seconds
list_param=['in', 'flames'])
original = DummyTask(**params)
other = DummyTask.from_str_params(original.to_str_params(), {})
self.assertEqual(original, other)
if __name__ == '__main__':
unittest.main()
|
Add test for task to str params conversion
|
Add test for task to str params conversion
|
Python
|
apache-2.0
|
soxofaan/luigi,adaitche/luigi,alkemics/luigi,dlstadther/luigi,fw1121/luigi,lungetech/luigi,linsomniac/luigi,moandcompany/luigi,mbruggmann/luigi,vine/luigi,aeron15/luigi,leafjungle/luigi,Houzz/luigi,springcoil/luigi,ViaSat/luigi,Tarrasch/luigi,oldpa/luigi,linsomniac/luigi,dstandish/luigi,kalaidin/luigi,jw0201/luigi,h3biomed/luigi,aeron15/luigi,DomainGroupOSS/luigi,huiyi1990/luigi,alkemics/luigi,dstandish/luigi,laserson/luigi,jw0201/luigi,soxofaan/luigi,graingert/luigi,wakamori/luigi,dylanjbarth/luigi,dlstadther/luigi,alkemics/luigi,kevhill/luigi,Yoone/luigi,SeedScientific/luigi,spotify/luigi,PeteW/luigi,humanlongevity/luigi,JackDanger/luigi,sahitya-pavurala/luigi,glenndmello/luigi,moritzschaefer/luigi,Magnetic/luigi,LamCiuLoeng/luigi,walkers-mv/luigi,moritzschaefer/luigi,dhruvg/luigi,bmaggard/luigi,bowlofstew/luigi,samepage-labs/luigi,fabriziodemaria/luigi,pkexcellent/luigi,kevhill/luigi,dkroy/luigi,JackDanger/luigi,walkers-mv/luigi,ehdr/luigi,LamCiuLoeng/luigi,samepage-labs/luigi,spotify/luigi,ViaSat/luigi,tuulos/luigi,ThQ/luigi,wakamori/luigi,moandcompany/luigi,mbruggmann/luigi,belevtsoff/luigi,neilisaac/luigi,slvnperron/luigi,anyman/luigi,realgo/luigi,ThQ/luigi,ChrisBeaumont/luigi,ThQ/luigi,graingert/luigi,rizzatti/luigi,ZhenxingWu/luigi,torypages/luigi,jamesmcm/luigi,glenndmello/luigi,meyerson/luigi,ChrisBeaumont/luigi,kalaidin/luigi,rizzatti/luigi,mbruggmann/luigi,linearregression/luigi,springcoil/luigi,gpoulin/luigi,huiyi1990/luigi,aeron15/luigi,stephenpascoe/luigi,rayrrr/luigi,aeron15/luigi,lichia/luigi,linearregression/luigi,penelopy/luigi,hadesbox/luigi,ContextLogic/luigi,hellais/luigi,SkyTruth/luigi,theoryno3/luigi,fabriziodemaria/luigi,pkexcellent/luigi,Houzz/luigi,anyman/luigi,bowlofstew/luigi,jw0201/luigi,kalaidin/luigi,moritzschaefer/luigi,joeshaw/luigi,mfcabrera/luigi,ViaSat/luigi,rizzatti/luigi,dlstadther/luigi,thejens/luigi,gpoulin/luigi,mfcabrera/luigi,penelopy/luigi,thejens/luigi,lungetech/luigi,h3biomed/luigi,bmaggard/luigi,fabriziodemaria/luigi,T
hQ/luigi,ivannotes/luigi,penelopy/luigi,casey-green/luigi,walkers-mv/luigi,hadesbox/luigi,ChrisBeaumont/luigi,kalaidin/luigi,sahitya-pavurala/luigi,hellais/luigi,laserson/luigi,gpoulin/luigi,altaf-ali/luigi,lichia/luigi,anyman/luigi,stephenpascoe/luigi,meyerson/luigi,lichia/luigi,ViaSat/luigi,Yoone/luigi,walkers-mv/luigi,joeshaw/luigi,realgo/luigi,republic-analytics/luigi,stroykova/luigi,Tarrasch/luigi,vine/luigi,samuell/luigi,foursquare/luigi,JackDanger/luigi,percyfal/luigi,tuulos/luigi,ContextLogic/luigi,upworthy/luigi,jw0201/luigi,stroykova/luigi,Yoone/luigi,lungetech/luigi,pkexcellent/luigi,humanlongevity/luigi,hellais/luigi,Houzz/luigi,lungetech/luigi,Dawny33/luigi,humanlongevity/luigi,hellais/luigi,ZhenxingWu/luigi,hadesbox/luigi,SkyTruth/luigi,dylanjbarth/luigi,edx/luigi,ehdr/luigi,glenndmello/luigi,PeteW/luigi,qpxu007/luigi,harveyxia/luigi,percyfal/luigi,drincruz/luigi,ehdr/luigi,sahitya-pavurala/luigi,SkyTruth/luigi,ZhenxingWu/luigi,huiyi1990/luigi,javrasya/luigi,penelopy/luigi,joeshaw/luigi,dstandish/luigi,riga/luigi,bowlofstew/luigi,foursquare/luigi,mfcabrera/luigi,dhruvg/luigi,glenndmello/luigi,bmaggard/luigi,Tarrasch/luigi,fw1121/luigi,samuell/luigi,moritzschaefer/luigi,javrasya/luigi,riga/luigi,fabriziodemaria/luigi,DomainGroupOSS/luigi,SkyTruth/luigi,neilisaac/luigi,leafjungle/luigi,dkroy/luigi,realgo/luigi,bmaggard/luigi,wakamori/luigi,qpxu007/luigi,dstandish/luigi,casey-green/luigi,dylanjbarth/luigi,republic-analytics/luigi,drincruz/luigi,vine/luigi,linsomniac/luigi,LamCiuLoeng/luigi,graingert/luigi,PeteW/luigi,riga/luigi,DomainGroupOSS/luigi,altaf-ali/luigi,Wattpad/luigi,JackDanger/luigi,javrasya/luigi,casey-green/luigi,fw1121/luigi,Wattpad/luigi,slvnperron/luigi,stephenpascoe/luigi,harveyxia/luigi,adaitche/luigi,springcoil/luigi,leafjungle/luigi,Houzz/luigi,ChrisBeaumont/luigi,dhruvg/luigi,jamesmcm/luigi,casey-green/luigi,stroykova/luigi,samepage-labs/luigi,moandcompany/luigi,torypages/luigi,h3biomed/luigi,samepage-labs/luigi,lichia/luigi,huiyi199
0/luigi,dkroy/luigi,altaf-ali/luigi,tuulos/luigi,kevhill/luigi,javrasya/luigi,LamCiuLoeng/luigi,SeedScientific/luigi,meyerson/luigi,samuell/luigi,rizzatti/luigi,ContextLogic/luigi,foursquare/luigi,laserson/luigi,humanlongevity/luigi,theoryno3/luigi,Dawny33/luigi,neilisaac/luigi,SeedScientific/luigi,belevtsoff/luigi,belevtsoff/luigi,percyfal/luigi,mfcabrera/luigi,mbruggmann/luigi,realgo/luigi,vine/luigi,moandcompany/luigi,belevtsoff/luigi,wakamori/luigi,adaitche/luigi,17zuoye/luigi,linsomniac/luigi,meyerson/luigi,spotify/luigi,torypages/luigi,foursquare/luigi,dhruvg/luigi,DomainGroupOSS/luigi,laserson/luigi,jamesmcm/luigi,Magnetic/luigi,spotify/luigi,tuulos/luigi,fw1121/luigi,ivannotes/luigi,soxofaan/luigi,Yoone/luigi,qpxu007/luigi,gpoulin/luigi,17zuoye/luigi,harveyxia/luigi,hadesbox/luigi,h3biomed/luigi,rayrrr/luigi,17zuoye/luigi,alkemics/luigi,linearregression/luigi,ivannotes/luigi,riga/luigi,Magnetic/luigi,upworthy/luigi,theoryno3/luigi,torypages/luigi,harveyxia/luigi,SeedScientific/luigi,adaitche/luigi,slvnperron/luigi,dkroy/luigi,samuell/luigi,qpxu007/luigi,oldpa/luigi,edx/luigi,upworthy/luigi,linearregression/luigi,Dawny33/luigi,thejens/luigi,Wattpad/luigi,bowlofstew/luigi,republic-analytics/luigi,slvnperron/luigi,percyfal/luigi,edx/luigi,upworthy/luigi,dylanjbarth/luigi,pkexcellent/luigi,leafjungle/luigi,graingert/luigi,soxofaan/luigi,kevhill/luigi,springcoil/luigi,rayrrr/luigi,stephenpascoe/luigi,17zuoye/luigi,theoryno3/luigi,drincruz/luigi,thejens/luigi,rayrrr/luigi,jamesmcm/luigi,Magnetic/luigi,ehdr/luigi,Dawny33/luigi,stroykova/luigi,ivannotes/luigi,sahitya-pavurala/luigi,drincruz/luigi,republic-analytics/luigi,Tarrasch/luigi,ContextLogic/luigi,ZhenxingWu/luigi,neilisaac/luigi,dlstadther/luigi,oldpa/luigi,altaf-ali/luigi,oldpa/luigi,anyman/luigi,joeshaw/luigi,edx/luigi,PeteW/luigi
|
import doctest
import unittest
import luigi.task
+ import luigi
+ from datetime import datetime, timedelta
+
+
+ class DummyTask(luigi.Task):
+
+ param = luigi.Parameter()
+ bool_param = luigi.BooleanParameter()
+ int_param = luigi.IntParameter()
+ float_param = luigi.FloatParameter()
+ date_param = luigi.DateParameter()
+ datehour_param = luigi.DateHourParameter()
+ timedelta_param = luigi.TimeDeltaParameter()
+ list_param = luigi.Parameter(is_list=True)
class TaskTest(unittest.TestCase):
-
- def test_tasks_doctest(self):
- doctest.testmod(luigi.task)
+ def test_tasks_doctest(self):
+ doctest.testmod(luigi.task)
+
+ def test_task_to_str_to_task(self):
+ params = dict(
+ param='test',
+ bool_param=True,
+ int_param=666,
+ float_param=123.456,
+ date_param=datetime(2014, 9, 13).date(),
+ datehour_param=datetime(2014, 9, 13, 9),
+ timedelta_param=timedelta(44), # doesn't support seconds
+ list_param=['in', 'flames'])
+
+ original = DummyTask(**params)
+ other = DummyTask.from_str_params(original.to_str_params(), {})
+ self.assertEqual(original, other)
+
+
+ if __name__ == '__main__':
+ unittest.main()
+
|
Add test for task to str params conversion
|
## Code Before:
import doctest
import unittest
import luigi.task
class TaskTest(unittest.TestCase):
def test_tasks_doctest(self):
doctest.testmod(luigi.task)
## Instruction:
Add test for task to str params conversion
## Code After:
import doctest
import unittest
import luigi.task
import luigi
from datetime import datetime, timedelta
class DummyTask(luigi.Task):
param = luigi.Parameter()
bool_param = luigi.BooleanParameter()
int_param = luigi.IntParameter()
float_param = luigi.FloatParameter()
date_param = luigi.DateParameter()
datehour_param = luigi.DateHourParameter()
timedelta_param = luigi.TimeDeltaParameter()
list_param = luigi.Parameter(is_list=True)
class TaskTest(unittest.TestCase):
def test_tasks_doctest(self):
doctest.testmod(luigi.task)
def test_task_to_str_to_task(self):
params = dict(
param='test',
bool_param=True,
int_param=666,
float_param=123.456,
date_param=datetime(2014, 9, 13).date(),
datehour_param=datetime(2014, 9, 13, 9),
timedelta_param=timedelta(44), # doesn't support seconds
list_param=['in', 'flames'])
original = DummyTask(**params)
other = DummyTask.from_str_params(original.to_str_params(), {})
self.assertEqual(original, other)
if __name__ == '__main__':
unittest.main()
|
import doctest
import unittest
import luigi.task
+ import luigi
+ from datetime import datetime, timedelta
+
+
+ class DummyTask(luigi.Task):
+
+ param = luigi.Parameter()
+ bool_param = luigi.BooleanParameter()
+ int_param = luigi.IntParameter()
+ float_param = luigi.FloatParameter()
+ date_param = luigi.DateParameter()
+ datehour_param = luigi.DateHourParameter()
+ timedelta_param = luigi.TimeDeltaParameter()
+ list_param = luigi.Parameter(is_list=True)
class TaskTest(unittest.TestCase):
-
+
- def test_tasks_doctest(self):
? ^
+ def test_tasks_doctest(self):
? ^^^^
- doctest.testmod(luigi.task)
? ^^
+ doctest.testmod(luigi.task)
? ^^^^^^^^
+
+ def test_task_to_str_to_task(self):
+ params = dict(
+ param='test',
+ bool_param=True,
+ int_param=666,
+ float_param=123.456,
+ date_param=datetime(2014, 9, 13).date(),
+ datehour_param=datetime(2014, 9, 13, 9),
+ timedelta_param=timedelta(44), # doesn't support seconds
+ list_param=['in', 'flames'])
+
+ original = DummyTask(**params)
+ other = DummyTask.from_str_params(original.to_str_params(), {})
+ self.assertEqual(original, other)
+
+
+ if __name__ == '__main__':
+ unittest.main()
|
c0c73dd73f13e8d1d677cc2d7cad5c2f63217751
|
python/tests/test_rmm.py
|
python/tests/test_rmm.py
|
import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from libgdf_cffi import libgdf
from librmm_cffi import ffi, librmm
from .utils import new_column, unwrap_devary, get_dtype, gen_rand, fix_zeros
from .utils import buffer_as_bits
_dtypes = [np.int32]
_nelems = [128]
@pytest.fixture(scope="module")
def rmm():
print("initialize librmm")
assert librmm.initialize() == librmm.RMM_SUCCESS
yield librmm
print("finalize librmm")
assert librmm.finalize() == librmm.RMM_SUCCESS
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem, rmm):
expect_fn = np.add
test_fn = libgdf.gdf_add_generic
#import cffi
#ffi = cffi.FFI()
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
assert rmm.free_device_array_memory(d_in) == rmm.RMM_SUCCESS
assert rmm.free_device_array_memory(d_result) == rmm.RMM_SUCCESS
|
import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from librmm_cffi import librmm as rmm
from .utils import gen_rand
_dtypes = [np.int32]
_nelems = [1, 2, 7, 8, 9, 32, 128]
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem):
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
|
Improve librmm python API and convert all pytests to use RMM to create device_arrays.
|
Improve librmm python API and convert all pytests to use RMM to create device_arrays.
|
Python
|
apache-2.0
|
gpuopenanalytics/libgdf,gpuopenanalytics/libgdf,gpuopenanalytics/libgdf,gpuopenanalytics/libgdf
|
import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
- from libgdf_cffi import libgdf
- from librmm_cffi import ffi, librmm
+ from librmm_cffi import librmm as rmm
+ from .utils import gen_rand
- from .utils import new_column, unwrap_devary, get_dtype, gen_rand, fix_zeros
- from .utils import buffer_as_bits
-
_dtypes = [np.int32]
+ _nelems = [1, 2, 7, 8, 9, 32, 128]
- _nelems = [128]
-
- @pytest.fixture(scope="module")
- def rmm():
- print("initialize librmm")
- assert librmm.initialize() == librmm.RMM_SUCCESS
- yield librmm
- print("finalize librmm")
- assert librmm.finalize() == librmm.RMM_SUCCESS
-
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
- def test_rmm_alloc(dtype, nelem, rmm):
+ def test_rmm_alloc(dtype, nelem):
-
- expect_fn = np.add
- test_fn = libgdf.gdf_add_generic
-
- #import cffi
- #ffi = cffi.FFI()
-
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
- assert rmm.free_device_array_memory(d_in) == rmm.RMM_SUCCESS
- assert rmm.free_device_array_memory(d_result) == rmm.RMM_SUCCESS
-
|
Improve librmm python API and convert all pytests to use RMM to create device_arrays.
|
## Code Before:
import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from libgdf_cffi import libgdf
from librmm_cffi import ffi, librmm
from .utils import new_column, unwrap_devary, get_dtype, gen_rand, fix_zeros
from .utils import buffer_as_bits
_dtypes = [np.int32]
_nelems = [128]
@pytest.fixture(scope="module")
def rmm():
print("initialize librmm")
assert librmm.initialize() == librmm.RMM_SUCCESS
yield librmm
print("finalize librmm")
assert librmm.finalize() == librmm.RMM_SUCCESS
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem, rmm):
expect_fn = np.add
test_fn = libgdf.gdf_add_generic
#import cffi
#ffi = cffi.FFI()
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
assert rmm.free_device_array_memory(d_in) == rmm.RMM_SUCCESS
assert rmm.free_device_array_memory(d_result) == rmm.RMM_SUCCESS
## Instruction:
Improve librmm python API and convert all pytests to use RMM to create device_arrays.
## Code After:
import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from librmm_cffi import librmm as rmm
from .utils import gen_rand
_dtypes = [np.int32]
_nelems = [1, 2, 7, 8, 9, 32, 128]
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem):
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
|
import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
- from libgdf_cffi import libgdf
- from librmm_cffi import ffi, librmm
? -----
+ from librmm_cffi import librmm as rmm
? +++++++
+ from .utils import gen_rand
- from .utils import new_column, unwrap_devary, get_dtype, gen_rand, fix_zeros
- from .utils import buffer_as_bits
-
_dtypes = [np.int32]
+ _nelems = [1, 2, 7, 8, 9, 32, 128]
- _nelems = [128]
-
- @pytest.fixture(scope="module")
- def rmm():
- print("initialize librmm")
- assert librmm.initialize() == librmm.RMM_SUCCESS
- yield librmm
- print("finalize librmm")
- assert librmm.finalize() == librmm.RMM_SUCCESS
-
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
- def test_rmm_alloc(dtype, nelem, rmm):
? -----
+ def test_rmm_alloc(dtype, nelem):
-
- expect_fn = np.add
- test_fn = libgdf.gdf_add_generic
-
- #import cffi
- #ffi = cffi.FFI()
-
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
-
- assert rmm.free_device_array_memory(d_in) == rmm.RMM_SUCCESS
- assert rmm.free_device_array_memory(d_result) == rmm.RMM_SUCCESS
|
db08c5ae962c2e66c8ad2e668f530d08934200af
|
geometry.py
|
geometry.py
|
from geom2d import *
l1 = []
for i in range(-5, 6):
l1.append(Point(i, i*i))
l2 = []
for el in l1:
l2.append(Point(el.x, -el.y))
print(l1)
print(l2)
# List comprehension
l1c = [Point(i, i*i) for i in range(-5, 6)]
l2c = [Point(el.x, -el.y) for el in l1c]
print("List comprehension")
print(l1c)
print(l2c)
|
from geom2d import *
l1 = list(map(lambda i: Point(i, i*i), range(-5, 6)))
# l2 = list(map(lambda p: Point(p.x, -p.y), l1))
# l2 = list(filter(lambda p: p.x > 0, l1))
l2 = list(filter(lambda p: p.x % 2 == 0, l1))
print(l1)
print(l2)
|
Work with lists in functional way (map, filter)
|
Work with lists in functional way (map, filter)
|
Python
|
apache-2.0
|
maciekp85/python-for-testers
|
from geom2d import *
- l1 = []
+ l1 = list(map(lambda i: Point(i, i*i), range(-5, 6)))
+ # l2 = list(map(lambda p: Point(p.x, -p.y), l1))
- for i in range(-5, 6):
- l1.append(Point(i, i*i))
+ # l2 = list(filter(lambda p: p.x > 0, l1))
+ l2 = list(filter(lambda p: p.x % 2 == 0, l1))
- l2 = []
-
- for el in l1:
- l2.append(Point(el.x, -el.y))
print(l1)
print(l2)
- # List comprehension
- l1c = [Point(i, i*i) for i in range(-5, 6)]
- l2c = [Point(el.x, -el.y) for el in l1c]
-
- print("List comprehension")
- print(l1c)
- print(l2c)
-
|
Work with lists in functional way (map, filter)
|
## Code Before:
from geom2d import *
l1 = []
for i in range(-5, 6):
l1.append(Point(i, i*i))
l2 = []
for el in l1:
l2.append(Point(el.x, -el.y))
print(l1)
print(l2)
# List comprehension
l1c = [Point(i, i*i) for i in range(-5, 6)]
l2c = [Point(el.x, -el.y) for el in l1c]
print("List comprehension")
print(l1c)
print(l2c)
## Instruction:
Work with lists in functional way (map, filter)
## Code After:
from geom2d import *
l1 = list(map(lambda i: Point(i, i*i), range(-5, 6)))
# l2 = list(map(lambda p: Point(p.x, -p.y), l1))
# l2 = list(filter(lambda p: p.x > 0, l1))
l2 = list(filter(lambda p: p.x % 2 == 0, l1))
print(l1)
print(l2)
|
from geom2d import *
- l1 = []
+ l1 = list(map(lambda i: Point(i, i*i), range(-5, 6)))
+ # l2 = list(map(lambda p: Point(p.x, -p.y), l1))
- for i in range(-5, 6):
- l1.append(Point(i, i*i))
+ # l2 = list(filter(lambda p: p.x > 0, l1))
+ l2 = list(filter(lambda p: p.x % 2 == 0, l1))
- l2 = []
-
- for el in l1:
- l2.append(Point(el.x, -el.y))
print(l1)
print(l2)
-
- # List comprehension
- l1c = [Point(i, i*i) for i in range(-5, 6)]
- l2c = [Point(el.x, -el.y) for el in l1c]
-
- print("List comprehension")
- print(l1c)
- print(l2c)
|
de69c4048fe8533185a4eca6f98c7d74967618bf
|
opentreemap/opentreemap/util.py
|
opentreemap/opentreemap/util.py
|
from django.views.decorators.csrf import csrf_exempt
import json
def route(**kwargs):
@csrf_exempt
def routed(request, *args2, **kwargs2):
method = request.method
req_method = kwargs[method]
return req_method(request, *args2, **kwargs2)
return routed
def json_from_request(request):
body = request.body
if body:
return json.loads(body)
else:
return None
def merge_view_contexts(viewfns):
def wrapped(*args, **kwargs):
context = {}
for viewfn in viewfns:
context.update(viewfn(*args, **kwargs))
return context
return wrapped
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import json
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse, HttpResponseRedirect, Http404
def route(**kwargs):
@csrf_exempt
def routed(request, *args2, **kwargs2):
method = request.method
if method not in kwargs:
raise Http404()
else:
req_method = kwargs[method]
return req_method(request, *args2, **kwargs2)
return routed
def json_from_request(request):
body = request.body
if body:
return json.loads(body)
else:
return None
def merge_view_contexts(viewfns):
def wrapped(*args, **kwargs):
context = {}
for viewfn in viewfns:
context.update(viewfn(*args, **kwargs))
return context
return wrapped
|
Return a 404, not a 500 on a verb mismatch
|
Return a 404, not a 500 on a verb mismatch
Fixes #1101
|
Python
|
agpl-3.0
|
maurizi/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core,recklessromeo/otm-core,clever-crow-consulting/otm-core,recklessromeo/otm-core,RickMohr/otm-core,recklessromeo/otm-core,recklessromeo/otm-core,maurizi/otm-core,RickMohr/otm-core,RickMohr/otm-core,clever-crow-consulting/otm-core,RickMohr/otm-core,clever-crow-consulting/otm-core,maurizi/otm-core
|
+ from __future__ import print_function
+ from __future__ import unicode_literals
+ from __future__ import division
+
+ import json
+
from django.views.decorators.csrf import csrf_exempt
- import json
+ from django.http import HttpResponse, HttpResponseRedirect, Http404
def route(**kwargs):
@csrf_exempt
def routed(request, *args2, **kwargs2):
method = request.method
+
+ if method not in kwargs:
+ raise Http404()
+ else:
- req_method = kwargs[method]
+ req_method = kwargs[method]
- return req_method(request, *args2, **kwargs2)
+ return req_method(request, *args2, **kwargs2)
return routed
def json_from_request(request):
body = request.body
if body:
return json.loads(body)
else:
return None
def merge_view_contexts(viewfns):
def wrapped(*args, **kwargs):
context = {}
for viewfn in viewfns:
context.update(viewfn(*args, **kwargs))
return context
return wrapped
|
Return a 404, not a 500 on a verb mismatch
|
## Code Before:
from django.views.decorators.csrf import csrf_exempt
import json
def route(**kwargs):
@csrf_exempt
def routed(request, *args2, **kwargs2):
method = request.method
req_method = kwargs[method]
return req_method(request, *args2, **kwargs2)
return routed
def json_from_request(request):
body = request.body
if body:
return json.loads(body)
else:
return None
def merge_view_contexts(viewfns):
def wrapped(*args, **kwargs):
context = {}
for viewfn in viewfns:
context.update(viewfn(*args, **kwargs))
return context
return wrapped
## Instruction:
Return a 404, not a 500 on a verb mismatch
## Code After:
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import json
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse, HttpResponseRedirect, Http404
def route(**kwargs):
@csrf_exempt
def routed(request, *args2, **kwargs2):
method = request.method
if method not in kwargs:
raise Http404()
else:
req_method = kwargs[method]
return req_method(request, *args2, **kwargs2)
return routed
def json_from_request(request):
body = request.body
if body:
return json.loads(body)
else:
return None
def merge_view_contexts(viewfns):
def wrapped(*args, **kwargs):
context = {}
for viewfn in viewfns:
context.update(viewfn(*args, **kwargs))
return context
return wrapped
|
+ from __future__ import print_function
+ from __future__ import unicode_literals
+ from __future__ import division
+
+ import json
+
from django.views.decorators.csrf import csrf_exempt
- import json
+ from django.http import HttpResponse, HttpResponseRedirect, Http404
def route(**kwargs):
@csrf_exempt
def routed(request, *args2, **kwargs2):
method = request.method
+
+ if method not in kwargs:
+ raise Http404()
+ else:
- req_method = kwargs[method]
+ req_method = kwargs[method]
? ++++
- return req_method(request, *args2, **kwargs2)
+ return req_method(request, *args2, **kwargs2)
? ++++
return routed
def json_from_request(request):
body = request.body
if body:
return json.loads(body)
else:
return None
def merge_view_contexts(viewfns):
def wrapped(*args, **kwargs):
context = {}
for viewfn in viewfns:
context.update(viewfn(*args, **kwargs))
return context
return wrapped
|
376b8aa5b77066e06c17f41d65fe32a3c2bdef1f
|
geo.py
|
geo.py
|
import mmap
import yaml
print("---------------------------- geo --")
print("-- by [email protected] --")
print("-----------------------------------")
doc_in = "./001-v2-doc.md"
class geoReader():
def __init__(self, doc_in):
self.doc_in = doc_in
self.header = None
def __enter__(self):
"""Open the file.
"""
self.f = open(self.doc_in, 'r')
return self
def __exit__(self, type, value, traceback):
"""Close the file.
"""
self.f.close()
def parseHeader(self):
"""Parse the header of the file.
"""
s = mmap.mmap(self.f.fileno(), 0, access=mmap.ACCESS_READ)
self.header_limit = s.find(b'---')
if self.header_limit != -1:
self.header = yaml.load(s[0:self.header_limit])
print(self.header['name'])
else:
print("Cannot load the header")
# Read the document
with geoReader(doc_in) as g:
g.parseHeader()
|
import mmap
import yaml
print("---------------------------- geo --")
print("-- by [email protected] --")
print("-----------------------------------")
doc_in = "./001-v2-doc.md"
class geoReader():
def __init__(self, doc_in):
self.doc_in = doc_in
self.header = None
self.header_limit = -1
def __enter__(self):
"""Open the file.
"""
self.f = open(self.doc_in, 'r')
return self
def __exit__(self, type, value, traceback):
"""Close the file.
"""
self.f.close()
def parseHeader(self):
"""Parse the header of the file.
"""
s = mmap.mmap(self.f.fileno(), 0, access=mmap.ACCESS_READ)
self.header_limit = s.find(b'---')
if self.header_limit != -1:
self.header = yaml.load(s[0:self.header_limit])
print(self.header['name'])
else:
print("Cannot load the header")
# Read the document
with geoReader(doc_in) as g:
g.parseHeader()
|
Add a default value to the header limit
|
Add a default value to the header limit
|
Python
|
mit
|
a2ohm/geo
|
import mmap
import yaml
print("---------------------------- geo --")
print("-- by [email protected] --")
print("-----------------------------------")
doc_in = "./001-v2-doc.md"
class geoReader():
def __init__(self, doc_in):
self.doc_in = doc_in
self.header = None
+ self.header_limit = -1
def __enter__(self):
"""Open the file.
"""
self.f = open(self.doc_in, 'r')
return self
def __exit__(self, type, value, traceback):
"""Close the file.
"""
self.f.close()
def parseHeader(self):
"""Parse the header of the file.
"""
s = mmap.mmap(self.f.fileno(), 0, access=mmap.ACCESS_READ)
self.header_limit = s.find(b'---')
if self.header_limit != -1:
self.header = yaml.load(s[0:self.header_limit])
print(self.header['name'])
else:
print("Cannot load the header")
# Read the document
with geoReader(doc_in) as g:
g.parseHeader()
|
Add a default value to the header limit
|
## Code Before:
import mmap
import yaml
print("---------------------------- geo --")
print("-- by [email protected] --")
print("-----------------------------------")
doc_in = "./001-v2-doc.md"
class geoReader():
def __init__(self, doc_in):
self.doc_in = doc_in
self.header = None
def __enter__(self):
"""Open the file.
"""
self.f = open(self.doc_in, 'r')
return self
def __exit__(self, type, value, traceback):
"""Close the file.
"""
self.f.close()
def parseHeader(self):
"""Parse the header of the file.
"""
s = mmap.mmap(self.f.fileno(), 0, access=mmap.ACCESS_READ)
self.header_limit = s.find(b'---')
if self.header_limit != -1:
self.header = yaml.load(s[0:self.header_limit])
print(self.header['name'])
else:
print("Cannot load the header")
# Read the document
with geoReader(doc_in) as g:
g.parseHeader()
## Instruction:
Add a default value to the header limit
## Code After:
import mmap
import yaml
print("---------------------------- geo --")
print("-- by [email protected] --")
print("-----------------------------------")
doc_in = "./001-v2-doc.md"
class geoReader():
def __init__(self, doc_in):
self.doc_in = doc_in
self.header = None
self.header_limit = -1
def __enter__(self):
"""Open the file.
"""
self.f = open(self.doc_in, 'r')
return self
def __exit__(self, type, value, traceback):
"""Close the file.
"""
self.f.close()
def parseHeader(self):
"""Parse the header of the file.
"""
s = mmap.mmap(self.f.fileno(), 0, access=mmap.ACCESS_READ)
self.header_limit = s.find(b'---')
if self.header_limit != -1:
self.header = yaml.load(s[0:self.header_limit])
print(self.header['name'])
else:
print("Cannot load the header")
# Read the document
with geoReader(doc_in) as g:
g.parseHeader()
|
import mmap
import yaml
print("---------------------------- geo --")
print("-- by [email protected] --")
print("-----------------------------------")
doc_in = "./001-v2-doc.md"
class geoReader():
def __init__(self, doc_in):
self.doc_in = doc_in
self.header = None
+ self.header_limit = -1
def __enter__(self):
"""Open the file.
"""
self.f = open(self.doc_in, 'r')
return self
def __exit__(self, type, value, traceback):
"""Close the file.
"""
self.f.close()
def parseHeader(self):
"""Parse the header of the file.
"""
s = mmap.mmap(self.f.fileno(), 0, access=mmap.ACCESS_READ)
self.header_limit = s.find(b'---')
if self.header_limit != -1:
self.header = yaml.load(s[0:self.header_limit])
print(self.header['name'])
else:
print("Cannot load the header")
# Read the document
with geoReader(doc_in) as g:
g.parseHeader()
|
076a129a8468a6c85c8b55a752aca87a60f90d79
|
ehrcorral/compressions.py
|
ehrcorral/compressions.py
|
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals
from jellyfish import soundex, nysiis, metaphone
from metaphone import doublemetaphone as dmetaphone
def first_letter(name):
"""A simple name compression that returns the first letter of the name.
Args:
name (str): A forename, surname, or other name.
Returns:
(str): The upper case of the first letter of the name
"""
return name[0].upper()
|
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals
from jellyfish import soundex, nysiis, metaphone
from metaphone import doublemetaphone as dmetaphone
def first_letter(name):
"""A simple name compression that returns the first letter of the name.
Args:
name (str): A forename, surname, or other name.
Returns:
(str): The upper case of the first letter of the name
"""
return name[0].upper() if name else unicode('')
|
Add if/else to first_letter compression to handle empty names
|
Add if/else to first_letter compression to handle empty names
|
Python
|
isc
|
nsh87/ehrcorral
|
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals
from jellyfish import soundex, nysiis, metaphone
from metaphone import doublemetaphone as dmetaphone
def first_letter(name):
"""A simple name compression that returns the first letter of the name.
Args:
name (str): A forename, surname, or other name.
Returns:
(str): The upper case of the first letter of the name
"""
- return name[0].upper()
+ return name[0].upper() if name else unicode('')
|
Add if/else to first_letter compression to handle empty names
|
## Code Before:
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals
from jellyfish import soundex, nysiis, metaphone
from metaphone import doublemetaphone as dmetaphone
def first_letter(name):
"""A simple name compression that returns the first letter of the name.
Args:
name (str): A forename, surname, or other name.
Returns:
(str): The upper case of the first letter of the name
"""
return name[0].upper()
## Instruction:
Add if/else to first_letter compression to handle empty names
## Code After:
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals
from jellyfish import soundex, nysiis, metaphone
from metaphone import doublemetaphone as dmetaphone
def first_letter(name):
"""A simple name compression that returns the first letter of the name.
Args:
name (str): A forename, surname, or other name.
Returns:
(str): The upper case of the first letter of the name
"""
return name[0].upper() if name else unicode('')
|
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals
from jellyfish import soundex, nysiis, metaphone
from metaphone import doublemetaphone as dmetaphone
def first_letter(name):
"""A simple name compression that returns the first letter of the name.
Args:
name (str): A forename, surname, or other name.
Returns:
(str): The upper case of the first letter of the name
"""
- return name[0].upper()
+ return name[0].upper() if name else unicode('')
|
34812fe2deec64229efd4119640f3c2ddf0ed415
|
visualize.py
|
visualize.py
|
'''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
nodes = {}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
nx.draw_networkx(g, with_labels=True)
# add labels
#nx.draw_networkx_labels(g, pos, labels['nodes'])
#nx.draw_networkx_edge_labels(g, pos, labels['edges'])
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
|
'''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
pos = nx.drawing.spring_layout(
g,
scale=10.0,
)
nx.draw_networkx(
g,
pos=pos,
with_labels=True,
font_size=8,
)
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
|
Make the sprint layout a bit easier to look at
|
Make the sprint layout a bit easier to look at
|
Python
|
mit
|
jacksontj/dnms,jacksontj/dnms
|
'''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
- nodes = {}
-
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
- nx.draw_networkx(g, with_labels=True)
-
- # add labels
- #nx.draw_networkx_labels(g, pos, labels['nodes'])
- #nx.draw_networkx_edge_labels(g, pos, labels['edges'])
+ pos = nx.drawing.spring_layout(
+ g,
+ scale=10.0,
+ )
+ nx.draw_networkx(
+ g,
+ pos=pos,
+ with_labels=True,
+ font_size=8,
+ )
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
|
Make the sprint layout a bit easier to look at
|
## Code Before:
'''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
nodes = {}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
nx.draw_networkx(g, with_labels=True)
# add labels
#nx.draw_networkx_labels(g, pos, labels['nodes'])
#nx.draw_networkx_edge_labels(g, pos, labels['edges'])
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
## Instruction:
Make the sprint layout a bit easier to look at
## Code After:
'''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
pos = nx.drawing.spring_layout(
g,
scale=10.0,
)
nx.draw_networkx(
g,
pos=pos,
with_labels=True,
font_size=8,
)
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
|
'''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
- nodes = {}
-
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
- nx.draw_networkx(g, with_labels=True)
-
- # add labels
- #nx.draw_networkx_labels(g, pos, labels['nodes'])
- #nx.draw_networkx_edge_labels(g, pos, labels['edges'])
+ pos = nx.drawing.spring_layout(
+ g,
+ scale=10.0,
+ )
+ nx.draw_networkx(
+ g,
+ pos=pos,
+ with_labels=True,
+ font_size=8,
+ )
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
|
931a858dc1cfde1652d21e1ccd60a82dde683ce3
|
moxie/butterfield.py
|
moxie/butterfield.py
|
import os
import json
import asyncio
from butterfield.utils import at_bot
from aiodocker import Docker
from aiocore import Service
WEB_ROOT = os.environ.get("MOXIE_WEB_URL", "http://localhost:8888")
@asyncio.coroutine
def events(bot):
docker = Docker()
events = docker.events
events.saferun()
stream = events.listen()
while True:
el = yield from stream.get()
yield from bot.post("#cron", "`{}`".format(str(el)))
@asyncio.coroutine
@at_bot
def run(bot, message: "message"):
runner = Service.resolve("moxie.cores.run.RunService")
text = message.get("text", "")
if text == "":
yield from bot.post(message['channel'], "Invalid request")
cmd, arg = text.split(" ", 1)
if cmd == "run":
job = arg
yield from bot.post(
message['channel'], "Doing bringup of {}".format(job))
try:
yield from runner.run(job)
except ValueError as e:
yield from bot.post(
message['channel'],
"Gah, {job} failed - {e}".format(e=e, job=job)
)
return
yield from bot.post(message['channel'],
"Job {job} online - {webroot}/container/{job}/".format(
webroot=WEB_ROOT, job=job))
|
import os
import json
import asyncio
from butterfield.utils import at_bot
from aiodocker import Docker
from aiocore import Service
WEB_ROOT = os.environ.get("MOXIE_WEB_URL", "http://localhost:8888")
@asyncio.coroutine
def events(bot):
docker = Docker()
events = docker.events
events.saferun()
stream = events.listen()
while True:
el = yield from stream.get()
yield from bot.post("#cron", "`{}`".format(str(el)))
@asyncio.coroutine
@at_bot
def run(bot, message: "message"):
runner = Service.resolve("moxie.cores.run.RunService")
text = message.get("text", "")
if text == "":
yield from bot.post(message['channel'], "Invalid request")
cmd, arg = text.split(" ", 1)
if cmd == "run":
job = arg
yield from bot.post(
message['channel'], "Doing bringup of {}".format(job))
try:
yield from runner.run(job)
except ValueError as e:
yield from bot.post(
message['channel'],
"Gah, {job} failed - {e}".format(e=e, job=job)
)
return
yield from bot.post(message['channel'],
"Job {job} online - {webroot}/container/{job}/".format(
webroot=WEB_ROOT, job=job))
elif cmd == "yo":
yield from bot.post(
message['channel'], "Yo {}".format(message['user']))
|
Add simple "yo" bot command
|
Add simple "yo" bot command
|
Python
|
mit
|
paultag/moxie,loandy/moxie,mileswwatkins/moxie,mileswwatkins/moxie,paultag/moxie,loandy/moxie,loandy/moxie,paultag/moxie,rshorey/moxie,rshorey/moxie,rshorey/moxie,mileswwatkins/moxie
|
import os
import json
import asyncio
from butterfield.utils import at_bot
from aiodocker import Docker
from aiocore import Service
WEB_ROOT = os.environ.get("MOXIE_WEB_URL", "http://localhost:8888")
@asyncio.coroutine
def events(bot):
docker = Docker()
events = docker.events
events.saferun()
stream = events.listen()
while True:
el = yield from stream.get()
yield from bot.post("#cron", "`{}`".format(str(el)))
@asyncio.coroutine
@at_bot
def run(bot, message: "message"):
runner = Service.resolve("moxie.cores.run.RunService")
text = message.get("text", "")
if text == "":
yield from bot.post(message['channel'], "Invalid request")
cmd, arg = text.split(" ", 1)
if cmd == "run":
job = arg
yield from bot.post(
message['channel'], "Doing bringup of {}".format(job))
try:
yield from runner.run(job)
except ValueError as e:
yield from bot.post(
message['channel'],
"Gah, {job} failed - {e}".format(e=e, job=job)
)
return
yield from bot.post(message['channel'],
"Job {job} online - {webroot}/container/{job}/".format(
webroot=WEB_ROOT, job=job))
+ elif cmd == "yo":
+ yield from bot.post(
+ message['channel'], "Yo {}".format(message['user']))
|
Add simple "yo" bot command
|
## Code Before:
import os
import json
import asyncio
from butterfield.utils import at_bot
from aiodocker import Docker
from aiocore import Service
WEB_ROOT = os.environ.get("MOXIE_WEB_URL", "http://localhost:8888")
@asyncio.coroutine
def events(bot):
docker = Docker()
events = docker.events
events.saferun()
stream = events.listen()
while True:
el = yield from stream.get()
yield from bot.post("#cron", "`{}`".format(str(el)))
@asyncio.coroutine
@at_bot
def run(bot, message: "message"):
runner = Service.resolve("moxie.cores.run.RunService")
text = message.get("text", "")
if text == "":
yield from bot.post(message['channel'], "Invalid request")
cmd, arg = text.split(" ", 1)
if cmd == "run":
job = arg
yield from bot.post(
message['channel'], "Doing bringup of {}".format(job))
try:
yield from runner.run(job)
except ValueError as e:
yield from bot.post(
message['channel'],
"Gah, {job} failed - {e}".format(e=e, job=job)
)
return
yield from bot.post(message['channel'],
"Job {job} online - {webroot}/container/{job}/".format(
webroot=WEB_ROOT, job=job))
## Instruction:
Add simple "yo" bot command
## Code After:
import os
import json
import asyncio
from butterfield.utils import at_bot
from aiodocker import Docker
from aiocore import Service
WEB_ROOT = os.environ.get("MOXIE_WEB_URL", "http://localhost:8888")
@asyncio.coroutine
def events(bot):
docker = Docker()
events = docker.events
events.saferun()
stream = events.listen()
while True:
el = yield from stream.get()
yield from bot.post("#cron", "`{}`".format(str(el)))
@asyncio.coroutine
@at_bot
def run(bot, message: "message"):
runner = Service.resolve("moxie.cores.run.RunService")
text = message.get("text", "")
if text == "":
yield from bot.post(message['channel'], "Invalid request")
cmd, arg = text.split(" ", 1)
if cmd == "run":
job = arg
yield from bot.post(
message['channel'], "Doing bringup of {}".format(job))
try:
yield from runner.run(job)
except ValueError as e:
yield from bot.post(
message['channel'],
"Gah, {job} failed - {e}".format(e=e, job=job)
)
return
yield from bot.post(message['channel'],
"Job {job} online - {webroot}/container/{job}/".format(
webroot=WEB_ROOT, job=job))
elif cmd == "yo":
yield from bot.post(
message['channel'], "Yo {}".format(message['user']))
|
import os
import json
import asyncio
from butterfield.utils import at_bot
from aiodocker import Docker
from aiocore import Service
WEB_ROOT = os.environ.get("MOXIE_WEB_URL", "http://localhost:8888")
@asyncio.coroutine
def events(bot):
docker = Docker()
events = docker.events
events.saferun()
stream = events.listen()
while True:
el = yield from stream.get()
yield from bot.post("#cron", "`{}`".format(str(el)))
@asyncio.coroutine
@at_bot
def run(bot, message: "message"):
runner = Service.resolve("moxie.cores.run.RunService")
text = message.get("text", "")
if text == "":
yield from bot.post(message['channel'], "Invalid request")
cmd, arg = text.split(" ", 1)
if cmd == "run":
job = arg
yield from bot.post(
message['channel'], "Doing bringup of {}".format(job))
try:
yield from runner.run(job)
except ValueError as e:
yield from bot.post(
message['channel'],
"Gah, {job} failed - {e}".format(e=e, job=job)
)
return
yield from bot.post(message['channel'],
"Job {job} online - {webroot}/container/{job}/".format(
webroot=WEB_ROOT, job=job))
+ elif cmd == "yo":
+ yield from bot.post(
+ message['channel'], "Yo {}".format(message['user']))
|
90fa23d1d1b2497d65507b7930323b118f512a25
|
disco_aws_automation/disco_acm.py
|
disco_aws_automation/disco_acm.py
|
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
except (botocore.exceptions.EndpointConnectionError, botocore.vendored.requests.exceptions.ConnectionError):
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
|
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
except botocore.exceptions.EndpointConnectionError:
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
|
Revert "Swallow proxy exception from requests"
|
Revert "Swallow proxy exception from requests"
This reverts commit 8d9ccbb2bbde7c2f8dbe60b90f730d87b924d86e.
|
Python
|
bsd-2-clause
|
amplifylitco/asiaq,amplifylitco/asiaq,amplifylitco/asiaq
|
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
- except (botocore.exceptions.EndpointConnectionError, botocore.vendored.requests.exceptions.ConnectionError):
+ except botocore.exceptions.EndpointConnectionError:
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
|
Revert "Swallow proxy exception from requests"
|
## Code Before:
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
except (botocore.exceptions.EndpointConnectionError, botocore.vendored.requests.exceptions.ConnectionError):
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
## Instruction:
Revert "Swallow proxy exception from requests"
## Code After:
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
except botocore.exceptions.EndpointConnectionError:
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
|
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
- except (botocore.exceptions.EndpointConnectionError, botocore.vendored.requests.exceptions.ConnectionError):
+ except botocore.exceptions.EndpointConnectionError:
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
|
9510a0da5a6fee780e16db8f128f7c24bdb579d4
|
tests/test_post_import_hooks.py
|
tests/test_post_import_hooks.py
|
from __future__ import print_function
import unittest
import wrapt
class TestPostImportHooks(unittest.TestCase):
def test_simple(self):
invoked = []
@wrapt.when_imported('socket')
def hook_socket(module):
self.assertEqual(module.__name__, 'socket')
invoked.append(1)
self.assertEqual(len(invoked), 0)
import socket
self.assertEqual(len(invoked), 1)
if __name__ == '__main__':
unittest.main()
|
from __future__ import print_function
import unittest
import wrapt
class TestPostImportHooks(unittest.TestCase):
def test_simple(self):
invoked = []
@wrapt.when_imported('this')
def hook_this(module):
self.assertEqual(module.__name__, 'this')
invoked.append(1)
self.assertEqual(len(invoked), 0)
import this
self.assertEqual(len(invoked), 1)
if __name__ == '__main__':
unittest.main()
|
Adjust test to use different module as socket imported by coverage tools.
|
Adjust test to use different module as socket imported by coverage tools.
|
Python
|
bsd-2-clause
|
linglaiyao1314/wrapt,pombredanne/python-lazy-object-proxy,linglaiyao1314/wrapt,pombredanne/wrapt,akash1808/wrapt,pombredanne/wrapt,github4ry/wrapt,wujuguang/wrapt,pombredanne/python-lazy-object-proxy,akash1808/wrapt,ionelmc/python-lazy-object-proxy,ionelmc/python-lazy-object-proxy,github4ry/wrapt,GrahamDumpleton/wrapt,GrahamDumpleton/wrapt,wujuguang/wrapt
|
from __future__ import print_function
import unittest
import wrapt
class TestPostImportHooks(unittest.TestCase):
def test_simple(self):
invoked = []
- @wrapt.when_imported('socket')
+ @wrapt.when_imported('this')
- def hook_socket(module):
+ def hook_this(module):
- self.assertEqual(module.__name__, 'socket')
+ self.assertEqual(module.__name__, 'this')
invoked.append(1)
self.assertEqual(len(invoked), 0)
- import socket
+ import this
self.assertEqual(len(invoked), 1)
if __name__ == '__main__':
unittest.main()
|
Adjust test to use different module as socket imported by coverage tools.
|
## Code Before:
from __future__ import print_function
import unittest
import wrapt
class TestPostImportHooks(unittest.TestCase):
def test_simple(self):
invoked = []
@wrapt.when_imported('socket')
def hook_socket(module):
self.assertEqual(module.__name__, 'socket')
invoked.append(1)
self.assertEqual(len(invoked), 0)
import socket
self.assertEqual(len(invoked), 1)
if __name__ == '__main__':
unittest.main()
## Instruction:
Adjust test to use different module as socket imported by coverage tools.
## Code After:
from __future__ import print_function
import unittest
import wrapt
class TestPostImportHooks(unittest.TestCase):
def test_simple(self):
invoked = []
@wrapt.when_imported('this')
def hook_this(module):
self.assertEqual(module.__name__, 'this')
invoked.append(1)
self.assertEqual(len(invoked), 0)
import this
self.assertEqual(len(invoked), 1)
if __name__ == '__main__':
unittest.main()
|
from __future__ import print_function
import unittest
import wrapt
class TestPostImportHooks(unittest.TestCase):
def test_simple(self):
invoked = []
- @wrapt.when_imported('socket')
? -----
+ @wrapt.when_imported('this')
? +++
- def hook_socket(module):
? -----
+ def hook_this(module):
? +++
- self.assertEqual(module.__name__, 'socket')
? -----
+ self.assertEqual(module.__name__, 'this')
? +++
invoked.append(1)
self.assertEqual(len(invoked), 0)
- import socket
? -----
+ import this
? +++
self.assertEqual(len(invoked), 1)
if __name__ == '__main__':
unittest.main()
|
b544361b2e3f7942a82a911a8d6d314a2044be97
|
almostfunded/wsgi.py
|
almostfunded/wsgi.py
|
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
Add missing import statement for whitenoise
|
Add missing import statement for whitenoise
|
Python
|
mit
|
lorenanicole/almost_funded,lorenanicole/almost_funded,lorenanicole/almost_funded
|
import os
from django.core.wsgi import get_wsgi_application
+ from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
Add missing import statement for whitenoise
|
## Code Before:
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
## Instruction:
Add missing import statement for whitenoise
## Code After:
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
import os
from django.core.wsgi import get_wsgi_application
+ from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
308f1cd155e2db37fe5ff03e158e3d9fc32d6885
|
lehrex/__init__.py
|
lehrex/__init__.py
|
from os.path import dirname, join
from . import csv
from . import math
from . import plots
from . import utils
__version__ = open(join(dirname(__file__), 'VERSION')).read().strip()
__all__ = [
'csv',
'math',
'plots',
'utils',
]
|
from os.path import dirname, join
from . import csv
from . import math
from . import plots
from . import utils
__version__ = open(join(dirname(__file__), 'VERSION')).read().strip()
__all__ = [
'csv',
'math',
'plots',
'utils',
]
class Test:
def foo(self):
return f"{self}"
Test()
|
Introduce Python 3.5 syntax error.
|
Introduce Python 3.5 syntax error.
|
Python
|
mit
|
lkluft/lehrex
|
from os.path import dirname, join
from . import csv
from . import math
from . import plots
from . import utils
__version__ = open(join(dirname(__file__), 'VERSION')).read().strip()
__all__ = [
'csv',
'math',
'plots',
'utils',
]
+
+ class Test:
+ def foo(self):
+ return f"{self}"
+
+
+ Test()
+
|
Introduce Python 3.5 syntax error.
|
## Code Before:
from os.path import dirname, join
from . import csv
from . import math
from . import plots
from . import utils
__version__ = open(join(dirname(__file__), 'VERSION')).read().strip()
__all__ = [
'csv',
'math',
'plots',
'utils',
]
## Instruction:
Introduce Python 3.5 syntax error.
## Code After:
from os.path import dirname, join
from . import csv
from . import math
from . import plots
from . import utils
__version__ = open(join(dirname(__file__), 'VERSION')).read().strip()
__all__ = [
'csv',
'math',
'plots',
'utils',
]
class Test:
def foo(self):
return f"{self}"
Test()
|
from os.path import dirname, join
from . import csv
from . import math
from . import plots
from . import utils
__version__ = open(join(dirname(__file__), 'VERSION')).read().strip()
__all__ = [
'csv',
'math',
'plots',
'utils',
]
+
+
+ class Test:
+ def foo(self):
+ return f"{self}"
+
+
+ Test()
|
0ceedd5b22a42634889b572018db1153e1ef2855
|
tests/integration/services/user_avatar/test_update_avatar_image.py
|
tests/integration/services/user_avatar/test_update_avatar_image.py
|
from pathlib import Path
import pytest
from byceps.services.user_avatar import service as user_avatar_service
from byceps.util.image.models import ImageType
@pytest.mark.parametrize(
'image_extension, image_type',
[
('jpeg', ImageType.jpeg),
('png', ImageType.png),
],
)
def test_path(data_path, site_app, user, image_extension, image_type):
with Path(f'tests/fixtures/images/image.{image_extension}').open('rb') as f:
avatar_id = user_avatar_service.update_avatar_image(
user.id, f, {image_type}
)
avatar = user_avatar_service.get_db_avatar(avatar_id)
expected_filename = f'{avatar.id}.{image_extension}'
assert avatar.path == data_path / 'global/users/avatars' / expected_filename
|
from pathlib import Path
import pytest
from byceps.services.user_avatar import service as user_avatar_service
from byceps.util.image.models import ImageType
@pytest.mark.parametrize(
'image_extension, image_type',
[
('jpeg', ImageType.jpeg),
('png', ImageType.png),
],
)
def test_path(data_path, site_app, user, image_extension, image_type):
with Path(f'tests/fixtures/images/image.{image_extension}').open('rb') as f:
avatar_id = user_avatar_service.update_avatar_image(
user.id, f, {image_type}
)
avatar = user_avatar_service.get_db_avatar(avatar_id)
expected_filename = f'{avatar.id}.{image_extension}'
expected = data_path / 'global' / 'users' / 'avatars' / expected_filename
assert avatar.path == expected
|
Use `/` operator to assemble path
|
Use `/` operator to assemble path
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
from pathlib import Path
import pytest
from byceps.services.user_avatar import service as user_avatar_service
from byceps.util.image.models import ImageType
@pytest.mark.parametrize(
'image_extension, image_type',
[
('jpeg', ImageType.jpeg),
('png', ImageType.png),
],
)
def test_path(data_path, site_app, user, image_extension, image_type):
with Path(f'tests/fixtures/images/image.{image_extension}').open('rb') as f:
avatar_id = user_avatar_service.update_avatar_image(
user.id, f, {image_type}
)
avatar = user_avatar_service.get_db_avatar(avatar_id)
expected_filename = f'{avatar.id}.{image_extension}'
+ expected = data_path / 'global' / 'users' / 'avatars' / expected_filename
- assert avatar.path == data_path / 'global/users/avatars' / expected_filename
+ assert avatar.path == expected
|
Use `/` operator to assemble path
|
## Code Before:
from pathlib import Path
import pytest
from byceps.services.user_avatar import service as user_avatar_service
from byceps.util.image.models import ImageType
@pytest.mark.parametrize(
'image_extension, image_type',
[
('jpeg', ImageType.jpeg),
('png', ImageType.png),
],
)
def test_path(data_path, site_app, user, image_extension, image_type):
with Path(f'tests/fixtures/images/image.{image_extension}').open('rb') as f:
avatar_id = user_avatar_service.update_avatar_image(
user.id, f, {image_type}
)
avatar = user_avatar_service.get_db_avatar(avatar_id)
expected_filename = f'{avatar.id}.{image_extension}'
assert avatar.path == data_path / 'global/users/avatars' / expected_filename
## Instruction:
Use `/` operator to assemble path
## Code After:
from pathlib import Path
import pytest
from byceps.services.user_avatar import service as user_avatar_service
from byceps.util.image.models import ImageType
@pytest.mark.parametrize(
'image_extension, image_type',
[
('jpeg', ImageType.jpeg),
('png', ImageType.png),
],
)
def test_path(data_path, site_app, user, image_extension, image_type):
with Path(f'tests/fixtures/images/image.{image_extension}').open('rb') as f:
avatar_id = user_avatar_service.update_avatar_image(
user.id, f, {image_type}
)
avatar = user_avatar_service.get_db_avatar(avatar_id)
expected_filename = f'{avatar.id}.{image_extension}'
expected = data_path / 'global' / 'users' / 'avatars' / expected_filename
assert avatar.path == expected
|
from pathlib import Path
import pytest
from byceps.services.user_avatar import service as user_avatar_service
from byceps.util.image.models import ImageType
@pytest.mark.parametrize(
'image_extension, image_type',
[
('jpeg', ImageType.jpeg),
('png', ImageType.png),
],
)
def test_path(data_path, site_app, user, image_extension, image_type):
with Path(f'tests/fixtures/images/image.{image_extension}').open('rb') as f:
avatar_id = user_avatar_service.update_avatar_image(
user.id, f, {image_type}
)
avatar = user_avatar_service.get_db_avatar(avatar_id)
expected_filename = f'{avatar.id}.{image_extension}'
+ expected = data_path / 'global' / 'users' / 'avatars' / expected_filename
- assert avatar.path == data_path / 'global/users/avatars' / expected_filename
+ assert avatar.path == expected
|
5645946d9a99ff86c43c7801053a0ef279dc1382
|
ynr/apps/candidates/csv_helpers.py
|
ynr/apps/candidates/csv_helpers.py
|
from compat import BufferDictWriter
from .models import CSV_ROW_FIELDS
def _candidate_sort_by_name_key(row):
return (
row["name"].split()[-1],
row["name"].rsplit(None, 1)[0],
not row["election_current"],
row["election_date"],
row["election"],
row["post_label"],
)
def _candidate_sort_by_post_key(row):
return (
not row["election_current"],
row["election_date"],
row["election"],
row["post_label"],
row["name"].split()[-1],
row["name"].rsplit(None, 1)[0],
)
def list_to_csv(candidates_list, group_by_post=False):
from .election_specific import EXTRA_CSV_ROW_FIELDS
csv_fields = CSV_ROW_FIELDS + EXTRA_CSV_ROW_FIELDS
writer = BufferDictWriter(fieldnames=csv_fields)
writer.writeheader()
if group_by_post:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_post_key)
else:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_name_key)
for row in sorted_rows:
writer.writerow(row)
return writer.output
|
from collections import defaultdict
from compat import BufferDictWriter
from django.conf import settings
from popolo.models import Membership
from candidates.models import PersonRedirect
def list_to_csv(membership_list):
csv_fields = settings.CSV_ROW_FIELDS
writer = BufferDictWriter(fieldnames=csv_fields)
writer.writeheader()
for row in membership_list:
writer.writerow(row)
return writer.output
def memberships_dicts_for_csv(election_slug=None, post_slug=None):
redirects = PersonRedirect.all_redirects_dict()
memberships = Membership.objects.joins_for_csv()
if election_slug:
memberships = memberships.filter(
post_election__election__slug=election_slug
)
if post_slug:
memberships = memberships.filter(post_election__post__slug=post_slug)
memberships_by_election = defaultdict(list)
elected_by_election = defaultdict(list)
for membership in memberships:
election_slug = membership.post_election.election.slug
line = membership.dict_for_csv(redirects=redirects)
memberships_by_election[election_slug].append(line)
if membership.elected:
elected_by_election[election_slug].append(line)
return (memberships_by_election, elected_by_election)
|
Move to using membership based CSV output
|
Move to using membership based CSV output
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
+ from collections import defaultdict
+
from compat import BufferDictWriter
- from .models import CSV_ROW_FIELDS
+ from django.conf import settings
+
+ from popolo.models import Membership
+ from candidates.models import PersonRedirect
+ def list_to_csv(membership_list):
- def _candidate_sort_by_name_key(row):
- return (
- row["name"].split()[-1],
- row["name"].rsplit(None, 1)[0],
- not row["election_current"],
- row["election_date"],
- row["election"],
- row["post_label"],
- )
+ csv_fields = settings.CSV_ROW_FIELDS
-
- def _candidate_sort_by_post_key(row):
- return (
- not row["election_current"],
- row["election_date"],
- row["election"],
- row["post_label"],
- row["name"].split()[-1],
- row["name"].rsplit(None, 1)[0],
- )
-
-
- def list_to_csv(candidates_list, group_by_post=False):
- from .election_specific import EXTRA_CSV_ROW_FIELDS
-
- csv_fields = CSV_ROW_FIELDS + EXTRA_CSV_ROW_FIELDS
writer = BufferDictWriter(fieldnames=csv_fields)
writer.writeheader()
+ for row in membership_list:
- if group_by_post:
- sorted_rows = sorted(candidates_list, key=_candidate_sort_by_post_key)
- else:
- sorted_rows = sorted(candidates_list, key=_candidate_sort_by_name_key)
- for row in sorted_rows:
writer.writerow(row)
return writer.output
+
+ def memberships_dicts_for_csv(election_slug=None, post_slug=None):
+ redirects = PersonRedirect.all_redirects_dict()
+ memberships = Membership.objects.joins_for_csv()
+ if election_slug:
+ memberships = memberships.filter(
+ post_election__election__slug=election_slug
+ )
+ if post_slug:
+ memberships = memberships.filter(post_election__post__slug=post_slug)
+
+ memberships_by_election = defaultdict(list)
+ elected_by_election = defaultdict(list)
+
+ for membership in memberships:
+ election_slug = membership.post_election.election.slug
+ line = membership.dict_for_csv(redirects=redirects)
+ memberships_by_election[election_slug].append(line)
+ if membership.elected:
+ elected_by_election[election_slug].append(line)
+
+ return (memberships_by_election, elected_by_election)
+
|
Move to using membership based CSV output
|
## Code Before:
from compat import BufferDictWriter
from .models import CSV_ROW_FIELDS
def _candidate_sort_by_name_key(row):
return (
row["name"].split()[-1],
row["name"].rsplit(None, 1)[0],
not row["election_current"],
row["election_date"],
row["election"],
row["post_label"],
)
def _candidate_sort_by_post_key(row):
return (
not row["election_current"],
row["election_date"],
row["election"],
row["post_label"],
row["name"].split()[-1],
row["name"].rsplit(None, 1)[0],
)
def list_to_csv(candidates_list, group_by_post=False):
from .election_specific import EXTRA_CSV_ROW_FIELDS
csv_fields = CSV_ROW_FIELDS + EXTRA_CSV_ROW_FIELDS
writer = BufferDictWriter(fieldnames=csv_fields)
writer.writeheader()
if group_by_post:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_post_key)
else:
sorted_rows = sorted(candidates_list, key=_candidate_sort_by_name_key)
for row in sorted_rows:
writer.writerow(row)
return writer.output
## Instruction:
Move to using membership based CSV output
## Code After:
from collections import defaultdict
from compat import BufferDictWriter
from django.conf import settings
from popolo.models import Membership
from candidates.models import PersonRedirect
def list_to_csv(membership_list):
csv_fields = settings.CSV_ROW_FIELDS
writer = BufferDictWriter(fieldnames=csv_fields)
writer.writeheader()
for row in membership_list:
writer.writerow(row)
return writer.output
def memberships_dicts_for_csv(election_slug=None, post_slug=None):
redirects = PersonRedirect.all_redirects_dict()
memberships = Membership.objects.joins_for_csv()
if election_slug:
memberships = memberships.filter(
post_election__election__slug=election_slug
)
if post_slug:
memberships = memberships.filter(post_election__post__slug=post_slug)
memberships_by_election = defaultdict(list)
elected_by_election = defaultdict(list)
for membership in memberships:
election_slug = membership.post_election.election.slug
line = membership.dict_for_csv(redirects=redirects)
memberships_by_election[election_slug].append(line)
if membership.elected:
elected_by_election[election_slug].append(line)
return (memberships_by_election, elected_by_election)
|
+ from collections import defaultdict
+
from compat import BufferDictWriter
- from .models import CSV_ROW_FIELDS
+ from django.conf import settings
+
+ from popolo.models import Membership
+ from candidates.models import PersonRedirect
+ def list_to_csv(membership_list):
+
+ csv_fields = settings.CSV_ROW_FIELDS
+ writer = BufferDictWriter(fieldnames=csv_fields)
+ writer.writeheader()
+ for row in membership_list:
+ writer.writerow(row)
+ return writer.output
- def _candidate_sort_by_name_key(row):
- return (
- row["name"].split()[-1],
- row["name"].rsplit(None, 1)[0],
- not row["election_current"],
- row["election_date"],
- row["election"],
- row["post_label"],
- )
- def _candidate_sort_by_post_key(row):
- return (
- not row["election_current"],
- row["election_date"],
- row["election"],
- row["post_label"],
- row["name"].split()[-1],
- row["name"].rsplit(None, 1)[0],
- )
+ def memberships_dicts_for_csv(election_slug=None, post_slug=None):
+ redirects = PersonRedirect.all_redirects_dict()
+ memberships = Membership.objects.joins_for_csv()
+ if election_slug:
+ memberships = memberships.filter(
+ post_election__election__slug=election_slug
+ )
+ if post_slug:
+ memberships = memberships.filter(post_election__post__slug=post_slug)
+ memberships_by_election = defaultdict(list)
+ elected_by_election = defaultdict(list)
- def list_to_csv(candidates_list, group_by_post=False):
- from .election_specific import EXTRA_CSV_ROW_FIELDS
+ for membership in memberships:
+ election_slug = membership.post_election.election.slug
+ line = membership.dict_for_csv(redirects=redirects)
+ memberships_by_election[election_slug].append(line)
+ if membership.elected:
+ elected_by_election[election_slug].append(line)
+ return (memberships_by_election, elected_by_election)
- csv_fields = CSV_ROW_FIELDS + EXTRA_CSV_ROW_FIELDS
- writer = BufferDictWriter(fieldnames=csv_fields)
- writer.writeheader()
- if group_by_post:
- sorted_rows = sorted(candidates_list, key=_candidate_sort_by_post_key)
- else:
- sorted_rows = sorted(candidates_list, key=_candidate_sort_by_name_key)
- for row in sorted_rows:
- writer.writerow(row)
- return writer.output
|
f85001b39f8f8097c20a197f8cbde70d7ec8e88b
|
tests/test_extension.py
|
tests/test_extension.py
|
import mock
from mopidy_spotify import Extension, backend as backend_lib
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[spotify]' in config
assert 'enabled = true' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'username' in schema
assert 'password' in schema
assert 'bitrate' in schema
assert 'timeout' in schema
assert 'cache_dir' in schema
def test_setup():
registry = mock.Mock()
ext = Extension()
ext.setup(registry)
registry.add.assert_called_with('backend', backend_lib.SpotifyBackend)
|
import mock
from mopidy_spotify import Extension, backend as backend_lib
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[spotify]' in config
assert 'enabled = true' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'username' in schema
assert 'password' in schema
assert 'bitrate' in schema
assert 'timeout' in schema
assert 'cache_dir' in schema
assert 'settings_dir' in schema
assert 'toplist_countries' in schema
def test_setup():
registry = mock.Mock()
ext = Extension()
ext.setup(registry)
registry.add.assert_called_with('backend', backend_lib.SpotifyBackend)
|
Test existing config schema members
|
Test existing config schema members
|
Python
|
apache-2.0
|
jodal/mopidy-spotify,kingosticks/mopidy-spotify,mopidy/mopidy-spotify
|
import mock
from mopidy_spotify import Extension, backend as backend_lib
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[spotify]' in config
assert 'enabled = true' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'username' in schema
assert 'password' in schema
assert 'bitrate' in schema
assert 'timeout' in schema
assert 'cache_dir' in schema
+ assert 'settings_dir' in schema
+ assert 'toplist_countries' in schema
def test_setup():
registry = mock.Mock()
ext = Extension()
ext.setup(registry)
registry.add.assert_called_with('backend', backend_lib.SpotifyBackend)
|
Test existing config schema members
|
## Code Before:
import mock
from mopidy_spotify import Extension, backend as backend_lib
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[spotify]' in config
assert 'enabled = true' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'username' in schema
assert 'password' in schema
assert 'bitrate' in schema
assert 'timeout' in schema
assert 'cache_dir' in schema
def test_setup():
registry = mock.Mock()
ext = Extension()
ext.setup(registry)
registry.add.assert_called_with('backend', backend_lib.SpotifyBackend)
## Instruction:
Test existing config schema members
## Code After:
import mock
from mopidy_spotify import Extension, backend as backend_lib
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[spotify]' in config
assert 'enabled = true' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'username' in schema
assert 'password' in schema
assert 'bitrate' in schema
assert 'timeout' in schema
assert 'cache_dir' in schema
assert 'settings_dir' in schema
assert 'toplist_countries' in schema
def test_setup():
registry = mock.Mock()
ext = Extension()
ext.setup(registry)
registry.add.assert_called_with('backend', backend_lib.SpotifyBackend)
|
import mock
from mopidy_spotify import Extension, backend as backend_lib
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[spotify]' in config
assert 'enabled = true' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'username' in schema
assert 'password' in schema
assert 'bitrate' in schema
assert 'timeout' in schema
assert 'cache_dir' in schema
+ assert 'settings_dir' in schema
+ assert 'toplist_countries' in schema
def test_setup():
registry = mock.Mock()
ext = Extension()
ext.setup(registry)
registry.add.assert_called_with('backend', backend_lib.SpotifyBackend)
|
3a2ca4573866b7b81d4b946ce87b9f36b487d272
|
src/dojo.py
|
src/dojo.py
|
class Dojo(object):
def __init__(self):
self.all_rooms = []
self.all_people = []
def create_room(self, room_type, room_name):
pass
|
class Dojo(object):
"""This class is responsible for managing and allocating rooms to people"""
def __init__(self):
self.all_rooms = []
self.all_people = []
def create_room(self, room_type, room_name):
pass
|
Add docstring to Dojo class
|
Add docstring to Dojo class
|
Python
|
mit
|
EdwinKato/Space-Allocator,EdwinKato/Space-Allocator
|
class Dojo(object):
+ """This class is responsible for managing and allocating rooms to people"""
def __init__(self):
self.all_rooms = []
self.all_people = []
def create_room(self, room_type, room_name):
pass
|
Add docstring to Dojo class
|
## Code Before:
class Dojo(object):
def __init__(self):
self.all_rooms = []
self.all_people = []
def create_room(self, room_type, room_name):
pass
## Instruction:
Add docstring to Dojo class
## Code After:
class Dojo(object):
"""This class is responsible for managing and allocating rooms to people"""
def __init__(self):
self.all_rooms = []
self.all_people = []
def create_room(self, room_type, room_name):
pass
|
class Dojo(object):
+ """This class is responsible for managing and allocating rooms to people"""
def __init__(self):
self.all_rooms = []
self.all_people = []
def create_room(self, room_type, room_name):
pass
|
ecd201216562c8b802fada27e2f79cda5b05a4d5
|
cron/__init__.py
|
cron/__init__.py
|
import schedule
import settings
from .poll_pull_requests import poll_pull_requests as poll_pull_requests
from .restart_homepage import restart_homepage as restart_homepage
def schedule_jobs():
schedule.every(settings.PULL_REQUEST_POLLING_INTERVAL_SECONDS).seconds.do(poll_pull_requests)
schedule.every(settings.FALLBACK_WINDOW).hours.do(check_fallback)
schedule.every(120).seconds.do(restart_homepage)
|
import schedule
import settings
from .poll_pull_requests import poll_pull_requests as poll_pull_requests
from .restart_homepage import restart_homepage as restart_homepage
def schedule_jobs():
schedule.every(settings.PULL_REQUEST_POLLING_INTERVAL_SECONDS).seconds.do(poll_pull_requests)
schedule.every(settings.FALLBACK_WINDOW_SECONDS).seconds.do(check_fallback)
schedule.every(120).seconds.do(restart_homepage)
|
Change hours to seconds in cron job
|
Change hours to seconds in cron job
|
Python
|
mit
|
amoffat/Chaos,eukaryote31/chaos,phil-r/chaos,g19fanatic/chaos,phil-r/chaos,botchaos/Chaos,Chaosthebot/Chaos,mark-i-m/Chaos,chaosbot/Chaos,g19fanatic/chaos,phil-r/chaos,hongaar/chaos,rudehn/chaos,rudehn/chaos,Chaosthebot/Chaos,botchaos/Chaos,mpnordland/chaos,amoffat/Chaos,mark-i-m/Chaos,chaosbot/Chaos,amoffat/Chaos,amoffat/Chaos,mpnordland/chaos,eukaryote31/chaos,botchaos/Chaos,amoffat/Chaos,chaosbot/Chaos,mpnordland/chaos,eukaryote31/chaos,Chaosthebot/Chaos,rudehn/chaos,eamanu/Chaos,hongaar/chaos,g19fanatic/chaos,mpnordland/chaos,g19fanatic/chaos,eamanu/Chaos,g19fanatic/chaos,rudehn/chaos,Chaosthebot/Chaos,mark-i-m/Chaos,eukaryote31/chaos,mpnordland/chaos,eamanu/Chaos,eukaryote31/chaos,chaosbot/Chaos,hongaar/chaos,botchaos/Chaos,chaosbot/Chaos,phil-r/chaos,mark-i-m/Chaos,eamanu/Chaos,hongaar/chaos,rudehn/chaos,Chaosthebot/Chaos,hongaar/chaos,mark-i-m/Chaos,eamanu/Chaos,phil-r/chaos,botchaos/Chaos
|
import schedule
import settings
from .poll_pull_requests import poll_pull_requests as poll_pull_requests
from .restart_homepage import restart_homepage as restart_homepage
def schedule_jobs():
schedule.every(settings.PULL_REQUEST_POLLING_INTERVAL_SECONDS).seconds.do(poll_pull_requests)
- schedule.every(settings.FALLBACK_WINDOW).hours.do(check_fallback)
+ schedule.every(settings.FALLBACK_WINDOW_SECONDS).seconds.do(check_fallback)
schedule.every(120).seconds.do(restart_homepage)
|
Change hours to seconds in cron job
|
## Code Before:
import schedule
import settings
from .poll_pull_requests import poll_pull_requests as poll_pull_requests
from .restart_homepage import restart_homepage as restart_homepage
def schedule_jobs():
schedule.every(settings.PULL_REQUEST_POLLING_INTERVAL_SECONDS).seconds.do(poll_pull_requests)
schedule.every(settings.FALLBACK_WINDOW).hours.do(check_fallback)
schedule.every(120).seconds.do(restart_homepage)
## Instruction:
Change hours to seconds in cron job
## Code After:
import schedule
import settings
from .poll_pull_requests import poll_pull_requests as poll_pull_requests
from .restart_homepage import restart_homepage as restart_homepage
def schedule_jobs():
schedule.every(settings.PULL_REQUEST_POLLING_INTERVAL_SECONDS).seconds.do(poll_pull_requests)
schedule.every(settings.FALLBACK_WINDOW_SECONDS).seconds.do(check_fallback)
schedule.every(120).seconds.do(restart_homepage)
|
import schedule
import settings
from .poll_pull_requests import poll_pull_requests as poll_pull_requests
from .restart_homepage import restart_homepage as restart_homepage
def schedule_jobs():
schedule.every(settings.PULL_REQUEST_POLLING_INTERVAL_SECONDS).seconds.do(poll_pull_requests)
- schedule.every(settings.FALLBACK_WINDOW).hours.do(check_fallback)
? ^ ^^
+ schedule.every(settings.FALLBACK_WINDOW_SECONDS).seconds.do(check_fallback)
? ++++++++ ^^^ ^^
schedule.every(120).seconds.do(restart_homepage)
|
090bcbf8bbc32a2a8da5f0ab2be097e5a6716c3d
|
src/adhocracy_frontend/adhocracy_frontend/tests/integration/test_jasmine.py
|
src/adhocracy_frontend/adhocracy_frontend/tests/integration/test_jasmine.py
|
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
|
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
@mark.xfail
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
|
Mark integration tests as xfail
|
Mark integration tests as xfail
|
Python
|
agpl-3.0
|
fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator
|
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
+ @mark.xfail
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
|
Mark integration tests as xfail
|
## Code Before:
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
## Instruction:
Mark integration tests as xfail
## Code After:
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
@mark.xfail
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
|
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
+ @mark.xfail
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
|
10782310cfee0d2c2938748056f6549b5918b969
|
src/sentry/debug/utils/patch_context.py
|
src/sentry/debug/utils/patch_context.py
|
from __future__ import absolute_import
from sentry.utils.imports import import_string
class PatchContext(object):
def __init__(self, target, callback):
target, attr = target.rsplit('.', 1)
target = import_string(target)
self.func = getattr(target, attr)
self.target = target
self.attr = attr
self.callback = callback
def __enter__(self):
self.patch()
return self
def __exit__(self, exc_type, exc_value, traceback):
self.unpatch()
def patch(self):
func = getattr(self.target, self.attr)
def wrapped(*args, **kwargs):
__traceback_hide__ = True # NOQA
return self.callback(self.func, *args, **kwargs)
wrapped.__name__ = func.__name__
if hasattr(func, '__doc__'):
wrapped.__doc__ = func.__doc__
if hasattr(func, '__module__'):
wrapped.__module__ = func.__module__
setattr(self.target, self.attr, wrapped)
def unpatch(self):
setattr(self.target, self.attr, self.func)
|
from __future__ import absolute_import
from threading import Lock
from sentry.utils.imports import import_string
class PatchContext(object):
def __init__(self, target, callback):
target, attr = target.rsplit('.', 1)
target = import_string(target)
self.target = target
self.attr = attr
self.callback = callback
self._lock = Lock()
with self._lock:
self.func = getattr(target, attr)
def __enter__(self):
self.patch()
return self
def __exit__(self, exc_type, exc_value, traceback):
self.unpatch()
def patch(self):
with self._lock:
func = getattr(self.target, self.attr)
def wrapped(*args, **kwargs):
__traceback_hide__ = True # NOQA
return self.callback(self.func, *args, **kwargs)
wrapped.__name__ = func.__name__
if hasattr(func, '__doc__'):
wrapped.__doc__ = func.__doc__
if hasattr(func, '__module__'):
wrapped.__module__ = func.__module__
setattr(self.target, self.attr, wrapped)
def unpatch(self):
with self._lock:
setattr(self.target, self.attr, self.func)
|
Use a thread lock to patch contexts.
|
Use a thread lock to patch contexts.
This fixes #3185
|
Python
|
bsd-3-clause
|
looker/sentry,zenefits/sentry,mvaled/sentry,alexm92/sentry,alexm92/sentry,looker/sentry,gencer/sentry,ifduyue/sentry,jean/sentry,JackDanger/sentry,JackDanger/sentry,ifduyue/sentry,BuildingLink/sentry,gencer/sentry,beeftornado/sentry,BuildingLink/sentry,mvaled/sentry,JamesMura/sentry,jean/sentry,zenefits/sentry,zenefits/sentry,mvaled/sentry,mvaled/sentry,jean/sentry,JamesMura/sentry,zenefits/sentry,mvaled/sentry,JamesMura/sentry,fotinakis/sentry,ifduyue/sentry,gencer/sentry,JackDanger/sentry,mitsuhiko/sentry,jean/sentry,zenefits/sentry,mvaled/sentry,gencer/sentry,BuildingLink/sentry,fotinakis/sentry,JamesMura/sentry,mitsuhiko/sentry,beeftornado/sentry,beeftornado/sentry,fotinakis/sentry,ifduyue/sentry,alexm92/sentry,BuildingLink/sentry,JamesMura/sentry,looker/sentry,gencer/sentry,looker/sentry,fotinakis/sentry,looker/sentry,BuildingLink/sentry,ifduyue/sentry,jean/sentry
|
from __future__ import absolute_import
+ from threading import Lock
from sentry.utils.imports import import_string
class PatchContext(object):
def __init__(self, target, callback):
target, attr = target.rsplit('.', 1)
target = import_string(target)
- self.func = getattr(target, attr)
self.target = target
self.attr = attr
self.callback = callback
+ self._lock = Lock()
+ with self._lock:
+ self.func = getattr(target, attr)
def __enter__(self):
self.patch()
return self
def __exit__(self, exc_type, exc_value, traceback):
self.unpatch()
def patch(self):
+ with self._lock:
- func = getattr(self.target, self.attr)
+ func = getattr(self.target, self.attr)
- def wrapped(*args, **kwargs):
+ def wrapped(*args, **kwargs):
- __traceback_hide__ = True # NOQA
+ __traceback_hide__ = True # NOQA
- return self.callback(self.func, *args, **kwargs)
+ return self.callback(self.func, *args, **kwargs)
- wrapped.__name__ = func.__name__
+ wrapped.__name__ = func.__name__
- if hasattr(func, '__doc__'):
+ if hasattr(func, '__doc__'):
- wrapped.__doc__ = func.__doc__
+ wrapped.__doc__ = func.__doc__
- if hasattr(func, '__module__'):
+ if hasattr(func, '__module__'):
- wrapped.__module__ = func.__module__
+ wrapped.__module__ = func.__module__
- setattr(self.target, self.attr, wrapped)
+ setattr(self.target, self.attr, wrapped)
def unpatch(self):
+ with self._lock:
- setattr(self.target, self.attr, self.func)
+ setattr(self.target, self.attr, self.func)
|
Use a thread lock to patch contexts.
|
## Code Before:
from __future__ import absolute_import
from sentry.utils.imports import import_string
class PatchContext(object):
def __init__(self, target, callback):
target, attr = target.rsplit('.', 1)
target = import_string(target)
self.func = getattr(target, attr)
self.target = target
self.attr = attr
self.callback = callback
def __enter__(self):
self.patch()
return self
def __exit__(self, exc_type, exc_value, traceback):
self.unpatch()
def patch(self):
func = getattr(self.target, self.attr)
def wrapped(*args, **kwargs):
__traceback_hide__ = True # NOQA
return self.callback(self.func, *args, **kwargs)
wrapped.__name__ = func.__name__
if hasattr(func, '__doc__'):
wrapped.__doc__ = func.__doc__
if hasattr(func, '__module__'):
wrapped.__module__ = func.__module__
setattr(self.target, self.attr, wrapped)
def unpatch(self):
setattr(self.target, self.attr, self.func)
## Instruction:
Use a thread lock to patch contexts.
## Code After:
from __future__ import absolute_import
from threading import Lock
from sentry.utils.imports import import_string
class PatchContext(object):
def __init__(self, target, callback):
target, attr = target.rsplit('.', 1)
target = import_string(target)
self.target = target
self.attr = attr
self.callback = callback
self._lock = Lock()
with self._lock:
self.func = getattr(target, attr)
def __enter__(self):
self.patch()
return self
def __exit__(self, exc_type, exc_value, traceback):
self.unpatch()
def patch(self):
with self._lock:
func = getattr(self.target, self.attr)
def wrapped(*args, **kwargs):
__traceback_hide__ = True # NOQA
return self.callback(self.func, *args, **kwargs)
wrapped.__name__ = func.__name__
if hasattr(func, '__doc__'):
wrapped.__doc__ = func.__doc__
if hasattr(func, '__module__'):
wrapped.__module__ = func.__module__
setattr(self.target, self.attr, wrapped)
def unpatch(self):
with self._lock:
setattr(self.target, self.attr, self.func)
|
from __future__ import absolute_import
+ from threading import Lock
from sentry.utils.imports import import_string
class PatchContext(object):
def __init__(self, target, callback):
target, attr = target.rsplit('.', 1)
target = import_string(target)
- self.func = getattr(target, attr)
self.target = target
self.attr = attr
self.callback = callback
+ self._lock = Lock()
+ with self._lock:
+ self.func = getattr(target, attr)
def __enter__(self):
self.patch()
return self
def __exit__(self, exc_type, exc_value, traceback):
self.unpatch()
def patch(self):
+ with self._lock:
- func = getattr(self.target, self.attr)
+ func = getattr(self.target, self.attr)
? ++++
- def wrapped(*args, **kwargs):
+ def wrapped(*args, **kwargs):
? ++++
- __traceback_hide__ = True # NOQA
+ __traceback_hide__ = True # NOQA
? ++++
- return self.callback(self.func, *args, **kwargs)
+ return self.callback(self.func, *args, **kwargs)
? ++++
- wrapped.__name__ = func.__name__
+ wrapped.__name__ = func.__name__
? ++++
- if hasattr(func, '__doc__'):
+ if hasattr(func, '__doc__'):
? ++++
- wrapped.__doc__ = func.__doc__
+ wrapped.__doc__ = func.__doc__
? ++++
- if hasattr(func, '__module__'):
+ if hasattr(func, '__module__'):
? ++++
- wrapped.__module__ = func.__module__
+ wrapped.__module__ = func.__module__
? ++++
- setattr(self.target, self.attr, wrapped)
+ setattr(self.target, self.attr, wrapped)
? ++++
def unpatch(self):
+ with self._lock:
- setattr(self.target, self.attr, self.func)
+ setattr(self.target, self.attr, self.func)
? ++++
|
a2c69058316971cd753edba607160d62df337b77
|
tests/test_middleware.py
|
tests/test_middleware.py
|
"""Tests for respite.middleware."""
from nose.tools import *
from urllib import urlencode
from django.utils import simplejson as json
from django.test.client import Client, RequestFactory
from respite.middleware import *
client = Client()
def test_json_middleware():
request = RequestFactory().post(
path = '/',
data = json.dumps({
'foo': 'foo',
'bar': 'bar',
'baz': 'baz'
}),
content_type = 'application/json'
)
JsonMiddleware().process_request(request)
assert_equal(request.POST, {
'foo': ['foo'],
'bar': ['bar'],
'baz': ['baz']
})
def test_http_method_override_middleware():
request = RequestFactory().post(
path = '/',
data = {
'foo': 'bar',
'_method': 'PUT'
}
)
HttpMethodOverrideMiddleware().process_request(request)
assert_equal(request.method, 'PUT')
assert_equal(request.POST, {})
def test_http_put_middleware():
request = RequestFactory().put(
path = '/',
data = urlencode({
'foo': 'bar'
}),
content_type = "application/x-www-form-urlencoded"
)
HttpPutMiddleware().process_request(request)
assert_equal(request.PUT, {
'foo': ['bar']
})
|
"""Tests for respite.middleware."""
from nose.tools import *
from urllib import urlencode
from django.utils import simplejson as json
from django.test.client import Client, RequestFactory
from respite.middleware import *
client = Client()
def test_json_middleware():
request = RequestFactory().post(
path = '/',
data = json.dumps({
'foo': 'foo',
'bar': 'bar',
'baz': 'baz',
'hogera': [
{'hoge': 'hoge'},
{'fuga': 'fuga'}
]
}),
content_type = 'application/json'
)
JsonMiddleware().process_request(request)
assert_equal(request.POST, {
'foo': ['foo'],
'bar': ['bar'],
'baz': ['baz'],
'hogera': [
{'hoge': ['hoge']},
{'fuga': ['fuga']}
]
})
def test_http_method_override_middleware():
request = RequestFactory().post(
path = '/',
data = {
'foo': 'bar',
'_method': 'PUT'
}
)
HttpMethodOverrideMiddleware().process_request(request)
assert_equal(request.method, 'PUT')
assert_equal(request.POST, {})
def test_http_put_middleware():
request = RequestFactory().put(
path = '/',
data = urlencode({
'foo': 'bar'
}),
content_type = "application/x-www-form-urlencoded"
)
HttpPutMiddleware().process_request(request)
assert_equal(request.PUT, {
'foo': ['bar']
})
|
Modify test to test for nested JSON
|
Modify test to test for nested JSON
|
Python
|
mit
|
jgorset/django-respite,jgorset/django-respite,jgorset/django-respite
|
"""Tests for respite.middleware."""
from nose.tools import *
from urllib import urlencode
from django.utils import simplejson as json
from django.test.client import Client, RequestFactory
from respite.middleware import *
client = Client()
def test_json_middleware():
request = RequestFactory().post(
path = '/',
data = json.dumps({
'foo': 'foo',
'bar': 'bar',
- 'baz': 'baz'
+ 'baz': 'baz',
+ 'hogera': [
+ {'hoge': 'hoge'},
+ {'fuga': 'fuga'}
+ ]
}),
content_type = 'application/json'
)
JsonMiddleware().process_request(request)
assert_equal(request.POST, {
'foo': ['foo'],
'bar': ['bar'],
- 'baz': ['baz']
+ 'baz': ['baz'],
+ 'hogera': [
+ {'hoge': ['hoge']},
+ {'fuga': ['fuga']}
+ ]
})
def test_http_method_override_middleware():
request = RequestFactory().post(
path = '/',
data = {
'foo': 'bar',
'_method': 'PUT'
}
)
HttpMethodOverrideMiddleware().process_request(request)
assert_equal(request.method, 'PUT')
assert_equal(request.POST, {})
def test_http_put_middleware():
request = RequestFactory().put(
path = '/',
data = urlencode({
'foo': 'bar'
}),
content_type = "application/x-www-form-urlencoded"
)
HttpPutMiddleware().process_request(request)
assert_equal(request.PUT, {
'foo': ['bar']
})
|
Modify test to test for nested JSON
|
## Code Before:
"""Tests for respite.middleware."""
from nose.tools import *
from urllib import urlencode
from django.utils import simplejson as json
from django.test.client import Client, RequestFactory
from respite.middleware import *
client = Client()
def test_json_middleware():
request = RequestFactory().post(
path = '/',
data = json.dumps({
'foo': 'foo',
'bar': 'bar',
'baz': 'baz'
}),
content_type = 'application/json'
)
JsonMiddleware().process_request(request)
assert_equal(request.POST, {
'foo': ['foo'],
'bar': ['bar'],
'baz': ['baz']
})
def test_http_method_override_middleware():
request = RequestFactory().post(
path = '/',
data = {
'foo': 'bar',
'_method': 'PUT'
}
)
HttpMethodOverrideMiddleware().process_request(request)
assert_equal(request.method, 'PUT')
assert_equal(request.POST, {})
def test_http_put_middleware():
request = RequestFactory().put(
path = '/',
data = urlencode({
'foo': 'bar'
}),
content_type = "application/x-www-form-urlencoded"
)
HttpPutMiddleware().process_request(request)
assert_equal(request.PUT, {
'foo': ['bar']
})
## Instruction:
Modify test to test for nested JSON
## Code After:
"""Tests for respite.middleware."""
from nose.tools import *
from urllib import urlencode
from django.utils import simplejson as json
from django.test.client import Client, RequestFactory
from respite.middleware import *
client = Client()
def test_json_middleware():
request = RequestFactory().post(
path = '/',
data = json.dumps({
'foo': 'foo',
'bar': 'bar',
'baz': 'baz',
'hogera': [
{'hoge': 'hoge'},
{'fuga': 'fuga'}
]
}),
content_type = 'application/json'
)
JsonMiddleware().process_request(request)
assert_equal(request.POST, {
'foo': ['foo'],
'bar': ['bar'],
'baz': ['baz'],
'hogera': [
{'hoge': ['hoge']},
{'fuga': ['fuga']}
]
})
def test_http_method_override_middleware():
request = RequestFactory().post(
path = '/',
data = {
'foo': 'bar',
'_method': 'PUT'
}
)
HttpMethodOverrideMiddleware().process_request(request)
assert_equal(request.method, 'PUT')
assert_equal(request.POST, {})
def test_http_put_middleware():
request = RequestFactory().put(
path = '/',
data = urlencode({
'foo': 'bar'
}),
content_type = "application/x-www-form-urlencoded"
)
HttpPutMiddleware().process_request(request)
assert_equal(request.PUT, {
'foo': ['bar']
})
|
"""Tests for respite.middleware."""
from nose.tools import *
from urllib import urlencode
from django.utils import simplejson as json
from django.test.client import Client, RequestFactory
from respite.middleware import *
client = Client()
def test_json_middleware():
request = RequestFactory().post(
path = '/',
data = json.dumps({
'foo': 'foo',
'bar': 'bar',
- 'baz': 'baz'
+ 'baz': 'baz',
? +
+ 'hogera': [
+ {'hoge': 'hoge'},
+ {'fuga': 'fuga'}
+ ]
}),
content_type = 'application/json'
)
JsonMiddleware().process_request(request)
assert_equal(request.POST, {
'foo': ['foo'],
'bar': ['bar'],
- 'baz': ['baz']
+ 'baz': ['baz'],
? +
+ 'hogera': [
+ {'hoge': ['hoge']},
+ {'fuga': ['fuga']}
+ ]
})
def test_http_method_override_middleware():
request = RequestFactory().post(
path = '/',
data = {
'foo': 'bar',
'_method': 'PUT'
}
)
HttpMethodOverrideMiddleware().process_request(request)
assert_equal(request.method, 'PUT')
assert_equal(request.POST, {})
def test_http_put_middleware():
request = RequestFactory().put(
path = '/',
data = urlencode({
'foo': 'bar'
}),
content_type = "application/x-www-form-urlencoded"
)
HttpPutMiddleware().process_request(request)
assert_equal(request.PUT, {
'foo': ['bar']
})
|
c1eafa32f9fafa859a0fcaf047f4a80b9bc52969
|
setup.py
|
setup.py
|
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='[email protected]',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
)
|
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='[email protected]',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
classifiers=[
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
)
|
Add PyPI classifiers for python versions
|
Add PyPI classifiers for python versions
|
Python
|
apache-2.0
|
penpen/whisper,deniszh/whisper,graphite-server/whisper,alexandreboisvert/whisper,akbooer/whisper,graphite-project/whisper,cbowman0/whisper,piotr1212/whisper,kerlandsson/whisper,obfuscurity/whisper
|
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='[email protected]',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
+ classifiers=[
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: 3.3',
+ ],
)
|
Add PyPI classifiers for python versions
|
## Code Before:
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='[email protected]',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
)
## Instruction:
Add PyPI classifiers for python versions
## Code After:
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='[email protected]',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
classifiers=[
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
)
|
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='[email protected]',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
+ classifiers=[
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: 3.3',
+ ],
)
|
bb8d9aa91b6d1bf2a765113d5845402c059e6969
|
IPython/core/payloadpage.py
|
IPython/core/payloadpage.py
|
"""A payload based version of page."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from IPython.core.getipython import get_ipython
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
def page(strng, start=0, screen_lines=0, pager_cmd=None):
"""Print a string, piping through a pager.
This version ignores the screen_lines and pager_cmd arguments and uses
IPython's payload system instead.
Parameters
----------
strng : str or mime-dict
Text to page, or a mime-type keyed dict of already formatted data.
start : int
Starting line at which to place the display.
"""
# Some routines may auto-compute start offsets incorrectly and pass a
# negative value. Offset to 0 for robustness.
start = max(0, start)
shell = get_ipython()
if isinstance(strng, dict):
data = strng
else:
data = {'text/plain' : strng}
payload = dict(
source='page',
data=data,
text=strng,
start=start,
screen_lines=screen_lines,
)
shell.payload_manager.write_payload(payload)
def install_payload_page():
"""Install this version of page as IPython.core.page.page."""
from IPython.core import page as corepage
corepage.page = page
|
"""A payload based version of page."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from IPython.core.getipython import get_ipython
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
def page(strng, start=0, screen_lines=0, pager_cmd=None):
"""Print a string, piping through a pager.
This version ignores the screen_lines and pager_cmd arguments and uses
IPython's payload system instead.
Parameters
----------
strng : str or mime-dict
Text to page, or a mime-type keyed dict of already formatted data.
start : int
Starting line at which to place the display.
"""
# Some routines may auto-compute start offsets incorrectly and pass a
# negative value. Offset to 0 for robustness.
start = max(0, start)
shell = get_ipython()
if isinstance(strng, dict):
data = strng
else:
data = {'text/plain' : strng}
payload = dict(
source='page',
data=data,
start=start,
screen_lines=screen_lines,
)
shell.payload_manager.write_payload(payload)
def install_payload_page():
"""Install this version of page as IPython.core.page.page."""
from IPython.core import page as corepage
corepage.page = page
|
Remove leftover text key from our own payload creation
|
Remove leftover text key from our own payload creation
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
"""A payload based version of page."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from IPython.core.getipython import get_ipython
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
def page(strng, start=0, screen_lines=0, pager_cmd=None):
"""Print a string, piping through a pager.
This version ignores the screen_lines and pager_cmd arguments and uses
IPython's payload system instead.
Parameters
----------
strng : str or mime-dict
Text to page, or a mime-type keyed dict of already formatted data.
start : int
Starting line at which to place the display.
"""
# Some routines may auto-compute start offsets incorrectly and pass a
# negative value. Offset to 0 for robustness.
start = max(0, start)
shell = get_ipython()
if isinstance(strng, dict):
data = strng
else:
data = {'text/plain' : strng}
payload = dict(
source='page',
data=data,
- text=strng,
start=start,
screen_lines=screen_lines,
)
shell.payload_manager.write_payload(payload)
def install_payload_page():
"""Install this version of page as IPython.core.page.page."""
from IPython.core import page as corepage
corepage.page = page
|
Remove leftover text key from our own payload creation
|
## Code Before:
"""A payload based version of page."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from IPython.core.getipython import get_ipython
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
def page(strng, start=0, screen_lines=0, pager_cmd=None):
"""Print a string, piping through a pager.
This version ignores the screen_lines and pager_cmd arguments and uses
IPython's payload system instead.
Parameters
----------
strng : str or mime-dict
Text to page, or a mime-type keyed dict of already formatted data.
start : int
Starting line at which to place the display.
"""
# Some routines may auto-compute start offsets incorrectly and pass a
# negative value. Offset to 0 for robustness.
start = max(0, start)
shell = get_ipython()
if isinstance(strng, dict):
data = strng
else:
data = {'text/plain' : strng}
payload = dict(
source='page',
data=data,
text=strng,
start=start,
screen_lines=screen_lines,
)
shell.payload_manager.write_payload(payload)
def install_payload_page():
"""Install this version of page as IPython.core.page.page."""
from IPython.core import page as corepage
corepage.page = page
## Instruction:
Remove leftover text key from our own payload creation
## Code After:
"""A payload based version of page."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from IPython.core.getipython import get_ipython
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
def page(strng, start=0, screen_lines=0, pager_cmd=None):
"""Print a string, piping through a pager.
This version ignores the screen_lines and pager_cmd arguments and uses
IPython's payload system instead.
Parameters
----------
strng : str or mime-dict
Text to page, or a mime-type keyed dict of already formatted data.
start : int
Starting line at which to place the display.
"""
# Some routines may auto-compute start offsets incorrectly and pass a
# negative value. Offset to 0 for robustness.
start = max(0, start)
shell = get_ipython()
if isinstance(strng, dict):
data = strng
else:
data = {'text/plain' : strng}
payload = dict(
source='page',
data=data,
start=start,
screen_lines=screen_lines,
)
shell.payload_manager.write_payload(payload)
def install_payload_page():
"""Install this version of page as IPython.core.page.page."""
from IPython.core import page as corepage
corepage.page = page
|
"""A payload based version of page."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from IPython.core.getipython import get_ipython
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
def page(strng, start=0, screen_lines=0, pager_cmd=None):
"""Print a string, piping through a pager.
This version ignores the screen_lines and pager_cmd arguments and uses
IPython's payload system instead.
Parameters
----------
strng : str or mime-dict
Text to page, or a mime-type keyed dict of already formatted data.
start : int
Starting line at which to place the display.
"""
# Some routines may auto-compute start offsets incorrectly and pass a
# negative value. Offset to 0 for robustness.
start = max(0, start)
shell = get_ipython()
if isinstance(strng, dict):
data = strng
else:
data = {'text/plain' : strng}
payload = dict(
source='page',
data=data,
- text=strng,
start=start,
screen_lines=screen_lines,
)
shell.payload_manager.write_payload(payload)
def install_payload_page():
"""Install this version of page as IPython.core.page.page."""
from IPython.core import page as corepage
corepage.page = page
|
3131f282d6ad1a703939c91c0d7dc0b3e4e54046
|
iati/versions.py
|
iati/versions.py
|
"""A module containing components that describe the IATI Standard itself (rather than the parts it is made up of)."""
import iati.constants
class Version(object):
"""Representation of an IATI Standard Version Number."""
def __init__(self, version_string):
"""Initialise a Version Number."""
if not isinstance(version_string, str):
raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string)))
if not version_string in iati.constants.STANDARD_VERSIONS:
raise ValueError('A valid version number must be specified.')
|
"""A module containing components that describe the IATI Standard itself (rather than the parts it is made up of)."""
import iati.constants
class Version(object):
"""Representation of an IATI Standard Version Number."""
def __init__(self, version_string):
"""Initialise a Version Number.
Args:
version_string (str): A string representation of an IATI version number.
Raises:
TypeError: If an attempt to pass something that is not a string is made.
ValueError: If a provided string is not a version number.
"""
if not isinstance(version_string, str):
raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string)))
if not version_string in iati.constants.STANDARD_VERSIONS:
raise ValueError('A valid version number must be specified.')
|
Document the current state of the Version class.
|
Document the current state of the Version class.
|
Python
|
mit
|
IATI/iati.core,IATI/iati.core
|
"""A module containing components that describe the IATI Standard itself (rather than the parts it is made up of)."""
import iati.constants
class Version(object):
"""Representation of an IATI Standard Version Number."""
def __init__(self, version_string):
- """Initialise a Version Number."""
+ """Initialise a Version Number.
+
+ Args:
+ version_string (str): A string representation of an IATI version number.
+
+ Raises:
+ TypeError: If an attempt to pass something that is not a string is made.
+ ValueError: If a provided string is not a version number.
+
+ """
if not isinstance(version_string, str):
raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string)))
if not version_string in iati.constants.STANDARD_VERSIONS:
raise ValueError('A valid version number must be specified.')
|
Document the current state of the Version class.
|
## Code Before:
"""A module containing components that describe the IATI Standard itself (rather than the parts it is made up of)."""
import iati.constants
class Version(object):
"""Representation of an IATI Standard Version Number."""
def __init__(self, version_string):
"""Initialise a Version Number."""
if not isinstance(version_string, str):
raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string)))
if not version_string in iati.constants.STANDARD_VERSIONS:
raise ValueError('A valid version number must be specified.')
## Instruction:
Document the current state of the Version class.
## Code After:
"""A module containing components that describe the IATI Standard itself (rather than the parts it is made up of)."""
import iati.constants
class Version(object):
"""Representation of an IATI Standard Version Number."""
def __init__(self, version_string):
"""Initialise a Version Number.
Args:
version_string (str): A string representation of an IATI version number.
Raises:
TypeError: If an attempt to pass something that is not a string is made.
ValueError: If a provided string is not a version number.
"""
if not isinstance(version_string, str):
raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string)))
if not version_string in iati.constants.STANDARD_VERSIONS:
raise ValueError('A valid version number must be specified.')
|
"""A module containing components that describe the IATI Standard itself (rather than the parts it is made up of)."""
import iati.constants
class Version(object):
"""Representation of an IATI Standard Version Number."""
def __init__(self, version_string):
- """Initialise a Version Number."""
? ---
+ """Initialise a Version Number.
+
+ Args:
+ version_string (str): A string representation of an IATI version number.
+
+ Raises:
+ TypeError: If an attempt to pass something that is not a string is made.
+ ValueError: If a provided string is not a version number.
+
+ """
if not isinstance(version_string, str):
raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string)))
if not version_string in iati.constants.STANDARD_VERSIONS:
raise ValueError('A valid version number must be specified.')
|
d4d409e52ce62053dd2ed40c1c5ee3ec7bce3ef3
|
src/hiss/handler/gntp/sync.py
|
src/hiss/handler/gntp/sync.py
|
import socket
from hiss.handler.gntp.message import Response
class GNTPHandler():
def register(self, notifier, target, **kwargs):
pass
def notify(self, notification, target):
pass
def unregister(self, notifier, target):
pass
def send_request(request, target, wait_for_response=True):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(target.address)
s.sendall(request.marshal())
if wait_for_response:
response_data = bytearray()
while True:
data = s.recv(1024)
if not data:
break
response_data.extend(data)
response = Response()
response.unmarshal(response_data)
return response
else:
response = None
s.close()
return response
|
import socket
from hiss.handler.gntp.message import Response
class GNTPHandler():
def register(self, notifier, target, **kwargs):
pass
def notify(self, notification, target):
pass
def unregister(self, notifier, target):
pass
def send_request(request, target, wait_for_response=True):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(target.address)
s.sendall(request.marshal())
if wait_for_response:
response_data = bytearray()
while True:
data = s.recv(1024)
if not data:
break
response_data.extend(data)
response = Response()
response.unmarshal(response_data)
else:
response = None
s.close()
return response
|
Make sure we close the socket
|
Make sure we close the socket
|
Python
|
apache-2.0
|
sffjunkie/hiss
|
import socket
from hiss.handler.gntp.message import Response
class GNTPHandler():
def register(self, notifier, target, **kwargs):
pass
def notify(self, notification, target):
pass
def unregister(self, notifier, target):
pass
def send_request(request, target, wait_for_response=True):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(target.address)
s.sendall(request.marshal())
if wait_for_response:
response_data = bytearray()
while True:
data = s.recv(1024)
if not data:
break
response_data.extend(data)
response = Response()
response.unmarshal(response_data)
- return response
else:
response = None
-
+
s.close()
return response
|
Make sure we close the socket
|
## Code Before:
import socket
from hiss.handler.gntp.message import Response
class GNTPHandler():
def register(self, notifier, target, **kwargs):
pass
def notify(self, notification, target):
pass
def unregister(self, notifier, target):
pass
def send_request(request, target, wait_for_response=True):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(target.address)
s.sendall(request.marshal())
if wait_for_response:
response_data = bytearray()
while True:
data = s.recv(1024)
if not data:
break
response_data.extend(data)
response = Response()
response.unmarshal(response_data)
return response
else:
response = None
s.close()
return response
## Instruction:
Make sure we close the socket
## Code After:
import socket
from hiss.handler.gntp.message import Response
class GNTPHandler():
def register(self, notifier, target, **kwargs):
pass
def notify(self, notification, target):
pass
def unregister(self, notifier, target):
pass
def send_request(request, target, wait_for_response=True):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(target.address)
s.sendall(request.marshal())
if wait_for_response:
response_data = bytearray()
while True:
data = s.recv(1024)
if not data:
break
response_data.extend(data)
response = Response()
response.unmarshal(response_data)
else:
response = None
s.close()
return response
|
import socket
from hiss.handler.gntp.message import Response
class GNTPHandler():
def register(self, notifier, target, **kwargs):
pass
def notify(self, notification, target):
pass
def unregister(self, notifier, target):
pass
def send_request(request, target, wait_for_response=True):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(target.address)
s.sendall(request.marshal())
if wait_for_response:
response_data = bytearray()
while True:
data = s.recv(1024)
if not data:
break
response_data.extend(data)
response = Response()
response.unmarshal(response_data)
- return response
else:
response = None
-
+
s.close()
return response
|
73d7377d0ba6c5ac768d547aaa957b48a6b1d46a
|
menu_generator/utils.py
|
menu_generator/utils.py
|
from importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
"The application {0} is not in the configured apps or does" +
"not have the pattern app.apps.AppConfig".format(app_path)
)
|
from importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
"The application {0} is not in the configured apps or does".format(app_path) +
"not have the pattern app.apps.AppConfig"
)
|
Fix exception message if app path is invalid
|
Fix exception message if app path is invalid
|
Python
|
mit
|
yamijuan/django-menu-generator
|
from importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
- "The application {0} is not in the configured apps or does" +
+ "The application {0} is not in the configured apps or does".format(app_path) +
- "not have the pattern app.apps.AppConfig".format(app_path)
+ "not have the pattern app.apps.AppConfig"
)
|
Fix exception message if app path is invalid
|
## Code Before:
from importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
"The application {0} is not in the configured apps or does" +
"not have the pattern app.apps.AppConfig".format(app_path)
)
## Instruction:
Fix exception message if app path is invalid
## Code After:
from importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
"The application {0} is not in the configured apps or does".format(app_path) +
"not have the pattern app.apps.AppConfig"
)
|
from importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
- "The application {0} is not in the configured apps or does" +
+ "The application {0} is not in the configured apps or does".format(app_path) +
? +++++++++++++++++
- "not have the pattern app.apps.AppConfig".format(app_path)
? -----------------
+ "not have the pattern app.apps.AppConfig"
)
|
11095d00dd1e4805739ffc376328e4ad2a6893fb
|
h2o-py/tests/testdir_algos/gbm/pyunit_cv_nfolds_gbm.py
|
h2o-py/tests/testdir_algos/gbm/pyunit_cv_nfolds_gbm.py
|
from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.gbm import H2OGradientBoostingEstimator
def cv_nfolds_gbm():
prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
prostate[1] = prostate[1].asfactor()
prostate.summary()
prostate_gbm = H2OGradientBoostingEstimator(nfolds=5, distribution="bernoulli")
prostate_gbm.train(x=list(range(2,9)), y=1, training_frame=prostate)
prostate_gbm.show()
# Can specify both nfolds >= 2 and validation data at once
try:
H2OGradientBoostingEstimator(nfolds=5,
distribution="bernoulli").train(x=list(range(2,9)),
y=1,
training_frame=prostate,
validation_frame=prostate)
assert True
except EnvironmentError:
assert False, "expected an error"
if __name__ == "__main__":
pyunit_utils.standalone_test(cv_nfolds_gbm)
else:
cv_nfolds_gbm()
|
from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.gbm import H2OGradientBoostingEstimator
def cv_nfolds_gbm():
prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
prostate[1] = prostate[1].asfactor()
prostate.summary()
prostate_gbm = H2OGradientBoostingEstimator(nfolds=5, distribution="bernoulli")
prostate_gbm.train(x=list(range(2,9)), y=1, training_frame=prostate)
prostate_gbm.show()
print(prostate_gbm.model_performance(xval=True))
# Can specify both nfolds >= 2 and validation data at once
try:
H2OGradientBoostingEstimator(nfolds=5,
distribution="bernoulli").train(x=list(range(2,9)),
y=1,
training_frame=prostate,
validation_frame=prostate)
assert True
except EnvironmentError:
assert False, "expected an error"
if __name__ == "__main__":
pyunit_utils.standalone_test(cv_nfolds_gbm)
else:
cv_nfolds_gbm()
|
Add pyunit test for model_performance(xval=True)
|
PUBDEV-2984: Add pyunit test for model_performance(xval=True)
|
Python
|
apache-2.0
|
mathemage/h2o-3,h2oai/h2o-dev,mathemage/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,mathemage/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,spennihana/h2o-3,mathemage/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,mathemage/h2o-3,spennihana/h2o-3,h2oai/h2o-3,jangorecki/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,jangorecki/h2o-3,h2oai/h2o-dev,jangorecki/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,spennihana/h2o-3,jangorecki/h2o-3,spennihana/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,jangorecki/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,jangorecki/h2o-3,h2oai/h2o-3,mathemage/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,jangorecki/h2o-3,spennihana/h2o-3,mathemage/h2o-3,h2oai/h2o-3,michalkurka/h2o-3
|
from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.gbm import H2OGradientBoostingEstimator
def cv_nfolds_gbm():
prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
prostate[1] = prostate[1].asfactor()
prostate.summary()
prostate_gbm = H2OGradientBoostingEstimator(nfolds=5, distribution="bernoulli")
prostate_gbm.train(x=list(range(2,9)), y=1, training_frame=prostate)
prostate_gbm.show()
+
+ print(prostate_gbm.model_performance(xval=True))
# Can specify both nfolds >= 2 and validation data at once
try:
H2OGradientBoostingEstimator(nfolds=5,
distribution="bernoulli").train(x=list(range(2,9)),
y=1,
training_frame=prostate,
validation_frame=prostate)
assert True
except EnvironmentError:
assert False, "expected an error"
if __name__ == "__main__":
pyunit_utils.standalone_test(cv_nfolds_gbm)
else:
cv_nfolds_gbm()
+
|
Add pyunit test for model_performance(xval=True)
|
## Code Before:
from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.gbm import H2OGradientBoostingEstimator
def cv_nfolds_gbm():
prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
prostate[1] = prostate[1].asfactor()
prostate.summary()
prostate_gbm = H2OGradientBoostingEstimator(nfolds=5, distribution="bernoulli")
prostate_gbm.train(x=list(range(2,9)), y=1, training_frame=prostate)
prostate_gbm.show()
# Can specify both nfolds >= 2 and validation data at once
try:
H2OGradientBoostingEstimator(nfolds=5,
distribution="bernoulli").train(x=list(range(2,9)),
y=1,
training_frame=prostate,
validation_frame=prostate)
assert True
except EnvironmentError:
assert False, "expected an error"
if __name__ == "__main__":
pyunit_utils.standalone_test(cv_nfolds_gbm)
else:
cv_nfolds_gbm()
## Instruction:
Add pyunit test for model_performance(xval=True)
## Code After:
from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.gbm import H2OGradientBoostingEstimator
def cv_nfolds_gbm():
prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
prostate[1] = prostate[1].asfactor()
prostate.summary()
prostate_gbm = H2OGradientBoostingEstimator(nfolds=5, distribution="bernoulli")
prostate_gbm.train(x=list(range(2,9)), y=1, training_frame=prostate)
prostate_gbm.show()
print(prostate_gbm.model_performance(xval=True))
# Can specify both nfolds >= 2 and validation data at once
try:
H2OGradientBoostingEstimator(nfolds=5,
distribution="bernoulli").train(x=list(range(2,9)),
y=1,
training_frame=prostate,
validation_frame=prostate)
assert True
except EnvironmentError:
assert False, "expected an error"
if __name__ == "__main__":
pyunit_utils.standalone_test(cv_nfolds_gbm)
else:
cv_nfolds_gbm()
|
from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.gbm import H2OGradientBoostingEstimator
def cv_nfolds_gbm():
prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
prostate[1] = prostate[1].asfactor()
prostate.summary()
prostate_gbm = H2OGradientBoostingEstimator(nfolds=5, distribution="bernoulli")
prostate_gbm.train(x=list(range(2,9)), y=1, training_frame=prostate)
prostate_gbm.show()
+
+ print(prostate_gbm.model_performance(xval=True))
# Can specify both nfolds >= 2 and validation data at once
try:
H2OGradientBoostingEstimator(nfolds=5,
distribution="bernoulli").train(x=list(range(2,9)),
y=1,
training_frame=prostate,
validation_frame=prostate)
assert True
except EnvironmentError:
assert False, "expected an error"
if __name__ == "__main__":
pyunit_utils.standalone_test(cv_nfolds_gbm)
else:
cv_nfolds_gbm()
|
7cff4344538c59763560a9a86fda0f464f208b66
|
nightreads/user_manager/user_service.py
|
nightreads/user_manager/user_service.py
|
from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import UserTag
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
user.usertag.tags.add(*tags_objs)
user.save()
def get_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
UserTag.objects.create(user=user)
return user
|
from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import UserTag
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
if tags_objs:
user.usertag.tags.clear()
user.usertag.tags.add(*tags_objs)
user.save()
def get_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
UserTag.objects.create(user=user)
return user
|
Clear existing tags before updating
|
Clear existing tags before updating
|
Python
|
mit
|
avinassh/nightreads,avinassh/nightreads
|
from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import UserTag
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
+ if tags_objs:
+ user.usertag.tags.clear()
- user.usertag.tags.add(*tags_objs)
+ user.usertag.tags.add(*tags_objs)
user.save()
def get_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
UserTag.objects.create(user=user)
return user
|
Clear existing tags before updating
|
## Code Before:
from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import UserTag
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
user.usertag.tags.add(*tags_objs)
user.save()
def get_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
UserTag.objects.create(user=user)
return user
## Instruction:
Clear existing tags before updating
## Code After:
from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import UserTag
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
if tags_objs:
user.usertag.tags.clear()
user.usertag.tags.add(*tags_objs)
user.save()
def get_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
UserTag.objects.create(user=user)
return user
|
from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import UserTag
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
+ if tags_objs:
+ user.usertag.tags.clear()
- user.usertag.tags.add(*tags_objs)
+ user.usertag.tags.add(*tags_objs)
? ++++
user.save()
def get_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
UserTag.objects.create(user=user)
return user
|
0464ac83d8aca12193a7629e72b880d5b8e2707a
|
plinth/modules/first_boot/templatetags/firstboot_extras.py
|
plinth/modules/first_boot/templatetags/firstboot_extras.py
|
from django import template
from plinth import kvstore
register = template.Library()
@register.simple_tag
def firstboot_is_finished():
state = kvstore.get_default('firstboot_state', 0)
return state >= 10
|
from django import template
from plinth import kvstore
register = template.Library()
@register.simple_tag
def firstboot_is_finished():
"""Return whether firstboot process is completed."""
state = kvstore.get_default('firstboot_state', 0)
return state >= 10
|
Add doc strings for custom tags
|
firstboot: Add doc strings for custom tags
|
Python
|
agpl-3.0
|
vignanl/Plinth,freedomboxtwh/Plinth,harry-7/Plinth,kkampardi/Plinth,harry-7/Plinth,freedomboxtwh/Plinth,kkampardi/Plinth,freedomboxtwh/Plinth,freedomboxtwh/Plinth,harry-7/Plinth,vignanl/Plinth,kkampardi/Plinth,vignanl/Plinth,kkampardi/Plinth,vignanl/Plinth,harry-7/Plinth,harry-7/Plinth,kkampardi/Plinth,vignanl/Plinth,freedomboxtwh/Plinth
|
from django import template
from plinth import kvstore
register = template.Library()
@register.simple_tag
def firstboot_is_finished():
+ """Return whether firstboot process is completed."""
state = kvstore.get_default('firstboot_state', 0)
return state >= 10
|
Add doc strings for custom tags
|
## Code Before:
from django import template
from plinth import kvstore
register = template.Library()
@register.simple_tag
def firstboot_is_finished():
state = kvstore.get_default('firstboot_state', 0)
return state >= 10
## Instruction:
Add doc strings for custom tags
## Code After:
from django import template
from plinth import kvstore
register = template.Library()
@register.simple_tag
def firstboot_is_finished():
"""Return whether firstboot process is completed."""
state = kvstore.get_default('firstboot_state', 0)
return state >= 10
|
from django import template
from plinth import kvstore
register = template.Library()
@register.simple_tag
def firstboot_is_finished():
+ """Return whether firstboot process is completed."""
state = kvstore.get_default('firstboot_state', 0)
return state >= 10
|
251e13b96ed10e48b69ccf5d625d673a5507f222
|
requests_kerberos/__init__.py
|
requests_kerberos/__init__.py
|
import logging
import sys
from .kerberos_ import HTTPKerberosAuth, REQUIRED, OPTIONAL, DISABLED
from .exceptions import MutualAuthenticationError
from .compat import NullHandler
logging.getLogger(__name__).addHandler(NullHandler())
__all__ = [HTTPKerberosAuth, MutualAuthenticationError, REQUIRED, OPTIONAL, DISABLED]
__version__ = '0.1'
|
import logging
import sys
from .kerberos_ import HTTPKerberosAuth, REQUIRED, OPTIONAL, DISABLED
from .exceptions import MutualAuthenticationError
from .compat import NullHandler
logging.getLogger(__name__).addHandler(NullHandler())
__all__ = [HTTPKerberosAuth, MutualAuthenticationError]
__version__ = '0.1'
|
Remove REQUIRED, OPTIONAL, DISABLED from default exports
|
Remove REQUIRED, OPTIONAL, DISABLED from default exports
Prevent polluting the callers namespace with generically named constants.
|
Python
|
isc
|
requests/requests-kerberos,AbsoluteMSTR/requests-kerberos,rbcarson/requests-kerberos,requests/requests-kerberos,danc86/requests-kerberos
|
import logging
import sys
from .kerberos_ import HTTPKerberosAuth, REQUIRED, OPTIONAL, DISABLED
from .exceptions import MutualAuthenticationError
from .compat import NullHandler
logging.getLogger(__name__).addHandler(NullHandler())
- __all__ = [HTTPKerberosAuth, MutualAuthenticationError, REQUIRED, OPTIONAL, DISABLED]
+ __all__ = [HTTPKerberosAuth, MutualAuthenticationError]
__version__ = '0.1'
|
Remove REQUIRED, OPTIONAL, DISABLED from default exports
|
## Code Before:
import logging
import sys
from .kerberos_ import HTTPKerberosAuth, REQUIRED, OPTIONAL, DISABLED
from .exceptions import MutualAuthenticationError
from .compat import NullHandler
logging.getLogger(__name__).addHandler(NullHandler())
__all__ = [HTTPKerberosAuth, MutualAuthenticationError, REQUIRED, OPTIONAL, DISABLED]
__version__ = '0.1'
## Instruction:
Remove REQUIRED, OPTIONAL, DISABLED from default exports
## Code After:
import logging
import sys
from .kerberos_ import HTTPKerberosAuth, REQUIRED, OPTIONAL, DISABLED
from .exceptions import MutualAuthenticationError
from .compat import NullHandler
logging.getLogger(__name__).addHandler(NullHandler())
__all__ = [HTTPKerberosAuth, MutualAuthenticationError]
__version__ = '0.1'
|
import logging
import sys
from .kerberos_ import HTTPKerberosAuth, REQUIRED, OPTIONAL, DISABLED
from .exceptions import MutualAuthenticationError
from .compat import NullHandler
logging.getLogger(__name__).addHandler(NullHandler())
- __all__ = [HTTPKerberosAuth, MutualAuthenticationError, REQUIRED, OPTIONAL, DISABLED]
? ------------------------------
+ __all__ = [HTTPKerberosAuth, MutualAuthenticationError]
__version__ = '0.1'
|
5c29b4322d1a24c4f389076f2a9b8acbeabd89e2
|
python/lumidatumclient/classes.py
|
python/lumidatumclient/classes.py
|
import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = model_id
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
selected_model_id = model_if if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
return self.getRecommendations(self, parameters, model_id)
|
import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = str(model_id)
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
selected_model_id = str(model_if) if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
return self.getRecommendations(self, parameters, model_id)
|
Fix for os.path.join with model_id, was breaking on non-string model_id values.
|
Fix for os.path.join with model_id, was breaking on non-string model_id values.
|
Python
|
mit
|
Lumidatum/lumidatumclients,Lumidatum/lumidatumclients,daws/lumidatumclients,Lumidatum/lumidatumclients
|
import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
- self.model_id = model_id
+ self.model_id = str(model_id)
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
- selected_model_id = model_if if model_id else self.model_id
+ selected_model_id = str(model_if) if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
return self.getRecommendations(self, parameters, model_id)
|
Fix for os.path.join with model_id, was breaking on non-string model_id values.
|
## Code Before:
import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = model_id
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
selected_model_id = model_if if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
return self.getRecommendations(self, parameters, model_id)
## Instruction:
Fix for os.path.join with model_id, was breaking on non-string model_id values.
## Code After:
import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = str(model_id)
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
selected_model_id = str(model_if) if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
return self.getRecommendations(self, parameters, model_id)
|
import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
- self.model_id = model_id
+ self.model_id = str(model_id)
? ++++ +
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
- selected_model_id = model_if if model_id else self.model_id
+ selected_model_id = str(model_if) if model_id else self.model_id
? ++++ +
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
return self.getRecommendations(self, parameters, model_id)
|
d8ae8f7bccdbe8eace5bb67b94a75a8003cc30b6
|
github/models.py
|
github/models.py
|
import json, requests
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page, Orderable
import django.utils.dateparse as dateparse
from django.db import models
from django.core.cache import cache
class GithubOrgIndexPage(Page):
github_org_name = models.CharField(default='City-of-Helsinki', max_length=200)
content_panels = Page.content_panels + [
FieldPanel('github_org_name'),
]
def events(self):
events = cache.get('github')
if not events:
response = requests.get('https://api.github.com/orgs/' + self.github_org_name + '/events?per_page=20')
if response.status_code == 200:
cache.add('github', response.json(), 60)
events = cache.get('github')
for index, event in enumerate(events):
event['created_at'] = dateparse.parse_datetime(event['created_at'])
# get html repo url
event['repo']['url'] = event['repo']['url'].replace('https://api.github.com/repos/', 'https://github.com/')
return events
def top_events(self):
return self.events()[:3]
|
import json, requests
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page, Orderable
import django.utils.dateparse as dateparse
from django.db import models
from django.core.cache import cache
class GithubOrgIndexPage(Page):
github_org_name = models.CharField(default='City-of-Helsinki', max_length=200)
content_panels = Page.content_panels + [
FieldPanel('github_org_name'),
]
def events(self):
events = cache.get('github')
if not events:
response = requests.get('https://api.github.com/orgs/' + self.github_org_name + '/events?per_page=20')
if response.status_code == 200:
cache.add('github', response.json(), 60)
events = cache.get('github')
for index, event in enumerate(events):
event['created_at'] = dateparse.parse_datetime(event['created_at'])
# get html repo url
event['repo']['url'] = event['repo']['url'].replace('https://api.github.com/repos/', 'https://github.com/')
return events
def top_events(self):
try:
return self.events()[:3]
except (TypeError, KeyError):
# not enough events
return None
|
Fix github top_events if events empty
|
Fix github top_events if events empty
|
Python
|
agpl-3.0
|
terotic/devheldev,terotic/devheldev,City-of-Helsinki/devheldev,terotic/devheldev,City-of-Helsinki/devheldev,City-of-Helsinki/devheldev
|
import json, requests
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page, Orderable
import django.utils.dateparse as dateparse
from django.db import models
from django.core.cache import cache
class GithubOrgIndexPage(Page):
github_org_name = models.CharField(default='City-of-Helsinki', max_length=200)
content_panels = Page.content_panels + [
FieldPanel('github_org_name'),
]
def events(self):
events = cache.get('github')
if not events:
response = requests.get('https://api.github.com/orgs/' + self.github_org_name + '/events?per_page=20')
if response.status_code == 200:
cache.add('github', response.json(), 60)
events = cache.get('github')
for index, event in enumerate(events):
event['created_at'] = dateparse.parse_datetime(event['created_at'])
# get html repo url
event['repo']['url'] = event['repo']['url'].replace('https://api.github.com/repos/', 'https://github.com/')
return events
def top_events(self):
+ try:
- return self.events()[:3]
+ return self.events()[:3]
+ except (TypeError, KeyError):
+ # not enough events
+ return None
|
Fix github top_events if events empty
|
## Code Before:
import json, requests
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page, Orderable
import django.utils.dateparse as dateparse
from django.db import models
from django.core.cache import cache
class GithubOrgIndexPage(Page):
github_org_name = models.CharField(default='City-of-Helsinki', max_length=200)
content_panels = Page.content_panels + [
FieldPanel('github_org_name'),
]
def events(self):
events = cache.get('github')
if not events:
response = requests.get('https://api.github.com/orgs/' + self.github_org_name + '/events?per_page=20')
if response.status_code == 200:
cache.add('github', response.json(), 60)
events = cache.get('github')
for index, event in enumerate(events):
event['created_at'] = dateparse.parse_datetime(event['created_at'])
# get html repo url
event['repo']['url'] = event['repo']['url'].replace('https://api.github.com/repos/', 'https://github.com/')
return events
def top_events(self):
return self.events()[:3]
## Instruction:
Fix github top_events if events empty
## Code After:
import json, requests
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page, Orderable
import django.utils.dateparse as dateparse
from django.db import models
from django.core.cache import cache
class GithubOrgIndexPage(Page):
github_org_name = models.CharField(default='City-of-Helsinki', max_length=200)
content_panels = Page.content_panels + [
FieldPanel('github_org_name'),
]
def events(self):
events = cache.get('github')
if not events:
response = requests.get('https://api.github.com/orgs/' + self.github_org_name + '/events?per_page=20')
if response.status_code == 200:
cache.add('github', response.json(), 60)
events = cache.get('github')
for index, event in enumerate(events):
event['created_at'] = dateparse.parse_datetime(event['created_at'])
# get html repo url
event['repo']['url'] = event['repo']['url'].replace('https://api.github.com/repos/', 'https://github.com/')
return events
def top_events(self):
try:
return self.events()[:3]
except (TypeError, KeyError):
# not enough events
return None
|
import json, requests
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page, Orderable
import django.utils.dateparse as dateparse
from django.db import models
from django.core.cache import cache
class GithubOrgIndexPage(Page):
github_org_name = models.CharField(default='City-of-Helsinki', max_length=200)
content_panels = Page.content_panels + [
FieldPanel('github_org_name'),
]
def events(self):
events = cache.get('github')
if not events:
response = requests.get('https://api.github.com/orgs/' + self.github_org_name + '/events?per_page=20')
if response.status_code == 200:
cache.add('github', response.json(), 60)
events = cache.get('github')
for index, event in enumerate(events):
event['created_at'] = dateparse.parse_datetime(event['created_at'])
# get html repo url
event['repo']['url'] = event['repo']['url'].replace('https://api.github.com/repos/', 'https://github.com/')
return events
def top_events(self):
+ try:
- return self.events()[:3]
+ return self.events()[:3]
? ++++
+ except (TypeError, KeyError):
+ # not enough events
+ return None
|
c1dfbc8e8b3ae29436c584d906636ea541dfb6a8
|
apps/storybase_asset/embedable_resource/__init__.py
|
apps/storybase_asset/embedable_resource/__init__.py
|
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
EmbedableResource.register(GoogleSpreadsheetProvider)
|
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url, width=500, height=300):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
class GoogleMapProvider(EmbedableResourceProvider):
def get_html(self, url, width=425, height=350):
if not self.match(url):
raise UrlNotMatched
return '<iframe width="%d" height="%d" frameborder="0" scrolling="no" marginheight="0" marginwidth="0" src="%s&output=embed"></iframe><br /><small><a href="%s&source=embed" style="color:#0000FF;text-align:left">View Larger Map</a></small>' % (width, height, url, url)
EmbedableResource.register(GoogleSpreadsheetProvider)
EmbedableResource.register(GoogleMapProvider)
|
Allow embedding of Google Docs by URL
|
Allow embedding of Google Docs by URL
|
Python
|
mit
|
denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase
|
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
+
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
- def get_html(self, url):
+ def get_html(self, url, width=500, height=300):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
+
+ class GoogleMapProvider(EmbedableResourceProvider):
+ def get_html(self, url, width=425, height=350):
+ if not self.match(url):
+ raise UrlNotMatched
+
+ return '<iframe width="%d" height="%d" frameborder="0" scrolling="no" marginheight="0" marginwidth="0" src="%s&output=embed"></iframe><br /><small><a href="%s&source=embed" style="color:#0000FF;text-align:left">View Larger Map</a></small>' % (width, height, url, url)
+
EmbedableResource.register(GoogleSpreadsheetProvider)
+ EmbedableResource.register(GoogleMapProvider)
|
Allow embedding of Google Docs by URL
|
## Code Before:
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
EmbedableResource.register(GoogleSpreadsheetProvider)
## Instruction:
Allow embedding of Google Docs by URL
## Code After:
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url, width=500, height=300):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
class GoogleMapProvider(EmbedableResourceProvider):
def get_html(self, url, width=425, height=350):
if not self.match(url):
raise UrlNotMatched
return '<iframe width="%d" height="%d" frameborder="0" scrolling="no" marginheight="0" marginwidth="0" src="%s&output=embed"></iframe><br /><small><a href="%s&source=embed" style="color:#0000FF;text-align:left">View Larger Map</a></small>' % (width, height, url, url)
EmbedableResource.register(GoogleSpreadsheetProvider)
EmbedableResource.register(GoogleMapProvider)
|
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
+
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
- def get_html(self, url):
+ def get_html(self, url, width=500, height=300):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
+
+ class GoogleMapProvider(EmbedableResourceProvider):
+ def get_html(self, url, width=425, height=350):
+ if not self.match(url):
+ raise UrlNotMatched
+
+ return '<iframe width="%d" height="%d" frameborder="0" scrolling="no" marginheight="0" marginwidth="0" src="%s&output=embed"></iframe><br /><small><a href="%s&source=embed" style="color:#0000FF;text-align:left">View Larger Map</a></small>' % (width, height, url, url)
+
EmbedableResource.register(GoogleSpreadsheetProvider)
+ EmbedableResource.register(GoogleMapProvider)
|
f5f7eb086aff7cdc61bbfa850b638db5b7e0d211
|
tests/test_order.py
|
tests/test_order.py
|
from flask import url_for
class TestBreakTheOrder:
"""
Breaking the order
"""
def test_order_is_not_not_found(self, testapp):
"""
There actually is an order... Amazing.
I know, right?
"""
# !!! URL needs the / at the end.
res = testapp.get('/orders/')
assert res.status_code != 404
def test_order_is_accessible(self, testapp):
"""
Breaching the order?! Success!
"""
# testapp made available from the tests module
res = testapp.get('/orders/')
assert res.status_code == 200
def test_order_has_list_of_not_beer(self, testapp):
"""
Range of beer is NOT available!
Do I look like Robin?
"""
res = testapp.get('/orders/orders')
# i have discovered that "string" in res is case sensitive
# in general to know more see:
# http://webtest.readthedocs.io/en/latest/api.html#webtest-response-testresponse
assert "List of NOT beer" in res
def test_browse_list_returns_empty_list(self, order, testapp):
res = testapp.get('/orders/ordersList')
assert "data" in res
|
from flask import url_for
class TestBreakTheOrder:
"""
Breaking the order
"""
def test_order_gives_401_without_login(self, testapp):
"""
There actually is an order... Amazing.
I know, right?
"""
# !!! URL needs the / at the end.
res = testapp.get('/orders/', expect_errors=True)
print(res)
print(res.status_code)
assert res.status_code == 401
def test_order_has_list_of_not_beer(self, testapp):
"""
Range of beer is NOT available!
Do I look like Robin?
"""
res = testapp.get('/orders/orders')
# i have discovered that "string" in res is case sensitive
# in general to know more see:
# http://webtest.readthedocs.io/en/latest/api.html#webtest-response-testresponse
assert "List of NOT beer" in res
def test_browse_list_returns_empty_list(self, order, testapp):
res = testapp.get('/orders/ordersList')
assert "data" in res
|
Update test order to check for 401.
|
Update test order to check for 401.
|
Python
|
bsd-3-clause
|
robin-lee/store,tankca/store,tankca/store,William93/store,boomcan90/store,tankca/store,William93/store,William93/store,robin-lee/store,boomcan90/store,robin-lee/store,boomcan90/store
|
from flask import url_for
+
class TestBreakTheOrder:
"""
Breaking the order
"""
+
- def test_order_is_not_not_found(self, testapp):
+ def test_order_gives_401_without_login(self, testapp):
"""
- There actually is an order... Amazing.
+ There actually is an order... Amazing.
I know, right?
"""
# !!! URL needs the / at the end.
- res = testapp.get('/orders/')
+ res = testapp.get('/orders/', expect_errors=True)
+ print(res)
+ print(res.status_code)
- assert res.status_code != 404
+ assert res.status_code == 401
-
- def test_order_is_accessible(self, testapp):
- """
- Breaching the order?! Success!
- """
- # testapp made available from the tests module
- res = testapp.get('/orders/')
- assert res.status_code == 200
def test_order_has_list_of_not_beer(self, testapp):
"""
Range of beer is NOT available!
Do I look like Robin?
"""
res = testapp.get('/orders/orders')
# i have discovered that "string" in res is case sensitive
# in general to know more see:
# http://webtest.readthedocs.io/en/latest/api.html#webtest-response-testresponse
assert "List of NOT beer" in res
def test_browse_list_returns_empty_list(self, order, testapp):
res = testapp.get('/orders/ordersList')
assert "data" in res
|
Update test order to check for 401.
|
## Code Before:
from flask import url_for
class TestBreakTheOrder:
"""
Breaking the order
"""
def test_order_is_not_not_found(self, testapp):
"""
There actually is an order... Amazing.
I know, right?
"""
# !!! URL needs the / at the end.
res = testapp.get('/orders/')
assert res.status_code != 404
def test_order_is_accessible(self, testapp):
"""
Breaching the order?! Success!
"""
# testapp made available from the tests module
res = testapp.get('/orders/')
assert res.status_code == 200
def test_order_has_list_of_not_beer(self, testapp):
"""
Range of beer is NOT available!
Do I look like Robin?
"""
res = testapp.get('/orders/orders')
# i have discovered that "string" in res is case sensitive
# in general to know more see:
# http://webtest.readthedocs.io/en/latest/api.html#webtest-response-testresponse
assert "List of NOT beer" in res
def test_browse_list_returns_empty_list(self, order, testapp):
res = testapp.get('/orders/ordersList')
assert "data" in res
## Instruction:
Update test order to check for 401.
## Code After:
from flask import url_for
class TestBreakTheOrder:
"""
Breaking the order
"""
def test_order_gives_401_without_login(self, testapp):
"""
There actually is an order... Amazing.
I know, right?
"""
# !!! URL needs the / at the end.
res = testapp.get('/orders/', expect_errors=True)
print(res)
print(res.status_code)
assert res.status_code == 401
def test_order_has_list_of_not_beer(self, testapp):
"""
Range of beer is NOT available!
Do I look like Robin?
"""
res = testapp.get('/orders/orders')
# i have discovered that "string" in res is case sensitive
# in general to know more see:
# http://webtest.readthedocs.io/en/latest/api.html#webtest-response-testresponse
assert "List of NOT beer" in res
def test_browse_list_returns_empty_list(self, order, testapp):
res = testapp.get('/orders/ordersList')
assert "data" in res
|
from flask import url_for
+
class TestBreakTheOrder:
"""
Breaking the order
"""
+
- def test_order_is_not_not_found(self, testapp):
? ^ --------
+ def test_order_gives_401_without_login(self, testapp):
? + ++ ^^^^^^^^ + ++++
"""
- There actually is an order... Amazing.
? -
+ There actually is an order... Amazing.
I know, right?
"""
# !!! URL needs the / at the end.
- res = testapp.get('/orders/')
+ res = testapp.get('/orders/', expect_errors=True)
? ++++++++++++++++++++
+ print(res)
+ print(res.status_code)
- assert res.status_code != 404
? ^ ^
+ assert res.status_code == 401
? ^ ^
-
- def test_order_is_accessible(self, testapp):
- """
- Breaching the order?! Success!
- """
- # testapp made available from the tests module
- res = testapp.get('/orders/')
- assert res.status_code == 200
def test_order_has_list_of_not_beer(self, testapp):
"""
Range of beer is NOT available!
Do I look like Robin?
"""
res = testapp.get('/orders/orders')
# i have discovered that "string" in res is case sensitive
# in general to know more see:
# http://webtest.readthedocs.io/en/latest/api.html#webtest-response-testresponse
assert "List of NOT beer" in res
def test_browse_list_returns_empty_list(self, order, testapp):
res = testapp.get('/orders/ordersList')
assert "data" in res
|
efebbe998ac67810f6e0f86b685ab18f1ccf2bda
|
nio_cli/commands/config.py
|
nio_cli/commands/config.py
|
from .base import Base
import requests
class Config(Base):
""" Get basic nio info """
def __init__(self, options, *args, **kwargs):
super().__init__(options, *args, **kwargs)
self._resource = 'services' if self.options['services'] else 'blocks'
self._resource_name = \
self.options['<service-name>'] if self.options['services'] else \
self.options['<block-name>'] if self.options['blocks'] else \
""
def run(self):
response = requests.get(
self._base_url.format(
'{}/{}'.format(self._resource, self._resource_name)),
auth=self._auth)
try:
config = response.json()
print(requests.get(
self._base_url.format(self._resource),
json=config,
auth=self._auth).json())
except Exception as e:
print(e)
|
from .base import Base
import requests
class Config(Base):
""" Get basic nio info """
def __init__(self, options, *args, **kwargs):
super().__init__(options, *args, **kwargs)
self._resource = 'services' if self.options['services'] else 'blocks'
self._resource_name = \
self.options['<service-name>'] if self.options['services'] else \
self.options['<block-name>'] if self.options['blocks'] else \
""
def run(self):
response = requests.get(
self._base_url.format(
'{}/{}'.format(self._resource, self._resource_name)),
auth=self._auth)
try:
config = response.json()
print(config)
except Exception as e:
print(e)
|
Remove additional http get request
|
Remove additional http get request
|
Python
|
apache-2.0
|
nioinnovation/nio-cli,neutralio/nio-cli
|
from .base import Base
import requests
class Config(Base):
""" Get basic nio info """
def __init__(self, options, *args, **kwargs):
super().__init__(options, *args, **kwargs)
self._resource = 'services' if self.options['services'] else 'blocks'
self._resource_name = \
self.options['<service-name>'] if self.options['services'] else \
self.options['<block-name>'] if self.options['blocks'] else \
""
def run(self):
response = requests.get(
self._base_url.format(
'{}/{}'.format(self._resource, self._resource_name)),
auth=self._auth)
try:
config = response.json()
+ print(config)
- print(requests.get(
- self._base_url.format(self._resource),
- json=config,
- auth=self._auth).json())
except Exception as e:
print(e)
|
Remove additional http get request
|
## Code Before:
from .base import Base
import requests
class Config(Base):
""" Get basic nio info """
def __init__(self, options, *args, **kwargs):
super().__init__(options, *args, **kwargs)
self._resource = 'services' if self.options['services'] else 'blocks'
self._resource_name = \
self.options['<service-name>'] if self.options['services'] else \
self.options['<block-name>'] if self.options['blocks'] else \
""
def run(self):
response = requests.get(
self._base_url.format(
'{}/{}'.format(self._resource, self._resource_name)),
auth=self._auth)
try:
config = response.json()
print(requests.get(
self._base_url.format(self._resource),
json=config,
auth=self._auth).json())
except Exception as e:
print(e)
## Instruction:
Remove additional http get request
## Code After:
from .base import Base
import requests
class Config(Base):
""" Get basic nio info """
def __init__(self, options, *args, **kwargs):
super().__init__(options, *args, **kwargs)
self._resource = 'services' if self.options['services'] else 'blocks'
self._resource_name = \
self.options['<service-name>'] if self.options['services'] else \
self.options['<block-name>'] if self.options['blocks'] else \
""
def run(self):
response = requests.get(
self._base_url.format(
'{}/{}'.format(self._resource, self._resource_name)),
auth=self._auth)
try:
config = response.json()
print(config)
except Exception as e:
print(e)
|
from .base import Base
import requests
class Config(Base):
""" Get basic nio info """
def __init__(self, options, *args, **kwargs):
super().__init__(options, *args, **kwargs)
self._resource = 'services' if self.options['services'] else 'blocks'
self._resource_name = \
self.options['<service-name>'] if self.options['services'] else \
self.options['<block-name>'] if self.options['blocks'] else \
""
def run(self):
response = requests.get(
self._base_url.format(
'{}/{}'.format(self._resource, self._resource_name)),
auth=self._auth)
try:
config = response.json()
+ print(config)
- print(requests.get(
- self._base_url.format(self._resource),
- json=config,
- auth=self._auth).json())
except Exception as e:
print(e)
|
9bdf9455344b83fc28c5ecceafba82036bb2c75d
|
foodsaving/management/tests/test_makemessages.py
|
foodsaving/management/tests/test_makemessages.py
|
from unittest.mock import patch
from django.test import TestCase
from ..commands.makemessages import Command as MakeMessagesCommand
from django_jinja.management.commands.makemessages import Command as DjangoJinjaMakeMessagesCommand
makemessages = MakeMessagesCommand
django_jinja_makemessages = DjangoJinjaMakeMessagesCommand
class CustomMakeMessagesTest(TestCase):
def test_update_options(self):
options = {
'locale': [],
}
modified_options = MakeMessagesCommand.update_options(**options)
self.assertIn('jinja', modified_options['extensions'])
self.assertIn('en', modified_options['locale'])
options['extensions'] = ['py']
modified_options_with_initial_extension = MakeMessagesCommand.update_options(**options)
self.assertIn('jinja', modified_options_with_initial_extension['extensions'])
@patch(__name__ + '.django_jinja_makemessages.handle')
@patch(__name__ + '.makemessages.update_options', return_value={})
def test_handle(self, mock1, mock2):
MakeMessagesCommand.handle(MakeMessagesCommand())
assert MakeMessagesCommand.update_options.called
assert DjangoJinjaMakeMessagesCommand.handle.called
|
from unittest.mock import patch
from django.test import TestCase
from ..commands.makemessages import Command as MakeMessagesCommand
from django_jinja.management.commands.makemessages import Command as DjangoJinjaMakeMessagesCommand
makemessages = MakeMessagesCommand
django_jinja_makemessages = DjangoJinjaMakeMessagesCommand
class CustomMakeMessagesTest(TestCase):
def test_update_options(self):
options = {
'locale': [],
}
modified_options = MakeMessagesCommand.update_options(**options)
self.assertIn('jinja2', modified_options['extensions'])
self.assertIn('en', modified_options['locale'])
@patch(__name__ + '.django_jinja_makemessages.handle')
@patch(__name__ + '.makemessages.update_options', return_value={})
def test_handle(self, mock1, mock2):
MakeMessagesCommand.handle(MakeMessagesCommand())
assert MakeMessagesCommand.update_options.called
assert DjangoJinjaMakeMessagesCommand.handle.called
|
Fix makemessages test to look for jinja2
|
Fix makemessages test to look for jinja2
|
Python
|
agpl-3.0
|
yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend
|
from unittest.mock import patch
from django.test import TestCase
from ..commands.makemessages import Command as MakeMessagesCommand
from django_jinja.management.commands.makemessages import Command as DjangoJinjaMakeMessagesCommand
makemessages = MakeMessagesCommand
django_jinja_makemessages = DjangoJinjaMakeMessagesCommand
class CustomMakeMessagesTest(TestCase):
def test_update_options(self):
options = {
'locale': [],
}
modified_options = MakeMessagesCommand.update_options(**options)
- self.assertIn('jinja', modified_options['extensions'])
+ self.assertIn('jinja2', modified_options['extensions'])
self.assertIn('en', modified_options['locale'])
-
- options['extensions'] = ['py']
- modified_options_with_initial_extension = MakeMessagesCommand.update_options(**options)
- self.assertIn('jinja', modified_options_with_initial_extension['extensions'])
@patch(__name__ + '.django_jinja_makemessages.handle')
@patch(__name__ + '.makemessages.update_options', return_value={})
def test_handle(self, mock1, mock2):
MakeMessagesCommand.handle(MakeMessagesCommand())
assert MakeMessagesCommand.update_options.called
assert DjangoJinjaMakeMessagesCommand.handle.called
|
Fix makemessages test to look for jinja2
|
## Code Before:
from unittest.mock import patch
from django.test import TestCase
from ..commands.makemessages import Command as MakeMessagesCommand
from django_jinja.management.commands.makemessages import Command as DjangoJinjaMakeMessagesCommand
makemessages = MakeMessagesCommand
django_jinja_makemessages = DjangoJinjaMakeMessagesCommand
class CustomMakeMessagesTest(TestCase):
def test_update_options(self):
options = {
'locale': [],
}
modified_options = MakeMessagesCommand.update_options(**options)
self.assertIn('jinja', modified_options['extensions'])
self.assertIn('en', modified_options['locale'])
options['extensions'] = ['py']
modified_options_with_initial_extension = MakeMessagesCommand.update_options(**options)
self.assertIn('jinja', modified_options_with_initial_extension['extensions'])
@patch(__name__ + '.django_jinja_makemessages.handle')
@patch(__name__ + '.makemessages.update_options', return_value={})
def test_handle(self, mock1, mock2):
MakeMessagesCommand.handle(MakeMessagesCommand())
assert MakeMessagesCommand.update_options.called
assert DjangoJinjaMakeMessagesCommand.handle.called
## Instruction:
Fix makemessages test to look for jinja2
## Code After:
from unittest.mock import patch
from django.test import TestCase
from ..commands.makemessages import Command as MakeMessagesCommand
from django_jinja.management.commands.makemessages import Command as DjangoJinjaMakeMessagesCommand
makemessages = MakeMessagesCommand
django_jinja_makemessages = DjangoJinjaMakeMessagesCommand
class CustomMakeMessagesTest(TestCase):
def test_update_options(self):
options = {
'locale': [],
}
modified_options = MakeMessagesCommand.update_options(**options)
self.assertIn('jinja2', modified_options['extensions'])
self.assertIn('en', modified_options['locale'])
@patch(__name__ + '.django_jinja_makemessages.handle')
@patch(__name__ + '.makemessages.update_options', return_value={})
def test_handle(self, mock1, mock2):
MakeMessagesCommand.handle(MakeMessagesCommand())
assert MakeMessagesCommand.update_options.called
assert DjangoJinjaMakeMessagesCommand.handle.called
|
from unittest.mock import patch
from django.test import TestCase
from ..commands.makemessages import Command as MakeMessagesCommand
from django_jinja.management.commands.makemessages import Command as DjangoJinjaMakeMessagesCommand
makemessages = MakeMessagesCommand
django_jinja_makemessages = DjangoJinjaMakeMessagesCommand
class CustomMakeMessagesTest(TestCase):
def test_update_options(self):
options = {
'locale': [],
}
modified_options = MakeMessagesCommand.update_options(**options)
- self.assertIn('jinja', modified_options['extensions'])
+ self.assertIn('jinja2', modified_options['extensions'])
? +
self.assertIn('en', modified_options['locale'])
-
- options['extensions'] = ['py']
- modified_options_with_initial_extension = MakeMessagesCommand.update_options(**options)
- self.assertIn('jinja', modified_options_with_initial_extension['extensions'])
@patch(__name__ + '.django_jinja_makemessages.handle')
@patch(__name__ + '.makemessages.update_options', return_value={})
def test_handle(self, mock1, mock2):
MakeMessagesCommand.handle(MakeMessagesCommand())
assert MakeMessagesCommand.update_options.called
assert DjangoJinjaMakeMessagesCommand.handle.called
|
86446c6d1b0b8583562e0fccf1745e95ce7003c2
|
util/__init__.py
|
util/__init__.py
|
from __future__ import division
class HalError(RuntimeError):
def __init__(self, string=''):
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Dave'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that. {1}'.format(
username, string)
# Init base class
RuntimeError.__init__(self, text)
def halraiser(e):
''' Function to re-raise an exception with a Hal message. '''
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Humanoid'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that.'.format(username)
# Append to exception
if len(e.args) == 0:
e.args = (text,)
elif len(e.args) == 1:
e.args = (text + ' ' + e.args[0],)
else:
e.args = (text,) + e.args
# Reraise the exception
raise
|
from __future__ import division
class HalError(RuntimeError):
def __init__(self, string=''):
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Dave'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that. {1}'.format(
username, string)
# Init base class
RuntimeError.__init__(self, text)
def halraiser(e):
''' Function to re-raise an exception with a Hal message. '''
import logging
# Get the log and write the error to the log file
log = logging.getLogger(__name__)
log.error(e)
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Humanoid'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that.'.format(username)
# Append to exception
if len(e.args) == 0:
e.args = (text,)
elif len(e.args) == 1:
e.args = (text + ' ' + e.args[0],)
else:
e.args = (text,) + e.args
# Reraise the exception
raise
|
Print out errors to log.
|
Print out errors to log.
|
Python
|
bsd-3-clause
|
dials/dials,dials/dials,dials/dials,dials/dials,dials/dials
|
from __future__ import division
class HalError(RuntimeError):
def __init__(self, string=''):
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Dave'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that. {1}'.format(
username, string)
# Init base class
RuntimeError.__init__(self, text)
def halraiser(e):
''' Function to re-raise an exception with a Hal message. '''
+ import logging
+
+ # Get the log and write the error to the log file
+ log = logging.getLogger(__name__)
+ log.error(e)
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Humanoid'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that.'.format(username)
# Append to exception
if len(e.args) == 0:
e.args = (text,)
elif len(e.args) == 1:
e.args = (text + ' ' + e.args[0],)
else:
e.args = (text,) + e.args
# Reraise the exception
raise
|
Print out errors to log.
|
## Code Before:
from __future__ import division
class HalError(RuntimeError):
def __init__(self, string=''):
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Dave'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that. {1}'.format(
username, string)
# Init base class
RuntimeError.__init__(self, text)
def halraiser(e):
''' Function to re-raise an exception with a Hal message. '''
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Humanoid'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that.'.format(username)
# Append to exception
if len(e.args) == 0:
e.args = (text,)
elif len(e.args) == 1:
e.args = (text + ' ' + e.args[0],)
else:
e.args = (text,) + e.args
# Reraise the exception
raise
## Instruction:
Print out errors to log.
## Code After:
from __future__ import division
class HalError(RuntimeError):
def __init__(self, string=''):
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Dave'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that. {1}'.format(
username, string)
# Init base class
RuntimeError.__init__(self, text)
def halraiser(e):
''' Function to re-raise an exception with a Hal message. '''
import logging
# Get the log and write the error to the log file
log = logging.getLogger(__name__)
log.error(e)
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Humanoid'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that.'.format(username)
# Append to exception
if len(e.args) == 0:
e.args = (text,)
elif len(e.args) == 1:
e.args = (text + ' ' + e.args[0],)
else:
e.args = (text,) + e.args
# Reraise the exception
raise
|
from __future__ import division
class HalError(RuntimeError):
def __init__(self, string=''):
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Dave'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that. {1}'.format(
username, string)
# Init base class
RuntimeError.__init__(self, text)
def halraiser(e):
''' Function to re-raise an exception with a Hal message. '''
+ import logging
+
+ # Get the log and write the error to the log file
+ log = logging.getLogger(__name__)
+ log.error(e)
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Humanoid'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that.'.format(username)
# Append to exception
if len(e.args) == 0:
e.args = (text,)
elif len(e.args) == 1:
e.args = (text + ' ' + e.args[0],)
else:
e.args = (text,) + e.args
# Reraise the exception
raise
|
8d70645168ea4962359d67b00926f29544f4c506
|
organizations/managers.py
|
organizations/managers.py
|
from django.db import models
class OrgManager(models.Manager):
def get_for_user(self, user):
return self.get_query_set().filter(users=user)
class ActiveOrgManager(OrgManager):
"""
A more useful extension of the default manager which returns querysets
including only active organizations
"""
def get_query_set(self):
return super(ActiveOrgManager,
self).get_query_set().filter(is_active=True)
|
from django.db import models
class OrgManager(models.Manager):
def get_for_user(self, user):
if hasattr(self, 'get_queryset'):
return self.get_queryset().filter(users=user)
else:
# Deprecated method for older versions of Django
return self.get_query_set().filter(users=user)
class ActiveOrgManager(OrgManager):
"""
A more useful extension of the default manager which returns querysets
including only active organizations
"""
def get_queryset(self):
try:
return super(ActiveOrgManager,
self).get_queryset().filter(is_active=True)
except AttributeError:
# Deprecated method for older versions of Django.
return super(ActiveOrgManager,
self).get_query_set().filter(is_active=True)
get_query_set = get_queryset
|
Use get_queryset method by default
|
Use get_queryset method by default
Adds handler for get_query_set where the former method is not available
in the base manager class.
Closes gh-48
|
Python
|
bsd-2-clause
|
GauthamGoli/django-organizations,DESHRAJ/django-organizations,bennylope/django-organizations,st8st8/django-organizations,GauthamGoli/django-organizations,DESHRAJ/django-organizations,bennylope/django-organizations,st8st8/django-organizations
|
from django.db import models
class OrgManager(models.Manager):
def get_for_user(self, user):
+ if hasattr(self, 'get_queryset'):
+ return self.get_queryset().filter(users=user)
+ else:
+ # Deprecated method for older versions of Django
- return self.get_query_set().filter(users=user)
+ return self.get_query_set().filter(users=user)
class ActiveOrgManager(OrgManager):
"""
A more useful extension of the default manager which returns querysets
including only active organizations
"""
- def get_query_set(self):
+ def get_queryset(self):
+ try:
- return super(ActiveOrgManager,
+ return super(ActiveOrgManager,
+ self).get_queryset().filter(is_active=True)
+ except AttributeError:
+ # Deprecated method for older versions of Django.
+ return super(ActiveOrgManager,
- self).get_query_set().filter(is_active=True)
+ self).get_query_set().filter(is_active=True)
+ get_query_set = get_queryset
|
Use get_queryset method by default
|
## Code Before:
from django.db import models
class OrgManager(models.Manager):
def get_for_user(self, user):
return self.get_query_set().filter(users=user)
class ActiveOrgManager(OrgManager):
"""
A more useful extension of the default manager which returns querysets
including only active organizations
"""
def get_query_set(self):
return super(ActiveOrgManager,
self).get_query_set().filter(is_active=True)
## Instruction:
Use get_queryset method by default
## Code After:
from django.db import models
class OrgManager(models.Manager):
def get_for_user(self, user):
if hasattr(self, 'get_queryset'):
return self.get_queryset().filter(users=user)
else:
# Deprecated method for older versions of Django
return self.get_query_set().filter(users=user)
class ActiveOrgManager(OrgManager):
"""
A more useful extension of the default manager which returns querysets
including only active organizations
"""
def get_queryset(self):
try:
return super(ActiveOrgManager,
self).get_queryset().filter(is_active=True)
except AttributeError:
# Deprecated method for older versions of Django.
return super(ActiveOrgManager,
self).get_query_set().filter(is_active=True)
get_query_set = get_queryset
|
from django.db import models
class OrgManager(models.Manager):
def get_for_user(self, user):
+ if hasattr(self, 'get_queryset'):
+ return self.get_queryset().filter(users=user)
+ else:
+ # Deprecated method for older versions of Django
- return self.get_query_set().filter(users=user)
+ return self.get_query_set().filter(users=user)
? ++++
class ActiveOrgManager(OrgManager):
"""
A more useful extension of the default manager which returns querysets
including only active organizations
"""
- def get_query_set(self):
? -
+ def get_queryset(self):
+ try:
- return super(ActiveOrgManager,
+ return super(ActiveOrgManager,
? ++++
+ self).get_queryset().filter(is_active=True)
+ except AttributeError:
+ # Deprecated method for older versions of Django.
+ return super(ActiveOrgManager,
- self).get_query_set().filter(is_active=True)
+ self).get_query_set().filter(is_active=True)
? ++++
+ get_query_set = get_queryset
|
7fad37d5a1121fe87db8946645043cd31a78b093
|
pi_gpio/events.py
|
pi_gpio/events.py
|
from pi_gpio import socketio
from config.pins import PinManager
class PinEventManager(PinManager):
def __init__(self):
super(PinEventManager, self).__init__()
self.socketio = socketio
self.edge = {
'RISING': self.gpio.RISING,
'FALLING': self.gpio.FALLING,
'BOTH': self.gpio.BOTH
}
def build_event_callback(self, num, name, event):
def event_callback(num):
data = {
'num': num,
'name': name,
'event': event
}
self.socketio.emit('pin:event', data)
print(data)
return event_callback
def register_gpio_events(self):
for num, config in self.pins.items():
event = config.get('event', None)
name = config.get('name', '')
if event:
edge = self.edge[event]
bounce = config['bounce']
cb = self.build_event_callback(num, name, event)
self.gpio.add_event_detect(num, edge, callback=cb, bouncetime=bounce)
|
from pi_gpio import socketio
from config.pins import PinManager
class PinEventManager(PinManager):
def __init__(self):
super(PinEventManager, self).__init__()
self.socketio = socketio
self.edge = {
'RISING': self.gpio.RISING,
'FALLING': self.gpio.FALLING,
'BOTH': self.gpio.BOTH
}
def build_event_callback(self, num, name, event):
def event_callback(num):
data = {
'num': num,
'name': name,
'event': event
}
self.socketio.emit('pin:event', data)
print(data)
return event_callback
def register_gpio_events(self):
for num, config in self.pins.items():
event = config.get('event', None)
name = config.get('name', '')
if event:
edge = self.edge[event]
bounce = config.get('bounce', -666)
cb = self.build_event_callback(num, name, event)
self.gpio.add_event_detect(num, edge, callback=cb, bouncetime=bounce)
|
Set the default bouncetime value to -666
|
Set the default bouncetime value to -666
Set the default bouncetime to -666 (the default value -666 is in Rpi.GPIO source code).
As-Is: if the bouncetime is not set, your setting for event detecting is silently down. And there is no notification that bouncetime is required.
|
Python
|
mit
|
projectweekend/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server
|
from pi_gpio import socketio
from config.pins import PinManager
class PinEventManager(PinManager):
def __init__(self):
super(PinEventManager, self).__init__()
self.socketio = socketio
self.edge = {
'RISING': self.gpio.RISING,
'FALLING': self.gpio.FALLING,
'BOTH': self.gpio.BOTH
}
def build_event_callback(self, num, name, event):
def event_callback(num):
data = {
'num': num,
'name': name,
'event': event
}
self.socketio.emit('pin:event', data)
print(data)
return event_callback
def register_gpio_events(self):
for num, config in self.pins.items():
event = config.get('event', None)
name = config.get('name', '')
if event:
edge = self.edge[event]
- bounce = config['bounce']
+ bounce = config.get('bounce', -666)
cb = self.build_event_callback(num, name, event)
self.gpio.add_event_detect(num, edge, callback=cb, bouncetime=bounce)
|
Set the default bouncetime value to -666
|
## Code Before:
from pi_gpio import socketio
from config.pins import PinManager
class PinEventManager(PinManager):
def __init__(self):
super(PinEventManager, self).__init__()
self.socketio = socketio
self.edge = {
'RISING': self.gpio.RISING,
'FALLING': self.gpio.FALLING,
'BOTH': self.gpio.BOTH
}
def build_event_callback(self, num, name, event):
def event_callback(num):
data = {
'num': num,
'name': name,
'event': event
}
self.socketio.emit('pin:event', data)
print(data)
return event_callback
def register_gpio_events(self):
for num, config in self.pins.items():
event = config.get('event', None)
name = config.get('name', '')
if event:
edge = self.edge[event]
bounce = config['bounce']
cb = self.build_event_callback(num, name, event)
self.gpio.add_event_detect(num, edge, callback=cb, bouncetime=bounce)
## Instruction:
Set the default bouncetime value to -666
## Code After:
from pi_gpio import socketio
from config.pins import PinManager
class PinEventManager(PinManager):
def __init__(self):
super(PinEventManager, self).__init__()
self.socketio = socketio
self.edge = {
'RISING': self.gpio.RISING,
'FALLING': self.gpio.FALLING,
'BOTH': self.gpio.BOTH
}
def build_event_callback(self, num, name, event):
def event_callback(num):
data = {
'num': num,
'name': name,
'event': event
}
self.socketio.emit('pin:event', data)
print(data)
return event_callback
def register_gpio_events(self):
for num, config in self.pins.items():
event = config.get('event', None)
name = config.get('name', '')
if event:
edge = self.edge[event]
bounce = config.get('bounce', -666)
cb = self.build_event_callback(num, name, event)
self.gpio.add_event_detect(num, edge, callback=cb, bouncetime=bounce)
|
from pi_gpio import socketio
from config.pins import PinManager
class PinEventManager(PinManager):
def __init__(self):
super(PinEventManager, self).__init__()
self.socketio = socketio
self.edge = {
'RISING': self.gpio.RISING,
'FALLING': self.gpio.FALLING,
'BOTH': self.gpio.BOTH
}
def build_event_callback(self, num, name, event):
def event_callback(num):
data = {
'num': num,
'name': name,
'event': event
}
self.socketio.emit('pin:event', data)
print(data)
return event_callback
def register_gpio_events(self):
for num, config in self.pins.items():
event = config.get('event', None)
name = config.get('name', '')
if event:
edge = self.edge[event]
- bounce = config['bounce']
? ^ ^
+ bounce = config.get('bounce', -666)
? ^^^^^ ^^^^^^^
cb = self.build_event_callback(num, name, event)
self.gpio.add_event_detect(num, edge, callback=cb, bouncetime=bounce)
|
c129b435a7759104feaaa5b828dc2f2ac46d5ab1
|
src/cmdlinetest/afp_mock.py
|
src/cmdlinetest/afp_mock.py
|
from bottle import route
from textwrap import dedent
from bottledaemon import daemon_run
""" Simple AFP mock to allow testing the afp-cli. """
@route('/account')
def account():
return """{"test_account": ["test_role"]}"""
@route('/account/<account>/<role>')
def credentials(account, role):
return dedent("""
{"Code": "Success",
"LastUpdated": "1970-01-01T00:00:00Z",
"AccessKeyId": "XXXXXXXXXXXX",
"SecretAccessKey": "XXXXXXXXXXXX",
"Token": "XXXXXXXXXXXX",
"Expiration": "2032-01-01T00:00:00Z",
"Type": "AWS-HMAC"}""").strip()
daemon_run(host='localhost', port=5555)
|
""" Simple AFP mock to allow testing the afp-cli. """
from bottle import route
from textwrap import dedent
from bottledaemon import daemon_run
@route('/account')
def account():
return """{"test_account": ["test_role"]}"""
@route('/account/<account>/<role>')
def credentials(account, role):
return dedent("""
{"Code": "Success",
"LastUpdated": "1970-01-01T00:00:00Z",
"AccessKeyId": "XXXXXXXXXXXX",
"SecretAccessKey": "XXXXXXXXXXXX",
"Token": "XXXXXXXXXXXX",
"Expiration": "2032-01-01T00:00:00Z",
"Type": "AWS-HMAC"}""").strip()
daemon_run(host='localhost', port=5555)
|
Move string above the imports so it becomes a docstring
|
Move string above the imports so it becomes a docstring
|
Python
|
apache-2.0
|
ImmobilienScout24/afp-cli,ImmobilienScout24/afp-cli,ImmobilienScout24/afp-cli
|
+ """ Simple AFP mock to allow testing the afp-cli. """
+
from bottle import route
from textwrap import dedent
from bottledaemon import daemon_run
-
- """ Simple AFP mock to allow testing the afp-cli. """
@route('/account')
def account():
return """{"test_account": ["test_role"]}"""
@route('/account/<account>/<role>')
def credentials(account, role):
return dedent("""
{"Code": "Success",
"LastUpdated": "1970-01-01T00:00:00Z",
"AccessKeyId": "XXXXXXXXXXXX",
"SecretAccessKey": "XXXXXXXXXXXX",
"Token": "XXXXXXXXXXXX",
"Expiration": "2032-01-01T00:00:00Z",
"Type": "AWS-HMAC"}""").strip()
daemon_run(host='localhost', port=5555)
|
Move string above the imports so it becomes a docstring
|
## Code Before:
from bottle import route
from textwrap import dedent
from bottledaemon import daemon_run
""" Simple AFP mock to allow testing the afp-cli. """
@route('/account')
def account():
return """{"test_account": ["test_role"]}"""
@route('/account/<account>/<role>')
def credentials(account, role):
return dedent("""
{"Code": "Success",
"LastUpdated": "1970-01-01T00:00:00Z",
"AccessKeyId": "XXXXXXXXXXXX",
"SecretAccessKey": "XXXXXXXXXXXX",
"Token": "XXXXXXXXXXXX",
"Expiration": "2032-01-01T00:00:00Z",
"Type": "AWS-HMAC"}""").strip()
daemon_run(host='localhost', port=5555)
## Instruction:
Move string above the imports so it becomes a docstring
## Code After:
""" Simple AFP mock to allow testing the afp-cli. """
from bottle import route
from textwrap import dedent
from bottledaemon import daemon_run
@route('/account')
def account():
return """{"test_account": ["test_role"]}"""
@route('/account/<account>/<role>')
def credentials(account, role):
return dedent("""
{"Code": "Success",
"LastUpdated": "1970-01-01T00:00:00Z",
"AccessKeyId": "XXXXXXXXXXXX",
"SecretAccessKey": "XXXXXXXXXXXX",
"Token": "XXXXXXXXXXXX",
"Expiration": "2032-01-01T00:00:00Z",
"Type": "AWS-HMAC"}""").strip()
daemon_run(host='localhost', port=5555)
|
+ """ Simple AFP mock to allow testing the afp-cli. """
+
from bottle import route
from textwrap import dedent
from bottledaemon import daemon_run
-
- """ Simple AFP mock to allow testing the afp-cli. """
@route('/account')
def account():
return """{"test_account": ["test_role"]}"""
@route('/account/<account>/<role>')
def credentials(account, role):
return dedent("""
{"Code": "Success",
"LastUpdated": "1970-01-01T00:00:00Z",
"AccessKeyId": "XXXXXXXXXXXX",
"SecretAccessKey": "XXXXXXXXXXXX",
"Token": "XXXXXXXXXXXX",
"Expiration": "2032-01-01T00:00:00Z",
"Type": "AWS-HMAC"}""").strip()
daemon_run(host='localhost', port=5555)
|
b545ebcd2b604bf293bfbbb1af5a9ab2ba6965c7
|
wayback3/wayback3.py
|
wayback3/wayback3.py
|
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
|
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
WAYBACK_URL_ROOT = "http://web.archive.org"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
|
Add a constant for WB root URL
|
Add a constant for WB root URL
|
Python
|
agpl-3.0
|
OpenSSR/openssr-parser,OpenSSR/openssr-parser
|
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
+ WAYBACK_URL_ROOT = "http://web.archive.org"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
|
Add a constant for WB root URL
|
## Code Before:
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
## Instruction:
Add a constant for WB root URL
## Code After:
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
WAYBACK_URL_ROOT = "http://web.archive.org"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
|
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
+ WAYBACK_URL_ROOT = "http://web.archive.org"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
|
aee157ce27aa4f00a798b87e07583dc795265eb4
|
methodAndKnottiness/reliability.py
|
methodAndKnottiness/reliability.py
|
import sys
count = 0
for line in sys.stdin:
if count == 0:
B = int(line)
elif count == 1:
N = int(line)
else:
c, r = line.rstrip().split(' ')
cost = int(c)
reliability = float(r)
count+=1
print("Fin")
|
import sys, math
count = 0
cost=[]
reliability=[]
# Read input. Budget, number of machines, cost and reliability
for line in sys.stdin:
if count == 0:
B = int(line)
elif count == 1:
N = int(line)
else:
c, r = line.rstrip().split(' ')
cost.append(int(c))
reliability.append(float(r))
count+=1
M = [[0 for i in range(B)] for i in range(N)]
for i in range(B):
M[0][i]=1
print(cost)
#for i in range(1,N):
for i in range(1,3):
for b in range(0,B):
max = 0
# break
for k in range(0, math.floor(b/cost[i])):
m = M[i-1][b-k*cost[i]]*(1-reliability[i])**k
if m > max:
max = m
print("new max",max)
print("Budget:", B)
print("Number machines:", N)
# print("\nIterated Version:")
# print(M[0:3])
print("Fin")
|
Save point, got code written but need to figure out the base probabilities
|
Save point, got code written but need to figure out the base probabilities
|
Python
|
mit
|
scrasmussen/ProsaicOeuvre,scrasmussen/ProsaicOeuvre,scrasmussen/ProsaicOeuvre
|
- import sys
+ import sys, math
count = 0
+ cost=[]
+ reliability=[]
+
+ # Read input. Budget, number of machines, cost and reliability
for line in sys.stdin:
if count == 0:
B = int(line)
elif count == 1:
N = int(line)
else:
c, r = line.rstrip().split(' ')
- cost = int(c)
+ cost.append(int(c))
- reliability = float(r)
+ reliability.append(float(r))
-
+
count+=1
+ M = [[0 for i in range(B)] for i in range(N)]
+ for i in range(B):
+ M[0][i]=1
+
+
+ print(cost)
+ #for i in range(1,N):
+ for i in range(1,3):
+ for b in range(0,B):
+ max = 0
+ # break
+ for k in range(0, math.floor(b/cost[i])):
+ m = M[i-1][b-k*cost[i]]*(1-reliability[i])**k
+ if m > max:
+ max = m
+ print("new max",max)
+
+ print("Budget:", B)
+ print("Number machines:", N)
+ # print("\nIterated Version:")
+ # print(M[0:3])
print("Fin")
|
Save point, got code written but need to figure out the base probabilities
|
## Code Before:
import sys
count = 0
for line in sys.stdin:
if count == 0:
B = int(line)
elif count == 1:
N = int(line)
else:
c, r = line.rstrip().split(' ')
cost = int(c)
reliability = float(r)
count+=1
print("Fin")
## Instruction:
Save point, got code written but need to figure out the base probabilities
## Code After:
import sys, math
count = 0
cost=[]
reliability=[]
# Read input. Budget, number of machines, cost and reliability
for line in sys.stdin:
if count == 0:
B = int(line)
elif count == 1:
N = int(line)
else:
c, r = line.rstrip().split(' ')
cost.append(int(c))
reliability.append(float(r))
count+=1
M = [[0 for i in range(B)] for i in range(N)]
for i in range(B):
M[0][i]=1
print(cost)
#for i in range(1,N):
for i in range(1,3):
for b in range(0,B):
max = 0
# break
for k in range(0, math.floor(b/cost[i])):
m = M[i-1][b-k*cost[i]]*(1-reliability[i])**k
if m > max:
max = m
print("new max",max)
print("Budget:", B)
print("Number machines:", N)
# print("\nIterated Version:")
# print(M[0:3])
print("Fin")
|
- import sys
+ import sys, math
? ++++++
count = 0
+ cost=[]
+ reliability=[]
+
+ # Read input. Budget, number of machines, cost and reliability
for line in sys.stdin:
if count == 0:
B = int(line)
elif count == 1:
N = int(line)
else:
c, r = line.rstrip().split(' ')
- cost = int(c)
? ^^^
+ cost.append(int(c))
? ^^^^^^^^ +
- reliability = float(r)
? ^^^
+ reliability.append(float(r))
? ^^^^^^^^ +
-
+
count+=1
+ M = [[0 for i in range(B)] for i in range(N)]
+ for i in range(B):
+ M[0][i]=1
+
+
+ print(cost)
+ #for i in range(1,N):
+ for i in range(1,3):
+ for b in range(0,B):
+ max = 0
+ # break
+ for k in range(0, math.floor(b/cost[i])):
+ m = M[i-1][b-k*cost[i]]*(1-reliability[i])**k
+ if m > max:
+ max = m
+ print("new max",max)
+
+ print("Budget:", B)
+ print("Number machines:", N)
+ # print("\nIterated Version:")
+ # print(M[0:3])
print("Fin")
|
a0740ec8373a3a178e3e83b4ec2768621c697181
|
versions/rattoolsversions.py
|
versions/rattoolsversions.py
|
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
|
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools42(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools42, self).__init__("rattools-4.2", system, "root-5.34.02", "rat-4.2",
"release-4.20")
class RatTools41(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools41, self).__init__("rattools-4.1", system, "root-5.34.02", "rat-4.1",
"release-4.10")
class RatTools4(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools4, self).__init__("rattools-4", system, "root-5.32.04", "rat-4",
"release-4.00")
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
|
Add fixed release rat-tools versions 4, 4.1, 4.2
|
Add fixed release rat-tools versions 4, 4.1, 4.2
|
Python
|
mit
|
mjmottram/snoing,mjmottram/snoing
|
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
+ class RatTools42(rattools.RatToolsRelease):
+ def __init__(self, system):
+ """ Initialise an arbitrary snaphot version."""
+ super(RatTools42, self).__init__("rattools-4.2", system, "root-5.34.02", "rat-4.2",
+ "release-4.20")
+
+ class RatTools41(rattools.RatToolsRelease):
+ def __init__(self, system):
+ """ Initialise an arbitrary snaphot version."""
+ super(RatTools41, self).__init__("rattools-4.1", system, "root-5.34.02", "rat-4.1",
+ "release-4.10")
+ class RatTools4(rattools.RatToolsRelease):
+ def __init__(self, system):
+ """ Initialise an arbitrary snaphot version."""
+ super(RatTools4, self).__init__("rattools-4", system, "root-5.32.04", "rat-4",
+ "release-4.00")
+
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
|
Add fixed release rat-tools versions 4, 4.1, 4.2
|
## Code Before:
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
## Instruction:
Add fixed release rat-tools versions 4, 4.1, 4.2
## Code After:
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools42(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools42, self).__init__("rattools-4.2", system, "root-5.34.02", "rat-4.2",
"release-4.20")
class RatTools41(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools41, self).__init__("rattools-4.1", system, "root-5.34.02", "rat-4.1",
"release-4.10")
class RatTools4(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools4, self).__init__("rattools-4", system, "root-5.32.04", "rat-4",
"release-4.00")
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
|
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
+ class RatTools42(rattools.RatToolsRelease):
+ def __init__(self, system):
+ """ Initialise an arbitrary snaphot version."""
+ super(RatTools42, self).__init__("rattools-4.2", system, "root-5.34.02", "rat-4.2",
+ "release-4.20")
+
+ class RatTools41(rattools.RatToolsRelease):
+ def __init__(self, system):
+ """ Initialise an arbitrary snaphot version."""
+ super(RatTools41, self).__init__("rattools-4.1", system, "root-5.34.02", "rat-4.1",
+ "release-4.10")
+ class RatTools4(rattools.RatToolsRelease):
+ def __init__(self, system):
+ """ Initialise an arbitrary snaphot version."""
+ super(RatTools4, self).__init__("rattools-4", system, "root-5.32.04", "rat-4",
+ "release-4.00")
+
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
|
adcb7af597c77d85eb9234d91e2c0bd8575630e1
|
fcm_django/api/__init__.py
|
fcm_django/api/__init__.py
|
from django.conf import settings
if "tastypie" in settings.INSTALLED_APPS:
# Tastypie resources are importable from the api package level (backwards compatibility)
from .tastypie import APNSDeviceResource, GCMDeviceResource, WNSDeviceResource, APNSDeviceAuthenticatedResource, \
GCMDeviceAuthenticatedResource, WNSDeviceAuthenticatedResource
__all__ = [
"APNSDeviceResource",
"GCMDeviceResource",
"WNSDeviceResource",
"APNSDeviceAuthenticatedResource",
"GCMDeviceAuthenticatedResource",
"WNSDeviceAuthenticatedResource",
]
|
from django.conf import settings
if "tastypie" in settings.INSTALLED_APPS:
# Tastypie resources are importable from the api package level (backwards compatibility)
from .tastypie import APNSDeviceAuthenticatedResource, FCMDeviceResource
__all__ = [
"APNSDeviceAuthenticatedResource",
"FCMDeviceResource",
]
|
Remove references to old resources
|
Remove references to old resources
|
Python
|
mit
|
xtrinch/fcm-django
|
from django.conf import settings
if "tastypie" in settings.INSTALLED_APPS:
# Tastypie resources are importable from the api package level (backwards compatibility)
+ from .tastypie import APNSDeviceAuthenticatedResource, FCMDeviceResource
- from .tastypie import APNSDeviceResource, GCMDeviceResource, WNSDeviceResource, APNSDeviceAuthenticatedResource, \
- GCMDeviceAuthenticatedResource, WNSDeviceAuthenticatedResource
__all__ = [
- "APNSDeviceResource",
- "GCMDeviceResource",
- "WNSDeviceResource",
"APNSDeviceAuthenticatedResource",
+ "FCMDeviceResource",
- "GCMDeviceAuthenticatedResource",
- "WNSDeviceAuthenticatedResource",
]
|
Remove references to old resources
|
## Code Before:
from django.conf import settings
if "tastypie" in settings.INSTALLED_APPS:
# Tastypie resources are importable from the api package level (backwards compatibility)
from .tastypie import APNSDeviceResource, GCMDeviceResource, WNSDeviceResource, APNSDeviceAuthenticatedResource, \
GCMDeviceAuthenticatedResource, WNSDeviceAuthenticatedResource
__all__ = [
"APNSDeviceResource",
"GCMDeviceResource",
"WNSDeviceResource",
"APNSDeviceAuthenticatedResource",
"GCMDeviceAuthenticatedResource",
"WNSDeviceAuthenticatedResource",
]
## Instruction:
Remove references to old resources
## Code After:
from django.conf import settings
if "tastypie" in settings.INSTALLED_APPS:
# Tastypie resources are importable from the api package level (backwards compatibility)
from .tastypie import APNSDeviceAuthenticatedResource, FCMDeviceResource
__all__ = [
"APNSDeviceAuthenticatedResource",
"FCMDeviceResource",
]
|
from django.conf import settings
if "tastypie" in settings.INSTALLED_APPS:
# Tastypie resources are importable from the api package level (backwards compatibility)
+ from .tastypie import APNSDeviceAuthenticatedResource, FCMDeviceResource
- from .tastypie import APNSDeviceResource, GCMDeviceResource, WNSDeviceResource, APNSDeviceAuthenticatedResource, \
- GCMDeviceAuthenticatedResource, WNSDeviceAuthenticatedResource
__all__ = [
- "APNSDeviceResource",
- "GCMDeviceResource",
- "WNSDeviceResource",
"APNSDeviceAuthenticatedResource",
+ "FCMDeviceResource",
- "GCMDeviceAuthenticatedResource",
- "WNSDeviceAuthenticatedResource",
]
|
9cf6e843eeb865eeaf90e4023bdccd1325e74535
|
test_rle.py
|
test_rle.py
|
import pypolycomp
import numpy as np
def test_compression():
for cur_type in (np.int8, np.int16, np.int32, np.int64,
np.uint8, np.uint16, np.uint32, np.uint64):
compressed = pypolycomp.rle_compress(np.array([1, 1, 1, 2, 3], dtype=cur_type))
assert np.all(compressed == np.array([3, 1, 1, 2, 1, 3], dtype=cur_type))
|
import pypolycomp
import numpy as np
def test_compression():
for cur_type in (np.int8, np.int16, np.int32, np.int64,
np.uint8, np.uint16, np.uint32, np.uint64):
compressed = pypolycomp.rle_compress(np.array([1, 1, 1, 2, 3], dtype=cur_type))
assert np.all(compressed == np.array([3, 1, 1, 2, 1, 3], dtype=cur_type))
def test_decompression():
for cur_type in (np.int8, np.int16, np.int32, np.int64,
np.uint8, np.uint16, np.uint32, np.uint64):
input_values = np.array(np.random.randint(100, size=1000),
dtype=cur_type)
compressed = pypolycomp.rle_compress(input_values)
output_values = pypolycomp.rle_decompress(compressed)
assert np.all(input_values == output_values)
|
Add test for RLE decompression
|
Add test for RLE decompression
|
Python
|
bsd-3-clause
|
ziotom78/polycomp
|
import pypolycomp
import numpy as np
def test_compression():
for cur_type in (np.int8, np.int16, np.int32, np.int64,
np.uint8, np.uint16, np.uint32, np.uint64):
compressed = pypolycomp.rle_compress(np.array([1, 1, 1, 2, 3], dtype=cur_type))
assert np.all(compressed == np.array([3, 1, 1, 2, 1, 3], dtype=cur_type))
+ def test_decompression():
+ for cur_type in (np.int8, np.int16, np.int32, np.int64,
+ np.uint8, np.uint16, np.uint32, np.uint64):
+ input_values = np.array(np.random.randint(100, size=1000),
+ dtype=cur_type)
+ compressed = pypolycomp.rle_compress(input_values)
+ output_values = pypolycomp.rle_decompress(compressed)
+ assert np.all(input_values == output_values)
|
Add test for RLE decompression
|
## Code Before:
import pypolycomp
import numpy as np
def test_compression():
for cur_type in (np.int8, np.int16, np.int32, np.int64,
np.uint8, np.uint16, np.uint32, np.uint64):
compressed = pypolycomp.rle_compress(np.array([1, 1, 1, 2, 3], dtype=cur_type))
assert np.all(compressed == np.array([3, 1, 1, 2, 1, 3], dtype=cur_type))
## Instruction:
Add test for RLE decompression
## Code After:
import pypolycomp
import numpy as np
def test_compression():
for cur_type in (np.int8, np.int16, np.int32, np.int64,
np.uint8, np.uint16, np.uint32, np.uint64):
compressed = pypolycomp.rle_compress(np.array([1, 1, 1, 2, 3], dtype=cur_type))
assert np.all(compressed == np.array([3, 1, 1, 2, 1, 3], dtype=cur_type))
def test_decompression():
for cur_type in (np.int8, np.int16, np.int32, np.int64,
np.uint8, np.uint16, np.uint32, np.uint64):
input_values = np.array(np.random.randint(100, size=1000),
dtype=cur_type)
compressed = pypolycomp.rle_compress(input_values)
output_values = pypolycomp.rle_decompress(compressed)
assert np.all(input_values == output_values)
|
import pypolycomp
import numpy as np
def test_compression():
for cur_type in (np.int8, np.int16, np.int32, np.int64,
np.uint8, np.uint16, np.uint32, np.uint64):
compressed = pypolycomp.rle_compress(np.array([1, 1, 1, 2, 3], dtype=cur_type))
assert np.all(compressed == np.array([3, 1, 1, 2, 1, 3], dtype=cur_type))
+ def test_decompression():
+ for cur_type in (np.int8, np.int16, np.int32, np.int64,
+ np.uint8, np.uint16, np.uint32, np.uint64):
+ input_values = np.array(np.random.randint(100, size=1000),
+ dtype=cur_type)
+ compressed = pypolycomp.rle_compress(input_values)
+ output_values = pypolycomp.rle_decompress(compressed)
+ assert np.all(input_values == output_values)
|
ebc5d1024c45a12595507fa1caa0bfc6353a9a32
|
c2cgeoportal/views/echo.py
|
c2cgeoportal/views/echo.py
|
import os.path
import re
from pyramid.httpexceptions import HTTPBadRequest
from pyramid.response import Response
from pyramid.view import view_config
@view_config(route_name='echo')
def echo(request):
if request.method != 'POST':
raise HTTPBadRequest()
try:
file = request.POST['file']
except KeyError:
raise HTTPBadRequest()
response = Response()
response.app_iter = file.file
response.content_type = 'application/octet-stream'
return response
|
from base64 import b64encode
import os.path
import re
from pyramid.httpexceptions import HTTPBadRequest
from pyramid.response import Response
from pyramid.view import view_config
def base64_encode_chunks(file, chunk_size=57):
"""
Generate base64 encoded lines of up to 76 (== 57 * 8 / 6) characters, according to RFC2045.
See http://en.wikipedia.org/wiki/Base64
"""
while True:
line = file.read(chunk_size)
if not line:
break
yield b64encode(line) + '\n'
@view_config(route_name='echo')
def echo(request):
"""
Echo an uploaded file back to the client as an text/html document so it can be handled by Ext.
The response is base64 encoded to ensure that there are no special HTML characters or charset problems.
See http://docs.sencha.com/ext-js/3-4/#!/api/Ext.form.BasicForm-cfg-fileUpload
"""
if request.method != 'POST':
raise HTTPBadRequest()
try:
file = request.POST['file']
except KeyError:
raise HTTPBadRequest()
response = Response()
response.app_iter = base64_encode_chunks(file.file)
response.content_type = 'text/html'
return response
|
Return a base64 text/html response instead of a binary response
|
Return a base64 text/html response instead of a binary response
|
Python
|
bsd-2-clause
|
tsauerwein/c2cgeoportal,tsauerwein/c2cgeoportal,tsauerwein/c2cgeoportal,tsauerwein/c2cgeoportal
|
+ from base64 import b64encode
import os.path
import re
from pyramid.httpexceptions import HTTPBadRequest
from pyramid.response import Response
from pyramid.view import view_config
+ def base64_encode_chunks(file, chunk_size=57):
+ """
+ Generate base64 encoded lines of up to 76 (== 57 * 8 / 6) characters, according to RFC2045.
+ See http://en.wikipedia.org/wiki/Base64
+ """
+ while True:
+ line = file.read(chunk_size)
+ if not line:
+ break
+ yield b64encode(line) + '\n'
+
+
@view_config(route_name='echo')
def echo(request):
+ """
+ Echo an uploaded file back to the client as an text/html document so it can be handled by Ext.
+ The response is base64 encoded to ensure that there are no special HTML characters or charset problems.
+ See http://docs.sencha.com/ext-js/3-4/#!/api/Ext.form.BasicForm-cfg-fileUpload
+ """
if request.method != 'POST':
raise HTTPBadRequest()
try:
file = request.POST['file']
except KeyError:
raise HTTPBadRequest()
response = Response()
- response.app_iter = file.file
+ response.app_iter = base64_encode_chunks(file.file)
- response.content_type = 'application/octet-stream'
+ response.content_type = 'text/html'
return response
|
Return a base64 text/html response instead of a binary response
|
## Code Before:
import os.path
import re
from pyramid.httpexceptions import HTTPBadRequest
from pyramid.response import Response
from pyramid.view import view_config
@view_config(route_name='echo')
def echo(request):
if request.method != 'POST':
raise HTTPBadRequest()
try:
file = request.POST['file']
except KeyError:
raise HTTPBadRequest()
response = Response()
response.app_iter = file.file
response.content_type = 'application/octet-stream'
return response
## Instruction:
Return a base64 text/html response instead of a binary response
## Code After:
from base64 import b64encode
import os.path
import re
from pyramid.httpexceptions import HTTPBadRequest
from pyramid.response import Response
from pyramid.view import view_config
def base64_encode_chunks(file, chunk_size=57):
"""
Generate base64 encoded lines of up to 76 (== 57 * 8 / 6) characters, according to RFC2045.
See http://en.wikipedia.org/wiki/Base64
"""
while True:
line = file.read(chunk_size)
if not line:
break
yield b64encode(line) + '\n'
@view_config(route_name='echo')
def echo(request):
"""
Echo an uploaded file back to the client as an text/html document so it can be handled by Ext.
The response is base64 encoded to ensure that there are no special HTML characters or charset problems.
See http://docs.sencha.com/ext-js/3-4/#!/api/Ext.form.BasicForm-cfg-fileUpload
"""
if request.method != 'POST':
raise HTTPBadRequest()
try:
file = request.POST['file']
except KeyError:
raise HTTPBadRequest()
response = Response()
response.app_iter = base64_encode_chunks(file.file)
response.content_type = 'text/html'
return response
|
+ from base64 import b64encode
import os.path
import re
from pyramid.httpexceptions import HTTPBadRequest
from pyramid.response import Response
from pyramid.view import view_config
+ def base64_encode_chunks(file, chunk_size=57):
+ """
+ Generate base64 encoded lines of up to 76 (== 57 * 8 / 6) characters, according to RFC2045.
+ See http://en.wikipedia.org/wiki/Base64
+ """
+ while True:
+ line = file.read(chunk_size)
+ if not line:
+ break
+ yield b64encode(line) + '\n'
+
+
@view_config(route_name='echo')
def echo(request):
+ """
+ Echo an uploaded file back to the client as an text/html document so it can be handled by Ext.
+ The response is base64 encoded to ensure that there are no special HTML characters or charset problems.
+ See http://docs.sencha.com/ext-js/3-4/#!/api/Ext.form.BasicForm-cfg-fileUpload
+ """
if request.method != 'POST':
raise HTTPBadRequest()
try:
file = request.POST['file']
except KeyError:
raise HTTPBadRequest()
response = Response()
- response.app_iter = file.file
+ response.app_iter = base64_encode_chunks(file.file)
? +++++++++++++++++++++ +
- response.content_type = 'application/octet-stream'
? -------------- ^^ ---
+ response.content_type = 'text/html'
? + ^^ +
return response
|
494d35234e30d368a9539910ff3ad6d45ed73125
|
containers/containers.py
|
containers/containers.py
|
try:
from urllib import urlretrieve
except ImportError:
from urllib.request import urlretrieve
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import os
def simple_discovery(path, var=None, secure=True):
if secure:
protocol = 'https'
else:
protocol = 'http'
url = '{protocol}://{path}.aci'.format(path=path, protocol=protocol)
parsed = urlparse(url)
_, local_file = os.path.split(parsed.path)
if var is not None:
local_file = os.path.join(var, local_file)
urlretrieve(url, local_file)
return local_file
class AppContainer(object):
def __init__(self, path=None):
self.path = path
|
try:
from urllib import urlretrieve
except ImportError:
from urllib.request import urlretrieve
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import os
def simple_discovery(name, var=None, secure=True):
'''Perform simple discovery and save the discovered ACI locally.
:param name: Name of app.
:type name: str.
:param var: Directory to save app to.
:type var: str.
:param secure: Choose to use HTTPS or HTTP.
:type secure: bool.
:returns: str -- the name of the ACI.
'''
if secure:
protocol = 'https'
else:
protocol = 'http'
url = '{protocol}://{path}.aci'.format(path=path, protocol=protocol)
parsed = urlparse(url)
_, local_file = os.path.split(parsed.path)
if var is not None:
local_file = os.path.join(var, local_file)
urlretrieve(url, local_file)
return local_file
class AppContainer(object):
def __init__(self, path=None):
self.path = path
|
Add better docstring to simple_discovery
|
Add better docstring to simple_discovery
|
Python
|
mit
|
kragniz/containers
|
try:
from urllib import urlretrieve
except ImportError:
from urllib.request import urlretrieve
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import os
- def simple_discovery(path, var=None, secure=True):
+ def simple_discovery(name, var=None, secure=True):
+ '''Perform simple discovery and save the discovered ACI locally.
+
+ :param name: Name of app.
+ :type name: str.
+ :param var: Directory to save app to.
+ :type var: str.
+ :param secure: Choose to use HTTPS or HTTP.
+ :type secure: bool.
+ :returns: str -- the name of the ACI.
+ '''
if secure:
protocol = 'https'
else:
protocol = 'http'
url = '{protocol}://{path}.aci'.format(path=path, protocol=protocol)
parsed = urlparse(url)
_, local_file = os.path.split(parsed.path)
if var is not None:
local_file = os.path.join(var, local_file)
urlretrieve(url, local_file)
return local_file
class AppContainer(object):
def __init__(self, path=None):
self.path = path
|
Add better docstring to simple_discovery
|
## Code Before:
try:
from urllib import urlretrieve
except ImportError:
from urllib.request import urlretrieve
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import os
def simple_discovery(path, var=None, secure=True):
if secure:
protocol = 'https'
else:
protocol = 'http'
url = '{protocol}://{path}.aci'.format(path=path, protocol=protocol)
parsed = urlparse(url)
_, local_file = os.path.split(parsed.path)
if var is not None:
local_file = os.path.join(var, local_file)
urlretrieve(url, local_file)
return local_file
class AppContainer(object):
def __init__(self, path=None):
self.path = path
## Instruction:
Add better docstring to simple_discovery
## Code After:
try:
from urllib import urlretrieve
except ImportError:
from urllib.request import urlretrieve
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import os
def simple_discovery(name, var=None, secure=True):
'''Perform simple discovery and save the discovered ACI locally.
:param name: Name of app.
:type name: str.
:param var: Directory to save app to.
:type var: str.
:param secure: Choose to use HTTPS or HTTP.
:type secure: bool.
:returns: str -- the name of the ACI.
'''
if secure:
protocol = 'https'
else:
protocol = 'http'
url = '{protocol}://{path}.aci'.format(path=path, protocol=protocol)
parsed = urlparse(url)
_, local_file = os.path.split(parsed.path)
if var is not None:
local_file = os.path.join(var, local_file)
urlretrieve(url, local_file)
return local_file
class AppContainer(object):
def __init__(self, path=None):
self.path = path
|
try:
from urllib import urlretrieve
except ImportError:
from urllib.request import urlretrieve
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import os
- def simple_discovery(path, var=None, secure=True):
? ^ ^^
+ def simple_discovery(name, var=None, secure=True):
? ^ ^^
+ '''Perform simple discovery and save the discovered ACI locally.
+
+ :param name: Name of app.
+ :type name: str.
+ :param var: Directory to save app to.
+ :type var: str.
+ :param secure: Choose to use HTTPS or HTTP.
+ :type secure: bool.
+ :returns: str -- the name of the ACI.
+ '''
if secure:
protocol = 'https'
else:
protocol = 'http'
url = '{protocol}://{path}.aci'.format(path=path, protocol=protocol)
parsed = urlparse(url)
_, local_file = os.path.split(parsed.path)
if var is not None:
local_file = os.path.join(var, local_file)
urlretrieve(url, local_file)
return local_file
class AppContainer(object):
def __init__(self, path=None):
self.path = path
|
0232afac110e2cf9f841e861bd9622bcaf79616a
|
tensorbayes/distributions.py
|
tensorbayes/distributions.py
|
import tensorflow as tf
import numpy as np
def log_bernoulli(x, logits, eps=0.0, axis=-1):
return log_bernoulli_with_logits(x, logits, eps, axis)
def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val,
name='clipped_logit')
return -tf.reduce_sum(
tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis)
def log_normal(x, mu, var, eps=0.0, axis=-1):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(
tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis)
|
import tensorflow as tf
import numpy as np
def log_bernoulli(x, logits, eps=0.0, axis=-1):
return log_bernoulli_with_logits(x, logits, eps, axis)
def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val,
name='clipped_logit')
return -tf.reduce_sum(
tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis)
def log_normal(x, mu, var, eps=0.0, axis=-1):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(
tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis)
def kl_normal(qm, qv, pm, pv, eps=0.0, axis=-1):
if eps > 0.0:
qv = tf.add(qv, eps, name='clipped_var1')
pv = tf.add(qv, eps, name='clipped_var2')
return 0.5 * tf.reduce_sum(tf.log(pv) - tf.log(qv) + qv / pv +
tf.square(qm - pm) / pv - 1, axis=-1)
|
Add tf implementation of KL between normals
|
Add tf implementation of KL between normals
|
Python
|
mit
|
RuiShu/tensorbayes
|
import tensorflow as tf
import numpy as np
def log_bernoulli(x, logits, eps=0.0, axis=-1):
return log_bernoulli_with_logits(x, logits, eps, axis)
def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val,
name='clipped_logit')
return -tf.reduce_sum(
tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis)
def log_normal(x, mu, var, eps=0.0, axis=-1):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(
tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis)
+ def kl_normal(qm, qv, pm, pv, eps=0.0, axis=-1):
+ if eps > 0.0:
+ qv = tf.add(qv, eps, name='clipped_var1')
+ pv = tf.add(qv, eps, name='clipped_var2')
+
+ return 0.5 * tf.reduce_sum(tf.log(pv) - tf.log(qv) + qv / pv +
+ tf.square(qm - pm) / pv - 1, axis=-1)
+
|
Add tf implementation of KL between normals
|
## Code Before:
import tensorflow as tf
import numpy as np
def log_bernoulli(x, logits, eps=0.0, axis=-1):
return log_bernoulli_with_logits(x, logits, eps, axis)
def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val,
name='clipped_logit')
return -tf.reduce_sum(
tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis)
def log_normal(x, mu, var, eps=0.0, axis=-1):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(
tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis)
## Instruction:
Add tf implementation of KL between normals
## Code After:
import tensorflow as tf
import numpy as np
def log_bernoulli(x, logits, eps=0.0, axis=-1):
return log_bernoulli_with_logits(x, logits, eps, axis)
def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val,
name='clipped_logit')
return -tf.reduce_sum(
tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis)
def log_normal(x, mu, var, eps=0.0, axis=-1):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(
tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis)
def kl_normal(qm, qv, pm, pv, eps=0.0, axis=-1):
if eps > 0.0:
qv = tf.add(qv, eps, name='clipped_var1')
pv = tf.add(qv, eps, name='clipped_var2')
return 0.5 * tf.reduce_sum(tf.log(pv) - tf.log(qv) + qv / pv +
tf.square(qm - pm) / pv - 1, axis=-1)
|
import tensorflow as tf
import numpy as np
def log_bernoulli(x, logits, eps=0.0, axis=-1):
return log_bernoulli_with_logits(x, logits, eps, axis)
def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val,
name='clipped_logit')
return -tf.reduce_sum(
tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis)
def log_normal(x, mu, var, eps=0.0, axis=-1):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(
tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis)
+
+ def kl_normal(qm, qv, pm, pv, eps=0.0, axis=-1):
+ if eps > 0.0:
+ qv = tf.add(qv, eps, name='clipped_var1')
+ pv = tf.add(qv, eps, name='clipped_var2')
+
+ return 0.5 * tf.reduce_sum(tf.log(pv) - tf.log(qv) + qv / pv +
+ tf.square(qm - pm) / pv - 1, axis=-1)
|
cbc60512f0f29ba3444573b6fd835e1505e5e35c
|
radar/radar/validation/fetal_anomaly_scans.py
|
radar/radar/validation/fetal_anomaly_scans.py
|
from radar.validation.data_sources import DataSourceValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, optional, min_, max_, none_if_blank, max_length
class FetalAnomalyScanValidation(PatientValidationMixin, DataSourceValidationMixin, MetaValidationMixin, Validation):
date_of_scan = Field([required()])
gestational_age = Field([required(), min_(8 * 7, 'days'), max_(45 * 7, 'days')])
oligohydramnios = Field([optional()])
right_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
right_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
left_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
left_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
|
from radar.validation.data_sources import DataSourceValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, optional, min_, max_, none_if_blank, max_length, not_in_future
class FetalAnomalyScanValidation(PatientValidationMixin, DataSourceValidationMixin, MetaValidationMixin, Validation):
date_of_scan = Field([required(), not_in_future()])
gestational_age = Field([required(), min_(8 * 7, 'days'), max_(45 * 7, 'days')])
oligohydramnios = Field([optional()])
right_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
right_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
left_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
left_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
|
Check date of scan is not in future
|
Check date of scan is not in future
|
Python
|
agpl-3.0
|
renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar
|
from radar.validation.data_sources import DataSourceValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
- from radar.validation.validators import required, optional, min_, max_, none_if_blank, max_length
+ from radar.validation.validators import required, optional, min_, max_, none_if_blank, max_length, not_in_future
class FetalAnomalyScanValidation(PatientValidationMixin, DataSourceValidationMixin, MetaValidationMixin, Validation):
- date_of_scan = Field([required()])
+ date_of_scan = Field([required(), not_in_future()])
gestational_age = Field([required(), min_(8 * 7, 'days'), max_(45 * 7, 'days')])
oligohydramnios = Field([optional()])
right_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
right_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
left_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
left_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
|
Check date of scan is not in future
|
## Code Before:
from radar.validation.data_sources import DataSourceValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, optional, min_, max_, none_if_blank, max_length
class FetalAnomalyScanValidation(PatientValidationMixin, DataSourceValidationMixin, MetaValidationMixin, Validation):
date_of_scan = Field([required()])
gestational_age = Field([required(), min_(8 * 7, 'days'), max_(45 * 7, 'days')])
oligohydramnios = Field([optional()])
right_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
right_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
left_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
left_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
## Instruction:
Check date of scan is not in future
## Code After:
from radar.validation.data_sources import DataSourceValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, optional, min_, max_, none_if_blank, max_length, not_in_future
class FetalAnomalyScanValidation(PatientValidationMixin, DataSourceValidationMixin, MetaValidationMixin, Validation):
date_of_scan = Field([required(), not_in_future()])
gestational_age = Field([required(), min_(8 * 7, 'days'), max_(45 * 7, 'days')])
oligohydramnios = Field([optional()])
right_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
right_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
left_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
left_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
|
from radar.validation.data_sources import DataSourceValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
- from radar.validation.validators import required, optional, min_, max_, none_if_blank, max_length
+ from radar.validation.validators import required, optional, min_, max_, none_if_blank, max_length, not_in_future
? +++++++++++++++
class FetalAnomalyScanValidation(PatientValidationMixin, DataSourceValidationMixin, MetaValidationMixin, Validation):
- date_of_scan = Field([required()])
+ date_of_scan = Field([required(), not_in_future()])
? +++++++++++++++++
gestational_age = Field([required(), min_(8 * 7, 'days'), max_(45 * 7, 'days')])
oligohydramnios = Field([optional()])
right_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
right_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
left_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
left_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
|
2814f5b2bbd2c53c165f13009eb85cb2c5030b57
|
chicago/search_indexes.py
|
chicago/search_indexes.py
|
from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if obj.last_action_date:
now = app_timezone.localize(datetime.now())
# obj.last_action_date can be in the future
weeks_passed = (now - obj.last_action_date).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
|
from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if data['last_action_date']:
today = app_timezone.localize(datetime.now()).date()
# data['last_action_date'] can be in the future
weeks_passed = (today - data['last_action_date']).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
|
Use prepared data, rather than the object last action date, to determine boost
|
Use prepared data, rather than the object last action date, to determine boost
|
Python
|
mit
|
datamade/chi-councilmatic,datamade/chi-councilmatic,datamade/chi-councilmatic,datamade/chi-councilmatic,datamade/chi-councilmatic
|
from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
- if obj.last_action_date:
- now = app_timezone.localize(datetime.now())
+ if data['last_action_date']:
+ today = app_timezone.localize(datetime.now()).date()
+
- # obj.last_action_date can be in the future
+ # data['last_action_date'] can be in the future
- weeks_passed = (now - obj.last_action_date).days / 7 + 1
+ weeks_passed = (today - data['last_action_date']).days / 7 + 1
+
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
|
Use prepared data, rather than the object last action date, to determine boost
|
## Code Before:
from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if obj.last_action_date:
now = app_timezone.localize(datetime.now())
# obj.last_action_date can be in the future
weeks_passed = (now - obj.last_action_date).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
## Instruction:
Use prepared data, rather than the object last action date, to determine boost
## Code After:
from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if data['last_action_date']:
today = app_timezone.localize(datetime.now()).date()
# data['last_action_date'] can be in the future
weeks_passed = (today - data['last_action_date']).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
|
from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
- if obj.last_action_date:
- now = app_timezone.localize(datetime.now())
+ if data['last_action_date']:
+ today = app_timezone.localize(datetime.now()).date()
+
- # obj.last_action_date can be in the future
? ^^^^
+ # data['last_action_date'] can be in the future
? ^^^^^^ ++
- weeks_passed = (now - obj.last_action_date).days / 7 + 1
? ^ ^ ^^^^
+ weeks_passed = (today - data['last_action_date']).days / 7 + 1
? ^ ^^^ ^^^^^^ ++
+
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
|
7f8a2e8e3b2721111c2de506d2d3bdea415e9b2d
|
markups/common.py
|
markups/common.py
|
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.environ.get('XDG_CONFIG_HOME') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
|
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
|
Use %APPDATA% for CONFIGURATION_DIR on Windows
|
Use %APPDATA% for CONFIGURATION_DIR on Windows
References retext-project/retext#156.
|
Python
|
bsd-3-clause
|
retext-project/pymarkups,mitya57/pymarkups
|
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
- CONFIGURATION_DIR = (os.environ.get('XDG_CONFIG_HOME') or
+ CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
|
Use %APPDATA% for CONFIGURATION_DIR on Windows
|
## Code Before:
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.environ.get('XDG_CONFIG_HOME') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
## Instruction:
Use %APPDATA% for CONFIGURATION_DIR on Windows
## Code After:
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
|
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
- CONFIGURATION_DIR = (os.environ.get('XDG_CONFIG_HOME') or
+ CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
|
7c034802338c78ccb895b7a362e0d4ed11b6b4da
|
.offlineimap.py
|
.offlineimap.py
|
import re, os
def get_password_emacs(machine, login, port):
s = "machine %s login %s port %s password ([^ ]*)\n" % (machine, login, port)
p = re.compile(s)
authinfo = os.popen("gpg -q -d ~/.authinfo.gpg").read()
return p.search(authinfo).group(1)
|
import re, os
def get_password_emacs(machine, login, port):
"""Return password for the given machine/login/port.
Your .authinfo.gpg file had better follow the following order, or
you will not get a result.
"""
s = "machine %s login %s port %s password ([^ ]*)\n" % (machine, login, port)
p = re.compile(s)
authinfo = os.popen("gpg -q -d ~/.authinfo.gpg").read()
return p.search(authinfo).group(1)
|
Add a comment for the get_password_emacs function
|
Add a comment for the get_password_emacs function
Comment necessary because the format of authinfo needs to match the
semi-brittle regex (ah, regexes...)
This also moves the file to a proper dotfile, similar to commit
42f2b513a7949edf901b18233c1229bfcc24b706
|
Python
|
mit
|
olive42/dotfiles,olive42/dotfiles
|
import re, os
def get_password_emacs(machine, login, port):
+ """Return password for the given machine/login/port.
+
+ Your .authinfo.gpg file had better follow the following order, or
+ you will not get a result.
+ """
s = "machine %s login %s port %s password ([^ ]*)\n" % (machine, login, port)
p = re.compile(s)
authinfo = os.popen("gpg -q -d ~/.authinfo.gpg").read()
return p.search(authinfo).group(1)
|
Add a comment for the get_password_emacs function
|
## Code Before:
import re, os
def get_password_emacs(machine, login, port):
s = "machine %s login %s port %s password ([^ ]*)\n" % (machine, login, port)
p = re.compile(s)
authinfo = os.popen("gpg -q -d ~/.authinfo.gpg").read()
return p.search(authinfo).group(1)
## Instruction:
Add a comment for the get_password_emacs function
## Code After:
import re, os
def get_password_emacs(machine, login, port):
"""Return password for the given machine/login/port.
Your .authinfo.gpg file had better follow the following order, or
you will not get a result.
"""
s = "machine %s login %s port %s password ([^ ]*)\n" % (machine, login, port)
p = re.compile(s)
authinfo = os.popen("gpg -q -d ~/.authinfo.gpg").read()
return p.search(authinfo).group(1)
|
import re, os
def get_password_emacs(machine, login, port):
+ """Return password for the given machine/login/port.
+
+ Your .authinfo.gpg file had better follow the following order, or
+ you will not get a result.
+ """
s = "machine %s login %s port %s password ([^ ]*)\n" % (machine, login, port)
p = re.compile(s)
authinfo = os.popen("gpg -q -d ~/.authinfo.gpg").read()
return p.search(authinfo).group(1)
|
d0f1114fdcee63d65c5dd74501b3e329a12f8e53
|
indra/sources/eidos/eidos_reader.py
|
indra/sources/eidos/eidos_reader.py
|
from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system
"""
def __init__(self):
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
|
from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
|
Make Eidos reader instantiate when first reading
|
Make Eidos reader instantiate when first reading
|
Python
|
bsd-2-clause
|
johnbachman/belpy,johnbachman/indra,sorgerlab/indra,johnbachman/indra,bgyori/indra,sorgerlab/belpy,bgyori/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/belpy,pvtodorov/indra,pvtodorov/indra,bgyori/indra,johnbachman/indra,johnbachman/belpy,sorgerlab/belpy,pvtodorov/indra,pvtodorov/indra,sorgerlab/indra
|
from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
- A Scala object, an instance of the Eidos reading system
+ A Scala object, an instance of the Eidos reading system. It is
+ instantiated only when first processing text.
"""
def __init__(self):
+ self.eidos_reader = None
- eidos = autoclass('org.clulab.wm.AgroSystem')
- self.eidos_reader = eidos(autoclass('java.lang.Object')())
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
+ if self.eidos_reader is None:
+ eidos = autoclass('org.clulab.wm.AgroSystem')
+ self.eidos_reader = eidos(autoclass('java.lang.Object')())
+
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
|
Make Eidos reader instantiate when first reading
|
## Code Before:
from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system
"""
def __init__(self):
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
## Instruction:
Make Eidos reader instantiate when first reading
## Code After:
from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
|
from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
- A Scala object, an instance of the Eidos reading system
+ A Scala object, an instance of the Eidos reading system. It is
? +++++++
+ instantiated only when first processing text.
"""
def __init__(self):
+ self.eidos_reader = None
- eidos = autoclass('org.clulab.wm.AgroSystem')
- self.eidos_reader = eidos(autoclass('java.lang.Object')())
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
+ if self.eidos_reader is None:
+ eidos = autoclass('org.clulab.wm.AgroSystem')
+ self.eidos_reader = eidos(autoclass('java.lang.Object')())
+
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
|
88abdf5365977a47abaa0d0a8f3275e4635c8378
|
singleuser/user-config.py
|
singleuser/user-config.py
|
import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'rb') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
usernames[family]['*'] = os.environ['JPY_USER']
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
|
import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'rb') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
for fam in (
'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia',
'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity',
'wikidata', 'mediawiki'
):
usernames[fam]['*'] = os.environ['JPY_USER']
del fam
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
|
Fix OAuth integration for all wiki families
|
Fix OAuth integration for all wiki families
Earlier you needed to edit config file to set family to
whatever you were working on, even if you constructed a
Site object referring to other website. This would cause
funky errors about 'Logged in as X, expected None' errors.
Fix by listing almost all the families people will want to
use!
|
Python
|
mit
|
yuvipanda/paws,yuvipanda/paws
|
import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'rb') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
+ for fam in (
+ 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia',
+ 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity',
+ 'wikidata', 'mediawiki'
+ ):
- usernames[family]['*'] = os.environ['JPY_USER']
+ usernames[fam]['*'] = os.environ['JPY_USER']
+
+ del fam
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
|
Fix OAuth integration for all wiki families
|
## Code Before:
import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'rb') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
usernames[family]['*'] = os.environ['JPY_USER']
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
## Instruction:
Fix OAuth integration for all wiki families
## Code After:
import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'rb') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
for fam in (
'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia',
'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity',
'wikidata', 'mediawiki'
):
usernames[fam]['*'] = os.environ['JPY_USER']
del fam
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
|
import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'rb') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
+ for fam in (
+ 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia',
+ 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity',
+ 'wikidata', 'mediawiki'
+ ):
- usernames[family]['*'] = os.environ['JPY_USER']
? ---
+ usernames[fam]['*'] = os.environ['JPY_USER']
? ++++
+
+ del fam
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
|
521e24fa115e69bca39d7cca89ce42e8efa3b077
|
tools/perf_expectations/PRESUBMIT.py
|
tools/perf_expectations/PRESUBMIT.py
|
UNIT_TESTS = [
'tests.perf_expectations_unittest',
]
PERF_EXPECTATIONS = 'perf_expectations.json'
def CheckChangeOnUpload(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == input_api.os_path.basename(path):
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
return output
def CheckChangeOnCommit(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == input_api.os_path.basename(path):
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
output.extend(input_api.canned_checks.CheckDoNotSubmit(input_api,
output_api))
return output
|
UNIT_TESTS = [
'tests.perf_expectations_unittest',
]
PERF_EXPECTATIONS = 'tools/perf_expectations/perf_expectations.json'
def CheckChangeOnUpload(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == path:
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
return output
def CheckChangeOnCommit(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == path:
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
output.extend(input_api.canned_checks.CheckDoNotSubmit(input_api,
output_api))
return output
|
Use full pathname to perf_expectations in test.
|
Use full pathname to perf_expectations in test.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/266055
git-svn-id: http://src.chromium.org/svn/trunk/src@28770 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
Former-commit-id: f9d8e0a8dae19e482d3c435a76b4e38403e646b5
|
Python
|
bsd-3-clause
|
meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser
|
UNIT_TESTS = [
'tests.perf_expectations_unittest',
]
- PERF_EXPECTATIONS = 'perf_expectations.json'
+ PERF_EXPECTATIONS = 'tools/perf_expectations/perf_expectations.json'
def CheckChangeOnUpload(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
- if PERF_EXPECTATIONS == input_api.os_path.basename(path):
+ if PERF_EXPECTATIONS == path:
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
return output
def CheckChangeOnCommit(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
- if PERF_EXPECTATIONS == input_api.os_path.basename(path):
+ if PERF_EXPECTATIONS == path:
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
output.extend(input_api.canned_checks.CheckDoNotSubmit(input_api,
output_api))
return output
|
Use full pathname to perf_expectations in test.
|
## Code Before:
UNIT_TESTS = [
'tests.perf_expectations_unittest',
]
PERF_EXPECTATIONS = 'perf_expectations.json'
def CheckChangeOnUpload(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == input_api.os_path.basename(path):
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
return output
def CheckChangeOnCommit(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == input_api.os_path.basename(path):
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
output.extend(input_api.canned_checks.CheckDoNotSubmit(input_api,
output_api))
return output
## Instruction:
Use full pathname to perf_expectations in test.
## Code After:
UNIT_TESTS = [
'tests.perf_expectations_unittest',
]
PERF_EXPECTATIONS = 'tools/perf_expectations/perf_expectations.json'
def CheckChangeOnUpload(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == path:
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
return output
def CheckChangeOnCommit(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == path:
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
output.extend(input_api.canned_checks.CheckDoNotSubmit(input_api,
output_api))
return output
|
UNIT_TESTS = [
'tests.perf_expectations_unittest',
]
- PERF_EXPECTATIONS = 'perf_expectations.json'
+ PERF_EXPECTATIONS = 'tools/perf_expectations/perf_expectations.json'
? ++++++++++++++++++++++++
def CheckChangeOnUpload(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
- if PERF_EXPECTATIONS == input_api.os_path.basename(path):
+ if PERF_EXPECTATIONS == path:
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
return output
def CheckChangeOnCommit(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
- if PERF_EXPECTATIONS == input_api.os_path.basename(path):
+ if PERF_EXPECTATIONS == path:
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
output.extend(input_api.canned_checks.CheckDoNotSubmit(input_api,
output_api))
return output
|
593bab981f36f7af52ae55914c18e368e8c1a94f
|
examples/app-on-ws-init.py
|
examples/app-on-ws-init.py
|
from argparse import ArgumentParser
import i3ipc
i3 = i3ipc.Connection()
parser = ArgumentParser(description='Open an application on a given workspace when it is initialized')
parser.add_argument('--workspace', metavar='NAME', help='The name of the workspace')
parser.add_argument('--command', metavar='CMD', help='The command to run on the newly initted workspace')
args = parser.parse_args()
def on_workspace(i3, e):
if e.current.props.name == args.workspace and not len(e.current.leaves()):
i3.command('exec {}'.format(args.command))
i3.on('workspace::focus', on_workspace)
i3.main()
|
from argparse import ArgumentParser
import i3ipc
i3 = i3ipc.Connection()
parser = ArgumentParser(description="""Open the given application each time the
given workspace is created. For instance, running 'app-on-ws-init.py 6
i3-sensible-terminal' should open your terminal as soon as you create the
workspace 6.
""")
parser.add_argument('workspace', metavar='WS_NAME', help='The name of the workspace')
parser.add_argument('command', metavar='CMD', help='The command to run on the newly initted workspace')
args = parser.parse_args()
def on_workspace(i3, e):
if e.current.props.name == args.workspace and not len(e.current.leaves()):
i3.command('exec {}'.format(args.command))
i3.on('workspace::focus', on_workspace)
i3.main()
|
Make the 2 mandatory parameters mandatory. Make the help message a bit clearer and provides an example.
|
Make the 2 mandatory parameters mandatory.
Make the help message a bit clearer and provides an example.
|
Python
|
bsd-3-clause
|
xenomachina/i3ipc-python,nicoe/i3ipc-python,acrisci/i3ipc-python,chrsclmn/i3ipc-python
|
from argparse import ArgumentParser
import i3ipc
i3 = i3ipc.Connection()
- parser = ArgumentParser(description='Open an application on a given workspace when it is initialized')
+ parser = ArgumentParser(description="""Open the given application each time the
+ given workspace is created. For instance, running 'app-on-ws-init.py 6
+ i3-sensible-terminal' should open your terminal as soon as you create the
+ workspace 6.
+ """)
- parser.add_argument('--workspace', metavar='NAME', help='The name of the workspace')
+ parser.add_argument('workspace', metavar='WS_NAME', help='The name of the workspace')
- parser.add_argument('--command', metavar='CMD', help='The command to run on the newly initted workspace')
+ parser.add_argument('command', metavar='CMD', help='The command to run on the newly initted workspace')
args = parser.parse_args()
def on_workspace(i3, e):
if e.current.props.name == args.workspace and not len(e.current.leaves()):
i3.command('exec {}'.format(args.command))
i3.on('workspace::focus', on_workspace)
i3.main()
|
Make the 2 mandatory parameters mandatory. Make the help message a bit clearer and provides an example.
|
## Code Before:
from argparse import ArgumentParser
import i3ipc
i3 = i3ipc.Connection()
parser = ArgumentParser(description='Open an application on a given workspace when it is initialized')
parser.add_argument('--workspace', metavar='NAME', help='The name of the workspace')
parser.add_argument('--command', metavar='CMD', help='The command to run on the newly initted workspace')
args = parser.parse_args()
def on_workspace(i3, e):
if e.current.props.name == args.workspace and not len(e.current.leaves()):
i3.command('exec {}'.format(args.command))
i3.on('workspace::focus', on_workspace)
i3.main()
## Instruction:
Make the 2 mandatory parameters mandatory. Make the help message a bit clearer and provides an example.
## Code After:
from argparse import ArgumentParser
import i3ipc
i3 = i3ipc.Connection()
parser = ArgumentParser(description="""Open the given application each time the
given workspace is created. For instance, running 'app-on-ws-init.py 6
i3-sensible-terminal' should open your terminal as soon as you create the
workspace 6.
""")
parser.add_argument('workspace', metavar='WS_NAME', help='The name of the workspace')
parser.add_argument('command', metavar='CMD', help='The command to run on the newly initted workspace')
args = parser.parse_args()
def on_workspace(i3, e):
if e.current.props.name == args.workspace and not len(e.current.leaves()):
i3.command('exec {}'.format(args.command))
i3.on('workspace::focus', on_workspace)
i3.main()
|
from argparse import ArgumentParser
import i3ipc
i3 = i3ipc.Connection()
- parser = ArgumentParser(description='Open an application on a given workspace when it is initialized')
+ parser = ArgumentParser(description="""Open the given application each time the
+ given workspace is created. For instance, running 'app-on-ws-init.py 6
+ i3-sensible-terminal' should open your terminal as soon as you create the
+ workspace 6.
+ """)
- parser.add_argument('--workspace', metavar='NAME', help='The name of the workspace')
? --
+ parser.add_argument('workspace', metavar='WS_NAME', help='The name of the workspace')
? +++
- parser.add_argument('--command', metavar='CMD', help='The command to run on the newly initted workspace')
? --
+ parser.add_argument('command', metavar='CMD', help='The command to run on the newly initted workspace')
args = parser.parse_args()
def on_workspace(i3, e):
if e.current.props.name == args.workspace and not len(e.current.leaves()):
i3.command('exec {}'.format(args.command))
i3.on('workspace::focus', on_workspace)
i3.main()
|
c98ac4ca313606c966dc45dbe7861898177f2f04
|
api/tests/test_delete_bucket_list.py
|
api/tests/test_delete_bucket_list.py
|
import json
from api.test import BaseTestCase
from api.models import BucketList
class TestDeleteBucketList(BaseTestCase):
def test_delete_bucket_list(self):
bucket_list_one = {
"description": "Movies i have to watch by the end of the week",
"status": "Pending",
"title": "Entertainment",
"user_id": 1
}
self.client.post('/api/v1/bucketlists',
headers={
'Authorization': 'JWT ' + self.token
},
data=json.dumps(bucket_list_one),
content_type='application/json')
count = len(BucketList.query.all())
self.client.delete('/api/v1/bucketlists/1',
headers={
'Authorization': 'JWT ' + self.token
},)
new_count = len(BucketList.query.all())
self.assertEqual(new_count - count, -1)
|
import json
from api.test import BaseTestCase
from api.models import BucketList
class TestDeleteBucketList(BaseTestCase):
def test_delete_bucket_list(self):
bucket_list_one = {
"description": "Movies i have to watch by the end of the week",
"status": "Pending",
"title": "Entertainment",
"user_id": 1
}
self.client.post('/api/v1/bucketlists',
headers={
'Authorization': 'JWT ' + self.token
},
data=json.dumps(bucket_list_one),
content_type='application/json')
count = len(BucketList.query.all())
self.client.delete('/api/v1/bucketlists/1',
headers={
'Authorization': 'JWT ' + self.token
},)
new_count = len(BucketList.query.all())
self.assertEqual(new_count - count, -1)
response = self.client.get(
'/api/v1/bucketlists/1',
headers=dict(
Authorization='Bearer ' + self.token
)
)
self.assertIn("Bucket list not found", str(response.data))
self.assertEqual(response.status_code, 404)
|
Modify test to test that bucketlist nolonger exists in system
|
Modify test to test that bucketlist nolonger exists in system
|
Python
|
mit
|
EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list
|
import json
from api.test import BaseTestCase
from api.models import BucketList
class TestDeleteBucketList(BaseTestCase):
def test_delete_bucket_list(self):
bucket_list_one = {
"description": "Movies i have to watch by the end of the week",
"status": "Pending",
"title": "Entertainment",
"user_id": 1
}
self.client.post('/api/v1/bucketlists',
headers={
'Authorization': 'JWT ' + self.token
},
data=json.dumps(bucket_list_one),
content_type='application/json')
count = len(BucketList.query.all())
self.client.delete('/api/v1/bucketlists/1',
headers={
'Authorization': 'JWT ' + self.token
},)
new_count = len(BucketList.query.all())
self.assertEqual(new_count - count, -1)
+ response = self.client.get(
+ '/api/v1/bucketlists/1',
+ headers=dict(
+ Authorization='Bearer ' + self.token
+ )
+ )
+
+ self.assertIn("Bucket list not found", str(response.data))
+ self.assertEqual(response.status_code, 404)
+
|
Modify test to test that bucketlist nolonger exists in system
|
## Code Before:
import json
from api.test import BaseTestCase
from api.models import BucketList
class TestDeleteBucketList(BaseTestCase):
def test_delete_bucket_list(self):
bucket_list_one = {
"description": "Movies i have to watch by the end of the week",
"status": "Pending",
"title": "Entertainment",
"user_id": 1
}
self.client.post('/api/v1/bucketlists',
headers={
'Authorization': 'JWT ' + self.token
},
data=json.dumps(bucket_list_one),
content_type='application/json')
count = len(BucketList.query.all())
self.client.delete('/api/v1/bucketlists/1',
headers={
'Authorization': 'JWT ' + self.token
},)
new_count = len(BucketList.query.all())
self.assertEqual(new_count - count, -1)
## Instruction:
Modify test to test that bucketlist nolonger exists in system
## Code After:
import json
from api.test import BaseTestCase
from api.models import BucketList
class TestDeleteBucketList(BaseTestCase):
def test_delete_bucket_list(self):
bucket_list_one = {
"description": "Movies i have to watch by the end of the week",
"status": "Pending",
"title": "Entertainment",
"user_id": 1
}
self.client.post('/api/v1/bucketlists',
headers={
'Authorization': 'JWT ' + self.token
},
data=json.dumps(bucket_list_one),
content_type='application/json')
count = len(BucketList.query.all())
self.client.delete('/api/v1/bucketlists/1',
headers={
'Authorization': 'JWT ' + self.token
},)
new_count = len(BucketList.query.all())
self.assertEqual(new_count - count, -1)
response = self.client.get(
'/api/v1/bucketlists/1',
headers=dict(
Authorization='Bearer ' + self.token
)
)
self.assertIn("Bucket list not found", str(response.data))
self.assertEqual(response.status_code, 404)
|
import json
from api.test import BaseTestCase
from api.models import BucketList
class TestDeleteBucketList(BaseTestCase):
def test_delete_bucket_list(self):
bucket_list_one = {
"description": "Movies i have to watch by the end of the week",
"status": "Pending",
"title": "Entertainment",
"user_id": 1
}
self.client.post('/api/v1/bucketlists',
headers={
'Authorization': 'JWT ' + self.token
},
data=json.dumps(bucket_list_one),
content_type='application/json')
count = len(BucketList.query.all())
self.client.delete('/api/v1/bucketlists/1',
headers={
'Authorization': 'JWT ' + self.token
},)
new_count = len(BucketList.query.all())
self.assertEqual(new_count - count, -1)
+
+ response = self.client.get(
+ '/api/v1/bucketlists/1',
+ headers=dict(
+ Authorization='Bearer ' + self.token
+ )
+ )
+
+ self.assertIn("Bucket list not found", str(response.data))
+ self.assertEqual(response.status_code, 404)
|
48081a925d5b69e18a1f04c74cbe98b590e77c5b
|
tests/unit/test_pylama_isort.py
|
tests/unit/test_pylama_isort.py
|
import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "api.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
|
import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "api.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
def test_skip(self, src_dir, tmpdir):
incorrect = tmpdir.join("incorrect.py")
incorrect.write("# isort: skip_file\nimport b\nimport a\n")
assert not self.instance.run(str(incorrect))
|
Add a test for skip functionality
|
Add a test for skip functionality
|
Python
|
mit
|
PyCQA/isort,PyCQA/isort
|
import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "api.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
+ def test_skip(self, src_dir, tmpdir):
+ incorrect = tmpdir.join("incorrect.py")
+ incorrect.write("# isort: skip_file\nimport b\nimport a\n")
+ assert not self.instance.run(str(incorrect))
+
|
Add a test for skip functionality
|
## Code Before:
import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "api.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
## Instruction:
Add a test for skip functionality
## Code After:
import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "api.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
def test_skip(self, src_dir, tmpdir):
incorrect = tmpdir.join("incorrect.py")
incorrect.write("# isort: skip_file\nimport b\nimport a\n")
assert not self.instance.run(str(incorrect))
|
import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "api.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
+
+ def test_skip(self, src_dir, tmpdir):
+ incorrect = tmpdir.join("incorrect.py")
+ incorrect.write("# isort: skip_file\nimport b\nimport a\n")
+ assert not self.instance.run(str(incorrect))
|
3fd2d1cade716f264b2febc3627b1443a1d3e604
|
taiga/projects/migrations/0043_auto_20160530_1004.py
|
taiga/projects/migrations/0043_auto_20160530_1004.py
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('projects', '0042_auto_20160525_0911'),
]
operations = [
migrations.AlterField(
model_name='project',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owned_projects', to=settings.AUTH_USER_MODEL, verbose_name='owner'),
),
]
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('projects', '0040_remove_memberships_of_cancelled_users_acounts'),
]
operations = [
migrations.AlterField(
model_name='project',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owned_projects', to=settings.AUTH_USER_MODEL, verbose_name='owner'),
),
]
|
Fix a problem with a migration between master and stable branch
|
Fix a problem with a migration between master and stable branch
|
Python
|
agpl-3.0
|
taigaio/taiga-back,dayatz/taiga-back,xdevelsistemas/taiga-back-community,taigaio/taiga-back,taigaio/taiga-back,dayatz/taiga-back,dayatz/taiga-back,xdevelsistemas/taiga-back-community,xdevelsistemas/taiga-back-community
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
- ('projects', '0042_auto_20160525_0911'),
+ ('projects', '0040_remove_memberships_of_cancelled_users_acounts'),
]
operations = [
migrations.AlterField(
model_name='project',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owned_projects', to=settings.AUTH_USER_MODEL, verbose_name='owner'),
),
]
|
Fix a problem with a migration between master and stable branch
|
## Code Before:
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('projects', '0042_auto_20160525_0911'),
]
operations = [
migrations.AlterField(
model_name='project',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owned_projects', to=settings.AUTH_USER_MODEL, verbose_name='owner'),
),
]
## Instruction:
Fix a problem with a migration between master and stable branch
## Code After:
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('projects', '0040_remove_memberships_of_cancelled_users_acounts'),
]
operations = [
migrations.AlterField(
model_name='project',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owned_projects', to=settings.AUTH_USER_MODEL, verbose_name='owner'),
),
]
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
- ('projects', '0042_auto_20160525_0911'),
+ ('projects', '0040_remove_memberships_of_cancelled_users_acounts'),
]
operations = [
migrations.AlterField(
model_name='project',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owned_projects', to=settings.AUTH_USER_MODEL, verbose_name='owner'),
),
]
|
a590e100a23a0c225467b34b7c4481ece45905ad
|
tests/test_shells/postproc.py
|
tests/test_shells/postproc.py
|
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(pid, 'PID')
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
W.write(line)
os.rename(new_fname, fname)
|
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
line = line.replace(pid, 'PID')
W.write(line)
os.rename(new_fname, fname)
|
Fix functional shell tests in travis
|
Fix functional shell tests in travis
Hostname in travis contains random numbers meaning that it occasionally may
contain a PID as well. Thus it must be replaced first.
|
Python
|
mit
|
Liangjianghao/powerline,bartvm/powerline,lukw00/powerline,kenrachynski/powerline,Luffin/powerline,DoctorJellyface/powerline,Luffin/powerline,areteix/powerline,cyrixhero/powerline,seanfisk/powerline,EricSB/powerline,dragon788/powerline,blindFS/powerline,magus424/powerline,wfscheper/powerline,dragon788/powerline,wfscheper/powerline,cyrixhero/powerline,keelerm84/powerline,Luffin/powerline,kenrachynski/powerline,seanfisk/powerline,russellb/powerline,IvanAli/powerline,xfumihiro/powerline,russellb/powerline,magus424/powerline,seanfisk/powerline,bartvm/powerline,DoctorJellyface/powerline,darac/powerline,EricSB/powerline,cyrixhero/powerline,firebitsbr/powerline,junix/powerline,areteix/powerline,S0lll0s/powerline,prvnkumar/powerline,QuLogic/powerline,bezhermoso/powerline,prvnkumar/powerline,kenrachynski/powerline,xfumihiro/powerline,firebitsbr/powerline,bartvm/powerline,Liangjianghao/powerline,EricSB/powerline,s0undt3ch/powerline,darac/powerline,russellb/powerline,Liangjianghao/powerline,dragon788/powerline,S0lll0s/powerline,QuLogic/powerline,firebitsbr/powerline,s0undt3ch/powerline,lukw00/powerline,s0undt3ch/powerline,xxxhycl2010/powerline,junix/powerline,darac/powerline,blindFS/powerline,prvnkumar/powerline,IvanAli/powerline,junix/powerline,bezhermoso/powerline,S0lll0s/powerline,wfscheper/powerline,IvanAli/powerline,xxxhycl2010/powerline,xfumihiro/powerline,DoctorJellyface/powerline,bezhermoso/powerline,areteix/powerline,magus424/powerline,lukw00/powerline,xxxhycl2010/powerline,blindFS/powerline,QuLogic/powerline,keelerm84/powerline
|
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
- line = line.replace(pid, 'PID')
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
+ line = line.replace(pid, 'PID')
W.write(line)
os.rename(new_fname, fname)
|
Fix functional shell tests in travis
|
## Code Before:
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(pid, 'PID')
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
W.write(line)
os.rename(new_fname, fname)
## Instruction:
Fix functional shell tests in travis
## Code After:
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
line = line.replace(pid, 'PID')
W.write(line)
os.rename(new_fname, fname)
|
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
- line = line.replace(pid, 'PID')
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
+ line = line.replace(pid, 'PID')
W.write(line)
os.rename(new_fname, fname)
|
5cb8d2a4187d867111b32491df6e53983f124d73
|
rawkit/raw.py
|
rawkit/raw.py
|
from rawkit.libraw import libraw
class Raw(object):
def __init__(self, filename=None):
self.data = libraw.libraw_init(0)
libraw.libraw_open_file(self.data, bytes(filename, 'utf-8'))
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
libraw.libraw_close(self.data)
def process(self, options=None):
libraw.libraw_unpack(self.data)
libraw.libraw_dcraw_process(self.data)
def save(self, filename=None):
libraw.libraw_dcraw_ppm_tiff_writer(
self.data, bytes(filename, 'utf-8'))
|
from rawkit.libraw import libraw
class Raw(object):
def __init__(self, filename=None):
self.data = libraw.libraw_init(0)
libraw.libraw_open_file(self.data, bytes(filename, 'utf-8'))
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
"""Clean up after ourselves when leaving the context manager."""
self.close()
def close(self):
"""Free the underlying raw representation."""
libraw.libraw_close(self.data)
def process(self, options=None):
"""
Unpack and process the raw data into something more usable.
"""
libraw.libraw_unpack(self.data)
libraw.libraw_dcraw_process(self.data)
def save(self, filename=None):
libraw.libraw_dcraw_ppm_tiff_writer(
self.data, bytes(filename, 'utf-8'))
|
Add close method to Raw class
|
Add close method to Raw class
Fixes #10
|
Python
|
mit
|
nagyistoce/rawkit,SamWhited/rawkit,photoshell/rawkit
|
from rawkit.libraw import libraw
class Raw(object):
def __init__(self, filename=None):
self.data = libraw.libraw_init(0)
libraw.libraw_open_file(self.data, bytes(filename, 'utf-8'))
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
+ """Clean up after ourselves when leaving the context manager."""
+ self.close()
+
+ def close(self):
+ """Free the underlying raw representation."""
libraw.libraw_close(self.data)
def process(self, options=None):
+ """
+ Unpack and process the raw data into something more usable.
+
+ """
+
libraw.libraw_unpack(self.data)
libraw.libraw_dcraw_process(self.data)
def save(self, filename=None):
libraw.libraw_dcraw_ppm_tiff_writer(
self.data, bytes(filename, 'utf-8'))
|
Add close method to Raw class
|
## Code Before:
from rawkit.libraw import libraw
class Raw(object):
def __init__(self, filename=None):
self.data = libraw.libraw_init(0)
libraw.libraw_open_file(self.data, bytes(filename, 'utf-8'))
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
libraw.libraw_close(self.data)
def process(self, options=None):
libraw.libraw_unpack(self.data)
libraw.libraw_dcraw_process(self.data)
def save(self, filename=None):
libraw.libraw_dcraw_ppm_tiff_writer(
self.data, bytes(filename, 'utf-8'))
## Instruction:
Add close method to Raw class
## Code After:
from rawkit.libraw import libraw
class Raw(object):
def __init__(self, filename=None):
self.data = libraw.libraw_init(0)
libraw.libraw_open_file(self.data, bytes(filename, 'utf-8'))
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
"""Clean up after ourselves when leaving the context manager."""
self.close()
def close(self):
"""Free the underlying raw representation."""
libraw.libraw_close(self.data)
def process(self, options=None):
"""
Unpack and process the raw data into something more usable.
"""
libraw.libraw_unpack(self.data)
libraw.libraw_dcraw_process(self.data)
def save(self, filename=None):
libraw.libraw_dcraw_ppm_tiff_writer(
self.data, bytes(filename, 'utf-8'))
|
from rawkit.libraw import libraw
class Raw(object):
def __init__(self, filename=None):
self.data = libraw.libraw_init(0)
libraw.libraw_open_file(self.data, bytes(filename, 'utf-8'))
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
+ """Clean up after ourselves when leaving the context manager."""
+ self.close()
+
+ def close(self):
+ """Free the underlying raw representation."""
libraw.libraw_close(self.data)
def process(self, options=None):
+ """
+ Unpack and process the raw data into something more usable.
+
+ """
+
libraw.libraw_unpack(self.data)
libraw.libraw_dcraw_process(self.data)
def save(self, filename=None):
libraw.libraw_dcraw_ppm_tiff_writer(
self.data, bytes(filename, 'utf-8'))
|
6782ad40a405f79f07fa1527131634f96944ffd6
|
apps/innovate/views.py
|
apps/innovate/views.py
|
import random
import jingo
from users.models import Profile
from projects.models import Project
from events.models import Event
from feeds.models import Entry
def splash(request):
"""Display splash page. With featured project, event, person, blog post."""
def get_random(cls, **kwargs):
choices = cls.objects.filter(**kwargs)
return choices and random.choice(choices) or None
return jingo.render(request, 'innovate/splash.html', {
'featured_project': get_random(Project, featured=True),
'featured_event': get_random(Event, featured=True),
'featured_user': get_random(Profile, featured=True),
'entry': get_random(Entry, link__featured=True)
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
return jingo.render(request, 'handlers/404.html')
def handle500(request):
"""Handle server errors."""
return jingo.render(request, 'handlers/500.html')
|
import random
import jingo
from users.models import Profile
from projects.models import Project
from events.models import Event
from feeds.models import Entry
def splash(request):
"""Display splash page. With featured project, event, person, blog post."""
def get_random(cls, **kwargs):
choices = cls.objects.filter(**kwargs)
return choices and random.choice(choices) or None
return jingo.render(request, 'innovate/splash.html', {
'featured_project': get_random(Project, featured=True),
'featured_event': get_random(Event, featured=True),
'featured_user': get_random(Profile, featured=True),
'entry': get_random(Entry, link__featured=True)
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
return jingo.render(request, 'handlers/404.html', status=404)
def handle500(request):
"""Handle server errors."""
return jingo.render(request, 'handlers/500.html', status=500)
|
Add status codes to the 404/500 error handlers.
|
Add status codes to the 404/500 error handlers.
|
Python
|
bsd-3-clause
|
mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite
|
import random
import jingo
from users.models import Profile
from projects.models import Project
from events.models import Event
from feeds.models import Entry
def splash(request):
"""Display splash page. With featured project, event, person, blog post."""
def get_random(cls, **kwargs):
choices = cls.objects.filter(**kwargs)
return choices and random.choice(choices) or None
return jingo.render(request, 'innovate/splash.html', {
'featured_project': get_random(Project, featured=True),
'featured_event': get_random(Event, featured=True),
'featured_user': get_random(Profile, featured=True),
'entry': get_random(Entry, link__featured=True)
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
- return jingo.render(request, 'handlers/404.html')
+ return jingo.render(request, 'handlers/404.html', status=404)
def handle500(request):
"""Handle server errors."""
- return jingo.render(request, 'handlers/500.html')
+ return jingo.render(request, 'handlers/500.html', status=500)
|
Add status codes to the 404/500 error handlers.
|
## Code Before:
import random
import jingo
from users.models import Profile
from projects.models import Project
from events.models import Event
from feeds.models import Entry
def splash(request):
"""Display splash page. With featured project, event, person, blog post."""
def get_random(cls, **kwargs):
choices = cls.objects.filter(**kwargs)
return choices and random.choice(choices) or None
return jingo.render(request, 'innovate/splash.html', {
'featured_project': get_random(Project, featured=True),
'featured_event': get_random(Event, featured=True),
'featured_user': get_random(Profile, featured=True),
'entry': get_random(Entry, link__featured=True)
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
return jingo.render(request, 'handlers/404.html')
def handle500(request):
"""Handle server errors."""
return jingo.render(request, 'handlers/500.html')
## Instruction:
Add status codes to the 404/500 error handlers.
## Code After:
import random
import jingo
from users.models import Profile
from projects.models import Project
from events.models import Event
from feeds.models import Entry
def splash(request):
"""Display splash page. With featured project, event, person, blog post."""
def get_random(cls, **kwargs):
choices = cls.objects.filter(**kwargs)
return choices and random.choice(choices) or None
return jingo.render(request, 'innovate/splash.html', {
'featured_project': get_random(Project, featured=True),
'featured_event': get_random(Event, featured=True),
'featured_user': get_random(Profile, featured=True),
'entry': get_random(Entry, link__featured=True)
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
return jingo.render(request, 'handlers/404.html', status=404)
def handle500(request):
"""Handle server errors."""
return jingo.render(request, 'handlers/500.html', status=500)
|
import random
import jingo
from users.models import Profile
from projects.models import Project
from events.models import Event
from feeds.models import Entry
def splash(request):
"""Display splash page. With featured project, event, person, blog post."""
def get_random(cls, **kwargs):
choices = cls.objects.filter(**kwargs)
return choices and random.choice(choices) or None
return jingo.render(request, 'innovate/splash.html', {
'featured_project': get_random(Project, featured=True),
'featured_event': get_random(Event, featured=True),
'featured_user': get_random(Profile, featured=True),
'entry': get_random(Entry, link__featured=True)
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
- return jingo.render(request, 'handlers/404.html')
+ return jingo.render(request, 'handlers/404.html', status=404)
? ++++++++++++
def handle500(request):
"""Handle server errors."""
- return jingo.render(request, 'handlers/500.html')
+ return jingo.render(request, 'handlers/500.html', status=500)
? ++++++++++++
|
6620032e9f8574c3e1dad37c111040eca570a751
|
features/memberships/models.py
|
features/memberships/models.py
|
from django.contrib.contenttypes import fields as contenttypes
from django.db import models
class Membership(models.Model):
created_by = models.ForeignKey(
'gestalten.Gestalt', related_name='memberships_created')
date_joined = models.DateField(auto_now_add=True)
group = models.ForeignKey('groups.Group', related_name='memberships')
member = models.ForeignKey('gestalten.Gestalt', related_name='memberships')
def __str__(self):
return "%s is member of %s since %s" % (
str(self.member.user.get_username()),
str(self.group.slug), str(self.date_joined)
)
class Meta:
unique_together = ('group', 'member')
class Application(models.Model):
group = models.ForeignKey('groups.Group', related_name='applications')
contributions = contenttypes.GenericRelation(
'contributions.Contribution',
content_type_field='contribution_type',
object_id_field='contribution_id',
related_query_name='membership_application')
@property
def contribution(self):
return self.contributions.first()
|
from django.contrib.contenttypes import fields as contenttypes
from django.db import models
from . import querysets
class Membership(models.Model):
class Meta:
unique_together = ('group', 'member')
created_by = models.ForeignKey(
'gestalten.Gestalt', related_name='memberships_created')
date_joined = models.DateField(auto_now_add=True)
group = models.ForeignKey('groups.Group', related_name='memberships')
member = models.ForeignKey('gestalten.Gestalt', related_name='memberships')
objects = models.Manager.from_queryset(querysets.MembershipQuerySet)()
def __str__(self):
return "%s is member of %s since %s" % (
str(self.member.user.get_username()),
str(self.group.slug), str(self.date_joined)
)
class Application(models.Model):
group = models.ForeignKey('groups.Group', related_name='applications')
contributions = contenttypes.GenericRelation(
'contributions.Contribution',
content_type_field='contribution_type',
object_id_field='contribution_id',
related_query_name='membership_application')
@property
def contribution(self):
return self.contributions.first()
|
Add queryset for ordering memberships by activity
|
Add queryset for ordering memberships by activity
|
Python
|
agpl-3.0
|
stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten
|
from django.contrib.contenttypes import fields as contenttypes
from django.db import models
+ from . import querysets
+
class Membership(models.Model):
+ class Meta:
+ unique_together = ('group', 'member')
+
created_by = models.ForeignKey(
'gestalten.Gestalt', related_name='memberships_created')
date_joined = models.DateField(auto_now_add=True)
group = models.ForeignKey('groups.Group', related_name='memberships')
member = models.ForeignKey('gestalten.Gestalt', related_name='memberships')
+ objects = models.Manager.from_queryset(querysets.MembershipQuerySet)()
+
def __str__(self):
return "%s is member of %s since %s" % (
str(self.member.user.get_username()),
str(self.group.slug), str(self.date_joined)
)
-
- class Meta:
- unique_together = ('group', 'member')
class Application(models.Model):
group = models.ForeignKey('groups.Group', related_name='applications')
contributions = contenttypes.GenericRelation(
'contributions.Contribution',
content_type_field='contribution_type',
object_id_field='contribution_id',
related_query_name='membership_application')
@property
def contribution(self):
return self.contributions.first()
|
Add queryset for ordering memberships by activity
|
## Code Before:
from django.contrib.contenttypes import fields as contenttypes
from django.db import models
class Membership(models.Model):
created_by = models.ForeignKey(
'gestalten.Gestalt', related_name='memberships_created')
date_joined = models.DateField(auto_now_add=True)
group = models.ForeignKey('groups.Group', related_name='memberships')
member = models.ForeignKey('gestalten.Gestalt', related_name='memberships')
def __str__(self):
return "%s is member of %s since %s" % (
str(self.member.user.get_username()),
str(self.group.slug), str(self.date_joined)
)
class Meta:
unique_together = ('group', 'member')
class Application(models.Model):
group = models.ForeignKey('groups.Group', related_name='applications')
contributions = contenttypes.GenericRelation(
'contributions.Contribution',
content_type_field='contribution_type',
object_id_field='contribution_id',
related_query_name='membership_application')
@property
def contribution(self):
return self.contributions.first()
## Instruction:
Add queryset for ordering memberships by activity
## Code After:
from django.contrib.contenttypes import fields as contenttypes
from django.db import models
from . import querysets
class Membership(models.Model):
class Meta:
unique_together = ('group', 'member')
created_by = models.ForeignKey(
'gestalten.Gestalt', related_name='memberships_created')
date_joined = models.DateField(auto_now_add=True)
group = models.ForeignKey('groups.Group', related_name='memberships')
member = models.ForeignKey('gestalten.Gestalt', related_name='memberships')
objects = models.Manager.from_queryset(querysets.MembershipQuerySet)()
def __str__(self):
return "%s is member of %s since %s" % (
str(self.member.user.get_username()),
str(self.group.slug), str(self.date_joined)
)
class Application(models.Model):
group = models.ForeignKey('groups.Group', related_name='applications')
contributions = contenttypes.GenericRelation(
'contributions.Contribution',
content_type_field='contribution_type',
object_id_field='contribution_id',
related_query_name='membership_application')
@property
def contribution(self):
return self.contributions.first()
|
from django.contrib.contenttypes import fields as contenttypes
from django.db import models
+ from . import querysets
+
class Membership(models.Model):
+ class Meta:
+ unique_together = ('group', 'member')
+
created_by = models.ForeignKey(
'gestalten.Gestalt', related_name='memberships_created')
date_joined = models.DateField(auto_now_add=True)
group = models.ForeignKey('groups.Group', related_name='memberships')
member = models.ForeignKey('gestalten.Gestalt', related_name='memberships')
+ objects = models.Manager.from_queryset(querysets.MembershipQuerySet)()
+
def __str__(self):
return "%s is member of %s since %s" % (
str(self.member.user.get_username()),
str(self.group.slug), str(self.date_joined)
)
-
- class Meta:
- unique_together = ('group', 'member')
class Application(models.Model):
group = models.ForeignKey('groups.Group', related_name='applications')
contributions = contenttypes.GenericRelation(
'contributions.Contribution',
content_type_field='contribution_type',
object_id_field='contribution_id',
related_query_name='membership_application')
@property
def contribution(self):
return self.contributions.first()
|
97f81ddfdd78d062e5019793101926fb52b0db38
|
sum.py
|
sum.py
|
import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
new_view = self.view.window().new_file()
new_view.set_name('Sum')
new_view.insert(edit, 0, '42')
new_view.set_scratch(True)
|
import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
new_view = self.view.window().new_file()
new_view.set_name('Sum')
new_view.insert(edit, 0, '42')
new_view.set_read_only(True)
new_view.set_scratch(True)
|
Set new file to read-only
|
Set new file to read-only
Since the new file does not prompt about file changes when closed, if
the user were to edit the new file and close without saving, their
changes would be lost forever. By setting the new file to be read-only,
the user will not be able to make changes to it that may be lost.
|
Python
|
mit
|
jbrudvik/sublime-sum,jbrudvik/sublime-sum
|
import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
new_view = self.view.window().new_file()
new_view.set_name('Sum')
new_view.insert(edit, 0, '42')
+ new_view.set_read_only(True)
new_view.set_scratch(True)
|
Set new file to read-only
|
## Code Before:
import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
new_view = self.view.window().new_file()
new_view.set_name('Sum')
new_view.insert(edit, 0, '42')
new_view.set_scratch(True)
## Instruction:
Set new file to read-only
## Code After:
import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
new_view = self.view.window().new_file()
new_view.set_name('Sum')
new_view.insert(edit, 0, '42')
new_view.set_read_only(True)
new_view.set_scratch(True)
|
import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
new_view = self.view.window().new_file()
new_view.set_name('Sum')
new_view.insert(edit, 0, '42')
+ new_view.set_read_only(True)
new_view.set_scratch(True)
|
21b1206da978434e388e43a5258b9c0f09fc0e1e
|
tumblr/data/cleanup.py
|
tumblr/data/cleanup.py
|
import os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
del lines[x]
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
if size != 0:
continue
print(image)
remove_image_from_csvs(image)
os.remove(path)
|
import os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
lines[x] = lines[x].replace(image, image+".gif")
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
if "." in image:
continue
print(image)
remove_image_from_csvs(image)
os.rename(path, path+".gif")
|
Add suffixes to all gifs
|
Add suffixes to all gifs
|
Python
|
mit
|
albertyw/devops-reactions-index,albertyw/devops-reactions-index,albertyw/reaction-pics,albertyw/reaction-pics,albertyw/reaction-pics,albertyw/devops-reactions-index,albertyw/devops-reactions-index,albertyw/reaction-pics
|
import os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
- del lines[x]
+ lines[x] = lines[x].replace(image, image+".gif")
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
- if size != 0:
+ if "." in image:
continue
print(image)
remove_image_from_csvs(image)
- os.remove(path)
+ os.rename(path, path+".gif")
|
Add suffixes to all gifs
|
## Code Before:
import os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
del lines[x]
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
if size != 0:
continue
print(image)
remove_image_from_csvs(image)
os.remove(path)
## Instruction:
Add suffixes to all gifs
## Code After:
import os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
lines[x] = lines[x].replace(image, image+".gif")
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
if "." in image:
continue
print(image)
remove_image_from_csvs(image)
os.rename(path, path+".gif")
|
import os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
- del lines[x]
+ lines[x] = lines[x].replace(image, image+".gif")
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
- if size != 0:
+ if "." in image:
continue
print(image)
remove_image_from_csvs(image)
- os.remove(path)
+ os.rename(path, path+".gif")
|
b7cee426db61801fd118758bb2f47944f3b8fd37
|
binaryornot/check.py
|
binaryornot/check.py
|
def get_starting_chunk(filename):
with(filename, 'r') as f:
chunk = open(filename).read(1024)
return chunk
def is_binary_string(bytes_to_check):
"""
:param bytes: A chunk of bytes to check.
:returns: True if appears to be a binary, otherwise False.
"""
textchars = ''.join(map(chr, [7,8,9,10,12,13,27] + range(0x20, 0x100)))
result = bytes_to_check.translate(None, textchars)
return bool(result)
def is_binary(filename):
"""
:param filename: File to check.
:returns: True if it's a binary file, otherwise False.
"""
chunk = get_starting_chunk(filename)
return is_binary_string(chunk)
|
def get_starting_chunk(filename):
with open(filename, 'r') as f:
chunk = f.read(1024)
return chunk
def is_binary_string(bytes_to_check):
"""
:param bytes: A chunk of bytes to check.
:returns: True if appears to be a binary, otherwise False.
"""
textchars = ''.join(map(chr, [7,8,9,10,12,13,27] + range(0x20, 0x100)))
result = bytes_to_check.translate(None, textchars)
return bool(result)
def is_binary(filename):
"""
:param filename: File to check.
:returns: True if it's a binary file, otherwise False.
"""
chunk = get_starting_chunk(filename)
return is_binary_string(chunk)
|
Fix file opening and make tests pass.
|
Fix file opening and make tests pass.
|
Python
|
bsd-3-clause
|
hackebrot/binaryornot,0k/binaryornot,hackebrot/binaryornot,hackebrot/binaryornot,pombredanne/binaryornot,audreyr/binaryornot,pombredanne/binaryornot,audreyr/binaryornot,0k/binaryornot,pombredanne/binaryornot,audreyr/binaryornot
|
def get_starting_chunk(filename):
- with(filename, 'r') as f:
+ with open(filename, 'r') as f:
- chunk = open(filename).read(1024)
+ chunk = f.read(1024)
return chunk
def is_binary_string(bytes_to_check):
"""
:param bytes: A chunk of bytes to check.
:returns: True if appears to be a binary, otherwise False.
"""
textchars = ''.join(map(chr, [7,8,9,10,12,13,27] + range(0x20, 0x100)))
result = bytes_to_check.translate(None, textchars)
return bool(result)
def is_binary(filename):
"""
:param filename: File to check.
:returns: True if it's a binary file, otherwise False.
"""
chunk = get_starting_chunk(filename)
return is_binary_string(chunk)
|
Fix file opening and make tests pass.
|
## Code Before:
def get_starting_chunk(filename):
with(filename, 'r') as f:
chunk = open(filename).read(1024)
return chunk
def is_binary_string(bytes_to_check):
"""
:param bytes: A chunk of bytes to check.
:returns: True if appears to be a binary, otherwise False.
"""
textchars = ''.join(map(chr, [7,8,9,10,12,13,27] + range(0x20, 0x100)))
result = bytes_to_check.translate(None, textchars)
return bool(result)
def is_binary(filename):
"""
:param filename: File to check.
:returns: True if it's a binary file, otherwise False.
"""
chunk = get_starting_chunk(filename)
return is_binary_string(chunk)
## Instruction:
Fix file opening and make tests pass.
## Code After:
def get_starting_chunk(filename):
with open(filename, 'r') as f:
chunk = f.read(1024)
return chunk
def is_binary_string(bytes_to_check):
"""
:param bytes: A chunk of bytes to check.
:returns: True if appears to be a binary, otherwise False.
"""
textchars = ''.join(map(chr, [7,8,9,10,12,13,27] + range(0x20, 0x100)))
result = bytes_to_check.translate(None, textchars)
return bool(result)
def is_binary(filename):
"""
:param filename: File to check.
:returns: True if it's a binary file, otherwise False.
"""
chunk = get_starting_chunk(filename)
return is_binary_string(chunk)
|
def get_starting_chunk(filename):
- with(filename, 'r') as f:
+ with open(filename, 'r') as f:
? +++++
- chunk = open(filename).read(1024)
? ----- --------
+ chunk = f.read(1024)
return chunk
def is_binary_string(bytes_to_check):
"""
:param bytes: A chunk of bytes to check.
:returns: True if appears to be a binary, otherwise False.
"""
textchars = ''.join(map(chr, [7,8,9,10,12,13,27] + range(0x20, 0x100)))
result = bytes_to_check.translate(None, textchars)
return bool(result)
def is_binary(filename):
"""
:param filename: File to check.
:returns: True if it's a binary file, otherwise False.
"""
chunk = get_starting_chunk(filename)
return is_binary_string(chunk)
|
3685715cd260f4f5ca392caddf7fb0c01af9ebcc
|
mzalendo/comments2/feeds.py
|
mzalendo/comments2/feeds.py
|
from disqus.wxr_feed import ContribCommentsWxrFeed
# from comments2.models import Comment
from core.models import Person
# http://help.disqus.com/customer/portal/articles/472150-custom-xml-import-format
class CommentWxrFeed(ContribCommentsWxrFeed):
link = "/"
def items(self):
return Person.objects.all()[:5] # remove [:5] before generating full dump
def item_pubdate(self, item):
return item.created
def item_description(self, item):
return str(item)
def item_guid(self, item):
# set to none so that the output dsq:thread_identifier is empty
return None
def item_comments(self, item):
return item.comments.all()
def comment_user_name(self, comment):
return str(comment.user)
def comment_user_email(self, comment):
return comment.user.email or str(comment.id) + '@bogus-email-address.com'
def comment_user_url(self, comment):
return None
def comment_is_approved(self, comment):
return 1
|
from disqus.wxr_feed import ContribCommentsWxrFeed
# from comments2.models import Comment
from core.models import Person, Place, Organisation
# http://help.disqus.com/customer/portal/articles/472150-custom-xml-import-format
class CommentWxrFeed(ContribCommentsWxrFeed):
link = "/"
def items(self):
list = []
list.extend( Person.objects.all() )
list.extend( Organisation.objects.all() )
list.extend( Place.objects.all() )
return list
def item_pubdate(self, item):
return item.created
def item_description(self, item):
return str(item)
def item_guid(self, item):
# set to none so that the output dsq:thread_identifier is empty
return None
def item_comments(self, item):
return item.comments.all()
def comment_user_name(self, comment):
return str(comment.user)
def comment_user_email(self, comment):
return comment.user.email or str(comment.id) + '@bogus-email-address.com'
def comment_user_url(self, comment):
return None
def comment_is_approved(self, comment):
return 1
|
Add in comments for orgs and places too, remove limit
|
Add in comments for orgs and places too, remove limit
|
Python
|
agpl-3.0
|
mysociety/pombola,Hutspace/odekro,ken-muturi/pombola,mysociety/pombola,Hutspace/odekro,patricmutwiri/pombola,geoffkilpin/pombola,patricmutwiri/pombola,mysociety/pombola,ken-muturi/pombola,ken-muturi/pombola,ken-muturi/pombola,mysociety/pombola,mysociety/pombola,patricmutwiri/pombola,geoffkilpin/pombola,geoffkilpin/pombola,Hutspace/odekro,geoffkilpin/pombola,hzj123/56th,ken-muturi/pombola,geoffkilpin/pombola,hzj123/56th,Hutspace/odekro,hzj123/56th,mysociety/pombola,patricmutwiri/pombola,hzj123/56th,hzj123/56th,ken-muturi/pombola,geoffkilpin/pombola,Hutspace/odekro,patricmutwiri/pombola,patricmutwiri/pombola,hzj123/56th
|
from disqus.wxr_feed import ContribCommentsWxrFeed
# from comments2.models import Comment
- from core.models import Person
+ from core.models import Person, Place, Organisation
# http://help.disqus.com/customer/portal/articles/472150-custom-xml-import-format
class CommentWxrFeed(ContribCommentsWxrFeed):
link = "/"
def items(self):
- return Person.objects.all()[:5] # remove [:5] before generating full dump
+ list = []
+ list.extend( Person.objects.all() )
+ list.extend( Organisation.objects.all() )
+ list.extend( Place.objects.all() )
+ return list
def item_pubdate(self, item):
return item.created
def item_description(self, item):
return str(item)
def item_guid(self, item):
# set to none so that the output dsq:thread_identifier is empty
return None
def item_comments(self, item):
return item.comments.all()
def comment_user_name(self, comment):
return str(comment.user)
def comment_user_email(self, comment):
return comment.user.email or str(comment.id) + '@bogus-email-address.com'
def comment_user_url(self, comment):
return None
def comment_is_approved(self, comment):
return 1
|
Add in comments for orgs and places too, remove limit
|
## Code Before:
from disqus.wxr_feed import ContribCommentsWxrFeed
# from comments2.models import Comment
from core.models import Person
# http://help.disqus.com/customer/portal/articles/472150-custom-xml-import-format
class CommentWxrFeed(ContribCommentsWxrFeed):
link = "/"
def items(self):
return Person.objects.all()[:5] # remove [:5] before generating full dump
def item_pubdate(self, item):
return item.created
def item_description(self, item):
return str(item)
def item_guid(self, item):
# set to none so that the output dsq:thread_identifier is empty
return None
def item_comments(self, item):
return item.comments.all()
def comment_user_name(self, comment):
return str(comment.user)
def comment_user_email(self, comment):
return comment.user.email or str(comment.id) + '@bogus-email-address.com'
def comment_user_url(self, comment):
return None
def comment_is_approved(self, comment):
return 1
## Instruction:
Add in comments for orgs and places too, remove limit
## Code After:
from disqus.wxr_feed import ContribCommentsWxrFeed
# from comments2.models import Comment
from core.models import Person, Place, Organisation
# http://help.disqus.com/customer/portal/articles/472150-custom-xml-import-format
class CommentWxrFeed(ContribCommentsWxrFeed):
link = "/"
def items(self):
list = []
list.extend( Person.objects.all() )
list.extend( Organisation.objects.all() )
list.extend( Place.objects.all() )
return list
def item_pubdate(self, item):
return item.created
def item_description(self, item):
return str(item)
def item_guid(self, item):
# set to none so that the output dsq:thread_identifier is empty
return None
def item_comments(self, item):
return item.comments.all()
def comment_user_name(self, comment):
return str(comment.user)
def comment_user_email(self, comment):
return comment.user.email or str(comment.id) + '@bogus-email-address.com'
def comment_user_url(self, comment):
return None
def comment_is_approved(self, comment):
return 1
|
from disqus.wxr_feed import ContribCommentsWxrFeed
# from comments2.models import Comment
- from core.models import Person
+ from core.models import Person, Place, Organisation
# http://help.disqus.com/customer/portal/articles/472150-custom-xml-import-format
class CommentWxrFeed(ContribCommentsWxrFeed):
link = "/"
def items(self):
- return Person.objects.all()[:5] # remove [:5] before generating full dump
+ list = []
+ list.extend( Person.objects.all() )
+ list.extend( Organisation.objects.all() )
+ list.extend( Place.objects.all() )
+ return list
def item_pubdate(self, item):
return item.created
def item_description(self, item):
return str(item)
def item_guid(self, item):
# set to none so that the output dsq:thread_identifier is empty
return None
def item_comments(self, item):
return item.comments.all()
def comment_user_name(self, comment):
return str(comment.user)
def comment_user_email(self, comment):
return comment.user.email or str(comment.id) + '@bogus-email-address.com'
def comment_user_url(self, comment):
return None
def comment_is_approved(self, comment):
return 1
|
dfa39db42cc5ce2c29da2ec0c388865ec7f41030
|
oauth2_provider/forms.py
|
oauth2_provider/forms.py
|
from django import forms
class AllowForm(forms.Form):
redirect_uri = forms.URLField(widget=forms.HiddenInput())
scopes = forms.CharField(required=False, widget=forms.HiddenInput())
client_id = forms.CharField(widget=forms.HiddenInput())
state = forms.CharField(required=False, widget=forms.HiddenInput())
response_type = forms.CharField(widget=forms.HiddenInput())
|
from django import forms
class AllowForm(forms.Form):
allow = forms.BooleanField(required=False)
redirect_uri = forms.URLField(widget=forms.HiddenInput())
scopes = forms.CharField(required=False, widget=forms.HiddenInput())
client_id = forms.CharField(widget=forms.HiddenInput())
state = forms.CharField(required=False, widget=forms.HiddenInput())
response_type = forms.CharField(widget=forms.HiddenInput())
|
Add allow field to form
|
Add allow field to form
|
Python
|
bsd-2-clause
|
trbs/django-oauth-toolkit,DeskConnect/django-oauth-toolkit,jensadne/django-oauth-toolkit,vmalavolta/django-oauth-toolkit,JensTimmerman/django-oauth-toolkit,JensTimmerman/django-oauth-toolkit,Gr1N/django-oauth-toolkit,andrefsp/django-oauth-toolkit,jensadne/django-oauth-toolkit,drgarcia1986/django-oauth-toolkit,bleib1dj/django-oauth-toolkit,CloudNcodeInc/django-oauth-toolkit,lzen/django-oauth-toolkit,Gr1N/django-oauth-toolkit,DeskConnect/django-oauth-toolkit,drgarcia1986/django-oauth-toolkit,vmalavolta/django-oauth-toolkit,mjrulesamrat/django-oauth-toolkit,andrefsp/django-oauth-toolkit,StepicOrg/django-oauth-toolkit,bleib1dj/django-oauth-toolkit,trbs/django-oauth-toolkit,CloudNcodeInc/django-oauth-toolkit,mjrulesamrat/django-oauth-toolkit,cheif/django-oauth-toolkit,Knotis/django-oauth-toolkit,lzen/django-oauth-toolkit,svetlyak40wt/django-oauth-toolkit,natgeo/django-oauth-toolkit,cheif/django-oauth-toolkit,Knotis/django-oauth-toolkit,StepicOrg/django-oauth-toolkit,Natgeoed/django-oauth-toolkit
|
from django import forms
class AllowForm(forms.Form):
+ allow = forms.BooleanField(required=False)
redirect_uri = forms.URLField(widget=forms.HiddenInput())
scopes = forms.CharField(required=False, widget=forms.HiddenInput())
client_id = forms.CharField(widget=forms.HiddenInput())
state = forms.CharField(required=False, widget=forms.HiddenInput())
response_type = forms.CharField(widget=forms.HiddenInput())
|
Add allow field to form
|
## Code Before:
from django import forms
class AllowForm(forms.Form):
redirect_uri = forms.URLField(widget=forms.HiddenInput())
scopes = forms.CharField(required=False, widget=forms.HiddenInput())
client_id = forms.CharField(widget=forms.HiddenInput())
state = forms.CharField(required=False, widget=forms.HiddenInput())
response_type = forms.CharField(widget=forms.HiddenInput())
## Instruction:
Add allow field to form
## Code After:
from django import forms
class AllowForm(forms.Form):
allow = forms.BooleanField(required=False)
redirect_uri = forms.URLField(widget=forms.HiddenInput())
scopes = forms.CharField(required=False, widget=forms.HiddenInput())
client_id = forms.CharField(widget=forms.HiddenInput())
state = forms.CharField(required=False, widget=forms.HiddenInput())
response_type = forms.CharField(widget=forms.HiddenInput())
|
from django import forms
class AllowForm(forms.Form):
+ allow = forms.BooleanField(required=False)
redirect_uri = forms.URLField(widget=forms.HiddenInput())
scopes = forms.CharField(required=False, widget=forms.HiddenInput())
client_id = forms.CharField(widget=forms.HiddenInput())
state = forms.CharField(required=False, widget=forms.HiddenInput())
response_type = forms.CharField(widget=forms.HiddenInput())
|
b1bc34e9a83cb3af5dd11baa1236f2b65ab823f9
|
cspreports/models.py
|
cspreports/models.py
|
import json
#LIBRARIES
from django.db import models
from django.utils.html import escape
from django.utils.safestring import mark_safe
class CSPReport(models.Model):
class Meta(object):
ordering = ('-created',)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
json = models.TextField()
@property
def data(self):
""" Returns self.json loaded as a python object. """
try:
data = self._data
except AttributeError:
data = self._data = json.loads(self.json)
return data
def json_as_html(self):
""" Print out self.json in a nice way. """
formatted_json = json.dumps(
self.data, sort_keys=True,
indent=4, separators=(',', ': ')
)
return mark_safe(u"<pre>\n%s</pre>" % escape(formatted_json))
|
from django.db import models
from django.utils.html import escape
from django.utils.safestring import mark_safe
# CSP REPORTS
from cspreports import utils
class CSPReport(models.Model):
class Meta(object):
ordering = ('-created',)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
json = models.TextField()
def json_as_html(self):
""" Print out self.json in a nice way. """
formatted_json = utils.format_report(self.json)
return mark_safe(u"<pre>\n%s</pre>" % escape(formatted_json))
|
Make `CSPReport.json_as_html` use the robust `utils.format_report` for formatting.
|
Make `CSPReport.json_as_html` use the robust `utils.format_report` for formatting.
|
Python
|
mit
|
adamalton/django-csp-reports
|
- import json
-
- #LIBRARIES
from django.db import models
from django.utils.html import escape
from django.utils.safestring import mark_safe
+
+ # CSP REPORTS
+ from cspreports import utils
class CSPReport(models.Model):
class Meta(object):
ordering = ('-created',)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
json = models.TextField()
- @property
- def data(self):
- """ Returns self.json loaded as a python object. """
- try:
- data = self._data
- except AttributeError:
- data = self._data = json.loads(self.json)
- return data
-
def json_as_html(self):
""" Print out self.json in a nice way. """
+ formatted_json = utils.format_report(self.json)
- formatted_json = json.dumps(
- self.data, sort_keys=True,
- indent=4, separators=(',', ': ')
- )
return mark_safe(u"<pre>\n%s</pre>" % escape(formatted_json))
|
Make `CSPReport.json_as_html` use the robust `utils.format_report` for formatting.
|
## Code Before:
import json
#LIBRARIES
from django.db import models
from django.utils.html import escape
from django.utils.safestring import mark_safe
class CSPReport(models.Model):
class Meta(object):
ordering = ('-created',)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
json = models.TextField()
@property
def data(self):
""" Returns self.json loaded as a python object. """
try:
data = self._data
except AttributeError:
data = self._data = json.loads(self.json)
return data
def json_as_html(self):
""" Print out self.json in a nice way. """
formatted_json = json.dumps(
self.data, sort_keys=True,
indent=4, separators=(',', ': ')
)
return mark_safe(u"<pre>\n%s</pre>" % escape(formatted_json))
## Instruction:
Make `CSPReport.json_as_html` use the robust `utils.format_report` for formatting.
## Code After:
from django.db import models
from django.utils.html import escape
from django.utils.safestring import mark_safe
# CSP REPORTS
from cspreports import utils
class CSPReport(models.Model):
class Meta(object):
ordering = ('-created',)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
json = models.TextField()
def json_as_html(self):
""" Print out self.json in a nice way. """
formatted_json = utils.format_report(self.json)
return mark_safe(u"<pre>\n%s</pre>" % escape(formatted_json))
|
- import json
-
- #LIBRARIES
from django.db import models
from django.utils.html import escape
from django.utils.safestring import mark_safe
+
+ # CSP REPORTS
+ from cspreports import utils
class CSPReport(models.Model):
class Meta(object):
ordering = ('-created',)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
json = models.TextField()
- @property
- def data(self):
- """ Returns self.json loaded as a python object. """
- try:
- data = self._data
- except AttributeError:
- data = self._data = json.loads(self.json)
- return data
-
def json_as_html(self):
""" Print out self.json in a nice way. """
+ formatted_json = utils.format_report(self.json)
- formatted_json = json.dumps(
- self.data, sort_keys=True,
- indent=4, separators=(',', ': ')
- )
return mark_safe(u"<pre>\n%s</pre>" % escape(formatted_json))
|
3620bafe1ce573d08fca7db357f4df40d6949cfb
|
flowz/channels/__init__.py
|
flowz/channels/__init__.py
|
from __future__ import absolute_import
from .core import (ChannelDone, Channel, ReadChannel, MapChannel, FlatMapChannel,
FilterChannel, FutureChannel, ReadyFutureChannel, TeeChannel,
ProducerChannel, IterChannel, ZipChannel, CoGroupChannel,
WindowChannel, GroupChannel)
|
from __future__ import absolute_import
from .core import (
Channel,
ChannelDone,
CoGroupChannel,
FilterChannel,
FlatMapChannel,
FutureChannel,
GroupChannel,
IterChannel,
MapChannel,
ProducerChannel,
ReadChannel,
ReadyFutureChannel,
TeeChannel,
WindowChannel,
ZipChannel)
|
Change to one package per line for channel import
|
Change to one package per line for channel import
Resolves #19.
While this doesn't switch to the ideal standard of one import, one
line, it makes the import from `flowz.channels.core` into
`flowz.channels` easier to read and less likely to invite conflicts.
Didn't go to "from .core import" per line primarily due to pragmatism,
though that would normally be my preference.
|
Python
|
mit
|
ethanrowe/flowz,PatrickDRusk/flowz
|
from __future__ import absolute_import
- from .core import (ChannelDone, Channel, ReadChannel, MapChannel, FlatMapChannel,
- FilterChannel, FutureChannel, ReadyFutureChannel, TeeChannel,
- ProducerChannel, IterChannel, ZipChannel, CoGroupChannel,
- WindowChannel, GroupChannel)
+ from .core import (
+ Channel,
+ ChannelDone,
+ CoGroupChannel,
+ FilterChannel,
+ FlatMapChannel,
+ FutureChannel,
+ GroupChannel,
+ IterChannel,
+ MapChannel,
+ ProducerChannel,
+ ReadChannel,
+ ReadyFutureChannel,
+ TeeChannel,
+ WindowChannel,
+ ZipChannel)
+
|
Change to one package per line for channel import
|
## Code Before:
from __future__ import absolute_import
from .core import (ChannelDone, Channel, ReadChannel, MapChannel, FlatMapChannel,
FilterChannel, FutureChannel, ReadyFutureChannel, TeeChannel,
ProducerChannel, IterChannel, ZipChannel, CoGroupChannel,
WindowChannel, GroupChannel)
## Instruction:
Change to one package per line for channel import
## Code After:
from __future__ import absolute_import
from .core import (
Channel,
ChannelDone,
CoGroupChannel,
FilterChannel,
FlatMapChannel,
FutureChannel,
GroupChannel,
IterChannel,
MapChannel,
ProducerChannel,
ReadChannel,
ReadyFutureChannel,
TeeChannel,
WindowChannel,
ZipChannel)
|
from __future__ import absolute_import
- from .core import (ChannelDone, Channel, ReadChannel, MapChannel, FlatMapChannel,
- FilterChannel, FutureChannel, ReadyFutureChannel, TeeChannel,
- ProducerChannel, IterChannel, ZipChannel, CoGroupChannel,
- WindowChannel, GroupChannel)
+ from .core import (
+ Channel,
+ ChannelDone,
+ CoGroupChannel,
+ FilterChannel,
+ FlatMapChannel,
+ FutureChannel,
+ GroupChannel,
+ IterChannel,
+ MapChannel,
+ ProducerChannel,
+ ReadChannel,
+ ReadyFutureChannel,
+ TeeChannel,
+ WindowChannel,
+ ZipChannel)
+
|
1c9feb7b2d9a4ac1a1d3bef42139ec5a7f26b95e
|
jsonrpcclient/__init__.py
|
jsonrpcclient/__init__.py
|
"""__init__.py"""
import logging
logger = logging.getLogger('jsonrpcclient')
logger.addHandler(logging.StreamHandler())
from jsonrpcclient.server import Server
|
"""__init__.py"""
import logging
logger = logging.getLogger('jsonrpcclient')
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.WARNING)
from jsonrpcclient.server import Server
|
Set the loglevel again, seems like in certain situations, the default log level is 0
|
Set the loglevel again, seems like in certain situations, the default log level is 0
|
Python
|
mit
|
bcb/jsonrpcclient
|
"""__init__.py"""
import logging
logger = logging.getLogger('jsonrpcclient')
logger.addHandler(logging.StreamHandler())
+ logger.setLevel(logging.WARNING)
from jsonrpcclient.server import Server
|
Set the loglevel again, seems like in certain situations, the default log level is 0
|
## Code Before:
"""__init__.py"""
import logging
logger = logging.getLogger('jsonrpcclient')
logger.addHandler(logging.StreamHandler())
from jsonrpcclient.server import Server
## Instruction:
Set the loglevel again, seems like in certain situations, the default log level is 0
## Code After:
"""__init__.py"""
import logging
logger = logging.getLogger('jsonrpcclient')
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.WARNING)
from jsonrpcclient.server import Server
|
"""__init__.py"""
import logging
logger = logging.getLogger('jsonrpcclient')
logger.addHandler(logging.StreamHandler())
+ logger.setLevel(logging.WARNING)
from jsonrpcclient.server import Server
|
48ab9fa0e54103a08fec54d8a4d4870dc701d918
|
genes/systemd/commands.py
|
genes/systemd/commands.py
|
from subprocess import Popen
from typing import List
def systemctl(*args: List[str]):
Popen(['systemctl'] + list(args))
def start(service: str):
systemctl('start', service)
def stop(service: str):
systemctl('stop', service)
def restart(service: str):
systemctl('restart', service)
def reload(service: str):
systemctl('reload', service)
|
from subprocess import Popen
from typing import Tuple
def systemctl(*args: Tuple[str, ...]) -> None:
Popen(['systemctl'] + list(args))
def disable(*services: Tuple[str, ...]) -> None:
return systemctl('disable', *services)
def enable(*services: Tuple[str, ...]) -> None:
return systemctl('enable', *services)
def start(*services: Tuple[str, ...]) -> None:
return systemctl('start', *services)
def stop(*services: Tuple[str, ...]) -> None:
return systemctl('stop', *services)
def reload(*services: Tuple[str, ...]) -> None:
return systemctl('reload', *services)
def restart(services: Tuple[str, ...]) -> None:
return systemctl('restart', *services)
|
Add more functions, improve type checking
|
Add more functions, improve type checking
|
Python
|
mit
|
hatchery/genepool,hatchery/Genepool2
|
from subprocess import Popen
- from typing import List
+ from typing import Tuple
- def systemctl(*args: List[str]):
+ def systemctl(*args: Tuple[str, ...]) -> None:
Popen(['systemctl'] + list(args))
- def start(service: str):
+ def disable(*services: Tuple[str, ...]) -> None:
- systemctl('start', service)
+ return systemctl('disable', *services)
- def stop(service: str):
- systemctl('stop', service)
+ def enable(*services: Tuple[str, ...]) -> None:
+ return systemctl('enable', *services)
- def restart(service: str):
+ def start(*services: Tuple[str, ...]) -> None:
- systemctl('restart', service)
+ return systemctl('start', *services)
- def reload(service: str):
+ def stop(*services: Tuple[str, ...]) -> None:
- systemctl('reload', service)
+ return systemctl('stop', *services)
+
+ def reload(*services: Tuple[str, ...]) -> None:
+ return systemctl('reload', *services)
+
+
+ def restart(services: Tuple[str, ...]) -> None:
+ return systemctl('restart', *services)
+
|
Add more functions, improve type checking
|
## Code Before:
from subprocess import Popen
from typing import List
def systemctl(*args: List[str]):
Popen(['systemctl'] + list(args))
def start(service: str):
systemctl('start', service)
def stop(service: str):
systemctl('stop', service)
def restart(service: str):
systemctl('restart', service)
def reload(service: str):
systemctl('reload', service)
## Instruction:
Add more functions, improve type checking
## Code After:
from subprocess import Popen
from typing import Tuple
def systemctl(*args: Tuple[str, ...]) -> None:
Popen(['systemctl'] + list(args))
def disable(*services: Tuple[str, ...]) -> None:
return systemctl('disable', *services)
def enable(*services: Tuple[str, ...]) -> None:
return systemctl('enable', *services)
def start(*services: Tuple[str, ...]) -> None:
return systemctl('start', *services)
def stop(*services: Tuple[str, ...]) -> None:
return systemctl('stop', *services)
def reload(*services: Tuple[str, ...]) -> None:
return systemctl('reload', *services)
def restart(services: Tuple[str, ...]) -> None:
return systemctl('restart', *services)
|
from subprocess import Popen
- from typing import List
? ^^^^
+ from typing import Tuple
? ^^^^^
- def systemctl(*args: List[str]):
+ def systemctl(*args: Tuple[str, ...]) -> None:
Popen(['systemctl'] + list(args))
- def start(service: str):
+ def disable(*services: Tuple[str, ...]) -> None:
- systemctl('start', service)
? - ^^
+ return systemctl('disable', *services)
? +++++++ ++ ^^^ + +
- def stop(service: str):
- systemctl('stop', service)
+ def enable(*services: Tuple[str, ...]) -> None:
+ return systemctl('enable', *services)
- def restart(service: str):
+ def start(*services: Tuple[str, ...]) -> None:
- systemctl('restart', service)
? --
+ return systemctl('start', *services)
? +++++++ + +
- def reload(service: str):
+ def stop(*services: Tuple[str, ...]) -> None:
+ return systemctl('stop', *services)
+
+
+ def reload(*services: Tuple[str, ...]) -> None:
- systemctl('reload', service)
+ return systemctl('reload', *services)
? +++++++ + +
+
+
+ def restart(services: Tuple[str, ...]) -> None:
+ return systemctl('restart', *services)
|
4a711a2709ec5d8a8e04bb0f735fcfaa319cffdf
|
designate/objects/validation_error.py
|
designate/objects/validation_error.py
|
import six
from designate.objects import base
class ValidationError(base.DesignateObject):
FIELDS = {
'path': {},
'message': {},
'validator': {},
'validator_value': {},
'raw': {},
}
@classmethod
def from_js_error(cls, js_error):
"""Convert a JSON Schema ValidationError instance into a
ValidationError instance.
"""
e = cls()
e.path = list(getattr(js_error, 'releative_path', js_error.path))
e.message = six.text_type(js_error)
e.validator = js_error.validator
e.validator_value = js_error.validator_value
e.raw = js_error._contents()
return e
class ValidationErrorList(base.ListObjectMixin, base.DesignateObject):
LIST_ITEM_TYPE = ValidationError
|
from designate.objects import base
class ValidationError(base.DesignateObject):
FIELDS = {
'path': {},
'message': {},
'validator': {},
'validator_value': {},
'raw': {},
}
@classmethod
def from_js_error(cls, js_error):
"""Convert a JSON Schema ValidationError instance into a
ValidationError instance.
"""
e = cls()
e.path = list(getattr(js_error, 'releative_path', js_error.path))
e.message = js_error.message
e.validator = js_error.validator
e.validator_value = js_error.validator_value
e.raw = js_error._contents()
return e
class ValidationErrorList(base.ListObjectMixin, base.DesignateObject):
LIST_ITEM_TYPE = ValidationError
|
Fix the displayed error message in V2 API
|
Fix the displayed error message in V2 API
Change-Id: I07c3f1ed79fa507dbe9b76eb8f5964475516754c
|
Python
|
apache-2.0
|
tonyli71/designate,openstack/designate,ionrock/designate,ionrock/designate,ramsateesh/designate,grahamhayes/designate,cneill/designate-testing,muraliselva10/designate,muraliselva10/designate,cneill/designate-testing,openstack/designate,tonyli71/designate,muraliselva10/designate,grahamhayes/designate,ionrock/designate,tonyli71/designate,grahamhayes/designate,openstack/designate,ramsateesh/designate,cneill/designate-testing,ramsateesh/designate
|
- import six
-
from designate.objects import base
class ValidationError(base.DesignateObject):
FIELDS = {
'path': {},
'message': {},
'validator': {},
'validator_value': {},
'raw': {},
}
@classmethod
def from_js_error(cls, js_error):
"""Convert a JSON Schema ValidationError instance into a
ValidationError instance.
"""
e = cls()
e.path = list(getattr(js_error, 'releative_path', js_error.path))
- e.message = six.text_type(js_error)
+ e.message = js_error.message
e.validator = js_error.validator
e.validator_value = js_error.validator_value
e.raw = js_error._contents()
return e
class ValidationErrorList(base.ListObjectMixin, base.DesignateObject):
LIST_ITEM_TYPE = ValidationError
|
Fix the displayed error message in V2 API
|
## Code Before:
import six
from designate.objects import base
class ValidationError(base.DesignateObject):
FIELDS = {
'path': {},
'message': {},
'validator': {},
'validator_value': {},
'raw': {},
}
@classmethod
def from_js_error(cls, js_error):
"""Convert a JSON Schema ValidationError instance into a
ValidationError instance.
"""
e = cls()
e.path = list(getattr(js_error, 'releative_path', js_error.path))
e.message = six.text_type(js_error)
e.validator = js_error.validator
e.validator_value = js_error.validator_value
e.raw = js_error._contents()
return e
class ValidationErrorList(base.ListObjectMixin, base.DesignateObject):
LIST_ITEM_TYPE = ValidationError
## Instruction:
Fix the displayed error message in V2 API
## Code After:
from designate.objects import base
class ValidationError(base.DesignateObject):
FIELDS = {
'path': {},
'message': {},
'validator': {},
'validator_value': {},
'raw': {},
}
@classmethod
def from_js_error(cls, js_error):
"""Convert a JSON Schema ValidationError instance into a
ValidationError instance.
"""
e = cls()
e.path = list(getattr(js_error, 'releative_path', js_error.path))
e.message = js_error.message
e.validator = js_error.validator
e.validator_value = js_error.validator_value
e.raw = js_error._contents()
return e
class ValidationErrorList(base.ListObjectMixin, base.DesignateObject):
LIST_ITEM_TYPE = ValidationError
|
- import six
-
from designate.objects import base
class ValidationError(base.DesignateObject):
FIELDS = {
'path': {},
'message': {},
'validator': {},
'validator_value': {},
'raw': {},
}
@classmethod
def from_js_error(cls, js_error):
"""Convert a JSON Schema ValidationError instance into a
ValidationError instance.
"""
e = cls()
e.path = list(getattr(js_error, 'releative_path', js_error.path))
- e.message = six.text_type(js_error)
+ e.message = js_error.message
e.validator = js_error.validator
e.validator_value = js_error.validator_value
e.raw = js_error._contents()
return e
class ValidationErrorList(base.ListObjectMixin, base.DesignateObject):
LIST_ITEM_TYPE = ValidationError
|
0c1b0a7787bd6824815ae208edab8f208b53af09
|
api/base/exceptions.py
|
api/base/exceptions.py
|
def jsonapi_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array with a 'detail' member
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
if response is not None:
if 'detail' in response.data:
response.data = {'errors': [response.data]}
else:
response.data = {'errors': [{'detail': response.data}]}
if response is not None and response.data['errors'][0]['detail'] == "Authentication credentials were not provided.":
response.status_code = 401
return response
|
def jsonapi_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array with a 'detail' member
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
if response is not None:
if 'detail' in response.data:
response.data = {'errors': [response.data]}
else:
response.data = {'errors': [{'detail': response.data}]}
# Returns 401 instead of 403 during unauthorized requests without having user to log in with Basic Auth
if response is not None and response.data['errors'][0]['detail'] == "Authentication credentials were not provided.":
response.status_code = 401
return response
|
Add comment to override of status code
|
Add comment to override of status code
|
Python
|
apache-2.0
|
Ghalko/osf.io,billyhunt/osf.io,wearpants/osf.io,asanfilippo7/osf.io,ticklemepierce/osf.io,kch8qx/osf.io,GageGaskins/osf.io,njantrania/osf.io,mluke93/osf.io,baylee-d/osf.io,alexschiller/osf.io,asanfilippo7/osf.io,wearpants/osf.io,kwierman/osf.io,leb2dg/osf.io,leb2dg/osf.io,chennan47/osf.io,arpitar/osf.io,leb2dg/osf.io,erinspace/osf.io,Johnetordoff/osf.io,Ghalko/osf.io,petermalcolm/osf.io,adlius/osf.io,abought/osf.io,icereval/osf.io,cwisecarver/osf.io,samchrisinger/osf.io,saradbowman/osf.io,DanielSBrown/osf.io,sbt9uc/osf.io,HalcyonChimera/osf.io,kwierman/osf.io,GageGaskins/osf.io,brianjgeiger/osf.io,felliott/osf.io,Nesiehr/osf.io,felliott/osf.io,SSJohns/osf.io,baylee-d/osf.io,KAsante95/osf.io,billyhunt/osf.io,sbt9uc/osf.io,mluke93/osf.io,mluo613/osf.io,RomanZWang/osf.io,Johnetordoff/osf.io,samanehsan/osf.io,mluke93/osf.io,crcresearch/osf.io,ticklemepierce/osf.io,petermalcolm/osf.io,caseyrollins/osf.io,RomanZWang/osf.io,Nesiehr/osf.io,amyshi188/osf.io,laurenrevere/osf.io,cwisecarver/osf.io,billyhunt/osf.io,arpitar/osf.io,brandonPurvis/osf.io,rdhyee/osf.io,samchrisinger/osf.io,emetsger/osf.io,Ghalko/osf.io,haoyuchen1992/osf.io,alexschiller/osf.io,doublebits/osf.io,abought/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,samanehsan/osf.io,monikagrabowska/osf.io,rdhyee/osf.io,ZobairAlijan/osf.io,laurenrevere/osf.io,SSJohns/osf.io,aaxelb/osf.io,ZobairAlijan/osf.io,kwierman/osf.io,Johnetordoff/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,alexschiller/osf.io,sbt9uc/osf.io,TomHeatwole/osf.io,pattisdr/osf.io,Nesiehr/osf.io,binoculars/osf.io,kwierman/osf.io,emetsger/osf.io,petermalcolm/osf.io,zachjanicki/osf.io,zamattiac/osf.io,crcresearch/osf.io,felliott/osf.io,doublebits/osf.io,rdhyee/osf.io,leb2dg/osf.io,alexschiller/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,acshi/osf.io,njantrania/osf.io,billyhunt/osf.io,SSJohns/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,cslzchen/osf.io,zamattiac/osf.io,emetsger/osf.io,abought/osf.io,mluke93/osf.io,RomanZWang/osf.io,dani
elneis/osf.io,acshi/osf.io,danielneis/osf.io,erinspace/osf.io,samchrisinger/osf.io,wearpants/osf.io,aaxelb/osf.io,rdhyee/osf.io,doublebits/osf.io,zamattiac/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,KAsante95/osf.io,GageGaskins/osf.io,kch8qx/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,hmoco/osf.io,icereval/osf.io,danielneis/osf.io,TomHeatwole/osf.io,njantrania/osf.io,haoyuchen1992/osf.io,caseyrygt/osf.io,SSJohns/osf.io,GageGaskins/osf.io,ZobairAlijan/osf.io,CenterForOpenScience/osf.io,asanfilippo7/osf.io,brandonPurvis/osf.io,zamattiac/osf.io,njantrania/osf.io,ticklemepierce/osf.io,RomanZWang/osf.io,zachjanicki/osf.io,jnayak1/osf.io,billyhunt/osf.io,caseyrollins/osf.io,caseyrollins/osf.io,mluo613/osf.io,chennan47/osf.io,binoculars/osf.io,hmoco/osf.io,monikagrabowska/osf.io,amyshi188/osf.io,samanehsan/osf.io,caneruguz/osf.io,kch8qx/osf.io,doublebits/osf.io,samanehsan/osf.io,acshi/osf.io,TomHeatwole/osf.io,aaxelb/osf.io,sloria/osf.io,KAsante95/osf.io,sbt9uc/osf.io,monikagrabowska/osf.io,caseyrygt/osf.io,caseyrygt/osf.io,mfraezz/osf.io,DanielSBrown/osf.io,laurenrevere/osf.io,petermalcolm/osf.io,wearpants/osf.io,emetsger/osf.io,mluo613/osf.io,caneruguz/osf.io,haoyuchen1992/osf.io,zachjanicki/osf.io,chrisseto/osf.io,chrisseto/osf.io,ticklemepierce/osf.io,brandonPurvis/osf.io,brandonPurvis/osf.io,adlius/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,sloria/osf.io,monikagrabowska/osf.io,cosenal/osf.io,doublebits/osf.io,TomBaxter/osf.io,DanielSBrown/osf.io,KAsante95/osf.io,saradbowman/osf.io,haoyuchen1992/osf.io,mfraezz/osf.io,kch8qx/osf.io,crcresearch/osf.io,jnayak1/osf.io,abought/osf.io,cosenal/osf.io,cslzchen/osf.io,icereval/osf.io,monikagrabowska/osf.io,mluo613/osf.io,mfraezz/osf.io,danielneis/osf.io,sloria/osf.io,jnayak1/osf.io,acshi/osf.io,caseyrygt/osf.io,mluo613/osf.io,cwisecarver/osf.io,cosenal/osf.io,hmoco/osf.io,amyshi188/osf.io,acshi/osf.io,TomHeatwole/osf.io,asanfilippo7/osf.io,RomanZWang/osf.io,aaxelb/osf.io,adlius/osf.io,KAsante95/osf.io,TomBaxter/osf.io,mattclark/
osf.io,pattisdr/osf.io,brandonPurvis/osf.io,jnayak1/osf.io,arpitar/osf.io,binoculars/osf.io,arpitar/osf.io,brianjgeiger/osf.io,cosenal/osf.io,mfraezz/osf.io,alexschiller/osf.io,baylee-d/osf.io,chrisseto/osf.io,GageGaskins/osf.io,mattclark/osf.io,zachjanicki/osf.io,chennan47/osf.io,CenterForOpenScience/osf.io,kch8qx/osf.io,samchrisinger/osf.io,brianjgeiger/osf.io,adlius/osf.io,cslzchen/osf.io,Nesiehr/osf.io,ZobairAlijan/osf.io,Ghalko/osf.io,erinspace/osf.io,amyshi188/osf.io,cwisecarver/osf.io,felliott/osf.io,mattclark/osf.io,cslzchen/osf.io
|
def jsonapi_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array with a 'detail' member
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
if response is not None:
if 'detail' in response.data:
response.data = {'errors': [response.data]}
else:
response.data = {'errors': [{'detail': response.data}]}
+ # Returns 401 instead of 403 during unauthorized requests without having user to log in with Basic Auth
if response is not None and response.data['errors'][0]['detail'] == "Authentication credentials were not provided.":
response.status_code = 401
return response
|
Add comment to override of status code
|
## Code Before:
def jsonapi_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array with a 'detail' member
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
if response is not None:
if 'detail' in response.data:
response.data = {'errors': [response.data]}
else:
response.data = {'errors': [{'detail': response.data}]}
if response is not None and response.data['errors'][0]['detail'] == "Authentication credentials were not provided.":
response.status_code = 401
return response
## Instruction:
Add comment to override of status code
## Code After:
def jsonapi_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array with a 'detail' member
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
if response is not None:
if 'detail' in response.data:
response.data = {'errors': [response.data]}
else:
response.data = {'errors': [{'detail': response.data}]}
# Returns 401 instead of 403 during unauthorized requests without having user to log in with Basic Auth
if response is not None and response.data['errors'][0]['detail'] == "Authentication credentials were not provided.":
response.status_code = 401
return response
|
def jsonapi_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array with a 'detail' member
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
if response is not None:
if 'detail' in response.data:
response.data = {'errors': [response.data]}
else:
response.data = {'errors': [{'detail': response.data}]}
+ # Returns 401 instead of 403 during unauthorized requests without having user to log in with Basic Auth
if response is not None and response.data['errors'][0]['detail'] == "Authentication credentials were not provided.":
response.status_code = 401
return response
|
83f62bd5993ba253183f120567a2a42108c4b7b4
|
setup.py
|
setup.py
|
from distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
packages=['mahjong'],
version='1.0.1',
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='[email protected]',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
|
from distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
Right now it supports only japanese version (riichi mahjong). MCR (chinese version) in plans
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
packages=[
'mahjong',
'mahjong.hand_calculating',
'mahjong.hand_calculating.yaku_list',
'mahjong.hand_calculating.yaku_list.yakuman',
],
version='1.0.2',
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='[email protected]',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
|
Add missed packages tot he build script
|
Add missed packages tot he build script
|
Python
|
mit
|
MahjongRepository/mahjong
|
from distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
+
+ Right now it supports only japanese version (riichi mahjong). MCR (chinese version) in plans
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
- packages=['mahjong'],
+ packages=[
+ 'mahjong',
+ 'mahjong.hand_calculating',
+ 'mahjong.hand_calculating.yaku_list',
+ 'mahjong.hand_calculating.yaku_list.yakuman',
+ ],
- version='1.0.1',
+ version='1.0.2',
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='[email protected]',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
|
Add missed packages tot he build script
|
## Code Before:
from distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
packages=['mahjong'],
version='1.0.1',
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='[email protected]',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
## Instruction:
Add missed packages tot he build script
## Code After:
from distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
Right now it supports only japanese version (riichi mahjong). MCR (chinese version) in plans
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
packages=[
'mahjong',
'mahjong.hand_calculating',
'mahjong.hand_calculating.yaku_list',
'mahjong.hand_calculating.yaku_list.yakuman',
],
version='1.0.2',
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='[email protected]',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
|
from distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
+
+ Right now it supports only japanese version (riichi mahjong). MCR (chinese version) in plans
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
- packages=['mahjong'],
+ packages=[
+ 'mahjong',
+ 'mahjong.hand_calculating',
+ 'mahjong.hand_calculating.yaku_list',
+ 'mahjong.hand_calculating.yaku_list.yakuman',
+ ],
- version='1.0.1',
? ^
+ version='1.0.2',
? ^
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='[email protected]',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
|
3a0cf1f6114d6c80909f90fe122b026908200b0a
|
IPython/nbconvert/exporters/markdown.py
|
IPython/nbconvert/exporters/markdown.py
|
"""Markdown Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.config import Config
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class MarkdownExporter(TemplateExporter):
"""
Exports to a markdown document (.md)
"""
def _file_extension_default(self):
return 'md'
def _template_file_default(self):
return 'markdown'
output_mimetype = 'text/markdown'
def _raw_mimetypes_default(self):
return ['text/markdown', 'text/html', '']
@property
def default_config(self):
c = Config({
'NbConvertBase': {
'display_data_priority': ['html', 'application/pdf', 'svg', 'latex', 'png', 'jpg', 'jpeg' , 'text']
},
'ExtractOutputPreprocessor': {
'enabled':True}
})
c.merge(super(MarkdownExporter,self).default_config)
return c
|
"""Markdown Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.config import Config
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class MarkdownExporter(TemplateExporter):
"""
Exports to a markdown document (.md)
"""
def _file_extension_default(self):
return 'md'
def _template_file_default(self):
return 'markdown'
output_mimetype = 'text/markdown'
def _raw_mimetypes_default(self):
return ['text/markdown', 'text/html', '']
@property
def default_config(self):
c = Config({'ExtractOutputPreprocessor':{'enabled':True}})
c.merge(super(MarkdownExporter,self).default_config)
return c
|
Revert "Removed Javascript from Markdown by adding display priority to def config."
|
Revert "Removed Javascript from Markdown by adding display priority to def config."
This reverts commit 58e05f9625c60f8deba9ddf1c74dba73e8ea7dd1.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
"""Markdown Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.config import Config
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class MarkdownExporter(TemplateExporter):
"""
Exports to a markdown document (.md)
"""
def _file_extension_default(self):
return 'md'
def _template_file_default(self):
return 'markdown'
output_mimetype = 'text/markdown'
def _raw_mimetypes_default(self):
return ['text/markdown', 'text/html', '']
@property
def default_config(self):
+ c = Config({'ExtractOutputPreprocessor':{'enabled':True}})
- c = Config({
- 'NbConvertBase': {
- 'display_data_priority': ['html', 'application/pdf', 'svg', 'latex', 'png', 'jpg', 'jpeg' , 'text']
- },
- 'ExtractOutputPreprocessor': {
- 'enabled':True}
- })
c.merge(super(MarkdownExporter,self).default_config)
return c
|
Revert "Removed Javascript from Markdown by adding display priority to def config."
|
## Code Before:
"""Markdown Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.config import Config
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class MarkdownExporter(TemplateExporter):
"""
Exports to a markdown document (.md)
"""
def _file_extension_default(self):
return 'md'
def _template_file_default(self):
return 'markdown'
output_mimetype = 'text/markdown'
def _raw_mimetypes_default(self):
return ['text/markdown', 'text/html', '']
@property
def default_config(self):
c = Config({
'NbConvertBase': {
'display_data_priority': ['html', 'application/pdf', 'svg', 'latex', 'png', 'jpg', 'jpeg' , 'text']
},
'ExtractOutputPreprocessor': {
'enabled':True}
})
c.merge(super(MarkdownExporter,self).default_config)
return c
## Instruction:
Revert "Removed Javascript from Markdown by adding display priority to def config."
## Code After:
"""Markdown Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.config import Config
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class MarkdownExporter(TemplateExporter):
"""
Exports to a markdown document (.md)
"""
def _file_extension_default(self):
return 'md'
def _template_file_default(self):
return 'markdown'
output_mimetype = 'text/markdown'
def _raw_mimetypes_default(self):
return ['text/markdown', 'text/html', '']
@property
def default_config(self):
c = Config({'ExtractOutputPreprocessor':{'enabled':True}})
c.merge(super(MarkdownExporter,self).default_config)
return c
|
"""Markdown Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.config import Config
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class MarkdownExporter(TemplateExporter):
"""
Exports to a markdown document (.md)
"""
def _file_extension_default(self):
return 'md'
def _template_file_default(self):
return 'markdown'
output_mimetype = 'text/markdown'
def _raw_mimetypes_default(self):
return ['text/markdown', 'text/html', '']
@property
def default_config(self):
+ c = Config({'ExtractOutputPreprocessor':{'enabled':True}})
- c = Config({
- 'NbConvertBase': {
- 'display_data_priority': ['html', 'application/pdf', 'svg', 'latex', 'png', 'jpg', 'jpeg' , 'text']
- },
- 'ExtractOutputPreprocessor': {
- 'enabled':True}
- })
c.merge(super(MarkdownExporter,self).default_config)
return c
|
0c785e349c2000bbf3b22671071a66eaca4d82d0
|
astropy/io/votable/__init__.py
|
astropy/io/votable/__init__.py
|
from .table import (
parse, parse_single_table, validate, from_table, is_votable)
from .exceptions import (
VOWarning, VOTableChangeWarning, VOTableSpecWarning, UnimplementedWarning,
IOWarning, VOTableSpecError)
__all__ = [
'parse', 'parse_single_table', 'validate', 'from_table',
'is_votable', 'VOWarning', 'VOTableChangeWarning', 'VOTableSpecWarning',
'UnimplementedWarning', 'IOWarning', 'VOTableSpecError']
|
from .table import (
parse, parse_single_table, validate, from_table, is_votable, writeto)
from .exceptions import (
VOWarning, VOTableChangeWarning, VOTableSpecWarning, UnimplementedWarning,
IOWarning, VOTableSpecError)
__all__ = [
'parse', 'parse_single_table', 'validate', 'from_table',
'is_votable', 'writeto', 'VOWarning', 'VOTableChangeWarning',
'VOTableSpecWarning', 'UnimplementedWarning', 'IOWarning',
'VOTableSpecError']
|
Put astropy.io.votable.writeto in the top-level namespace
|
Put astropy.io.votable.writeto in the top-level namespace
|
Python
|
bsd-3-clause
|
DougBurke/astropy,AustereCuriosity/astropy,funbaker/astropy,joergdietrich/astropy,StuartLittlefair/astropy,larrybradley/astropy,tbabej/astropy,mhvk/astropy,pllim/astropy,stargaser/astropy,lpsinger/astropy,joergdietrich/astropy,lpsinger/astropy,AustereCuriosity/astropy,kelle/astropy,saimn/astropy,DougBurke/astropy,bsipocz/astropy,mhvk/astropy,pllim/astropy,StuartLittlefair/astropy,astropy/astropy,saimn/astropy,dhomeier/astropy,StuartLittlefair/astropy,tbabej/astropy,joergdietrich/astropy,larrybradley/astropy,aleksandr-bakanov/astropy,funbaker/astropy,lpsinger/astropy,tbabej/astropy,AustereCuriosity/astropy,larrybradley/astropy,larrybradley/astropy,kelle/astropy,lpsinger/astropy,dhomeier/astropy,bsipocz/astropy,bsipocz/astropy,pllim/astropy,astropy/astropy,lpsinger/astropy,StuartLittlefair/astropy,joergdietrich/astropy,kelle/astropy,pllim/astropy,astropy/astropy,aleksandr-bakanov/astropy,MSeifert04/astropy,joergdietrich/astropy,stargaser/astropy,saimn/astropy,tbabej/astropy,AustereCuriosity/astropy,aleksandr-bakanov/astropy,mhvk/astropy,saimn/astropy,mhvk/astropy,dhomeier/astropy,DougBurke/astropy,dhomeier/astropy,funbaker/astropy,DougBurke/astropy,funbaker/astropy,mhvk/astropy,MSeifert04/astropy,larrybradley/astropy,stargaser/astropy,stargaser/astropy,bsipocz/astropy,kelle/astropy,saimn/astropy,astropy/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,kelle/astropy,MSeifert04/astropy,MSeifert04/astropy,astropy/astropy,tbabej/astropy,StuartLittlefair/astropy,pllim/astropy,AustereCuriosity/astropy
|
from .table import (
- parse, parse_single_table, validate, from_table, is_votable)
+ parse, parse_single_table, validate, from_table, is_votable, writeto)
from .exceptions import (
VOWarning, VOTableChangeWarning, VOTableSpecWarning, UnimplementedWarning,
IOWarning, VOTableSpecError)
__all__ = [
'parse', 'parse_single_table', 'validate', 'from_table',
- 'is_votable', 'VOWarning', 'VOTableChangeWarning', 'VOTableSpecWarning',
+ 'is_votable', 'writeto', 'VOWarning', 'VOTableChangeWarning',
- 'UnimplementedWarning', 'IOWarning', 'VOTableSpecError']
+ 'VOTableSpecWarning', 'UnimplementedWarning', 'IOWarning',
+ 'VOTableSpecError']
|
Put astropy.io.votable.writeto in the top-level namespace
|
## Code Before:
from .table import (
parse, parse_single_table, validate, from_table, is_votable)
from .exceptions import (
VOWarning, VOTableChangeWarning, VOTableSpecWarning, UnimplementedWarning,
IOWarning, VOTableSpecError)
__all__ = [
'parse', 'parse_single_table', 'validate', 'from_table',
'is_votable', 'VOWarning', 'VOTableChangeWarning', 'VOTableSpecWarning',
'UnimplementedWarning', 'IOWarning', 'VOTableSpecError']
## Instruction:
Put astropy.io.votable.writeto in the top-level namespace
## Code After:
from .table import (
parse, parse_single_table, validate, from_table, is_votable, writeto)
from .exceptions import (
VOWarning, VOTableChangeWarning, VOTableSpecWarning, UnimplementedWarning,
IOWarning, VOTableSpecError)
__all__ = [
'parse', 'parse_single_table', 'validate', 'from_table',
'is_votable', 'writeto', 'VOWarning', 'VOTableChangeWarning',
'VOTableSpecWarning', 'UnimplementedWarning', 'IOWarning',
'VOTableSpecError']
|
from .table import (
- parse, parse_single_table, validate, from_table, is_votable)
+ parse, parse_single_table, validate, from_table, is_votable, writeto)
? +++++++++
from .exceptions import (
VOWarning, VOTableChangeWarning, VOTableSpecWarning, UnimplementedWarning,
IOWarning, VOTableSpecError)
__all__ = [
'parse', 'parse_single_table', 'validate', 'from_table',
- 'is_votable', 'VOWarning', 'VOTableChangeWarning', 'VOTableSpecWarning',
? ----------------------
+ 'is_votable', 'writeto', 'VOWarning', 'VOTableChangeWarning',
? +++++++++++
- 'UnimplementedWarning', 'IOWarning', 'VOTableSpecError']
+ 'VOTableSpecWarning', 'UnimplementedWarning', 'IOWarning',
+ 'VOTableSpecError']
|
d562756f6b48366508db6ef9ffb27e3d5c707845
|
root/main.py
|
root/main.py
|
from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
driver, waiter, selector = init()
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test')
|
from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
driver, waiter, selector, datapath = init()
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test')
|
Fix bug in example code
|
Fix bug in example code
Fixes:
line 6, in query_google
driver, waiter, selector = init()
ValueError: too many values to unpack (expected 3)
|
Python
|
apache-2.0
|
weihanwang/webdriver-python,weihanwang/webdriver-python
|
from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
- driver, waiter, selector = init()
+ driver, waiter, selector, datapath = init()
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test')
+
|
Fix bug in example code
|
## Code Before:
from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
driver, waiter, selector = init()
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test')
## Instruction:
Fix bug in example code
## Code After:
from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
driver, waiter, selector, datapath = init()
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test')
|
from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
- driver, waiter, selector = init()
+ driver, waiter, selector, datapath = init()
? ++++++++++
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test')
|
d101b7f023db1583ca7b65899bfdef296f838ad2
|
openspending/ui/validation/source.py
|
openspending/ui/validation/source.py
|
from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
"at the moment."
return True
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
nonempty_string,
valid_url
)))
return schema
|
from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
"at the moment."
return True
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
nonempty_string,
valid_url
)))
return schema
|
Fix PEP8 issues in openspending/ui/validation.
|
Fix PEP8 issues in openspending/ui/validation.
|
Python
|
agpl-3.0
|
CivicVision/datahub,openspending/spendb,CivicVision/datahub,spendb/spendb,spendb/spendb,johnjohndoe/spendb,USStateDept/FPA_Core,nathanhilbert/FPA_Core,openspending/spendb,spendb/spendb,USStateDept/FPA_Core,USStateDept/FPA_Core,johnjohndoe/spendb,openspending/spendb,nathanhilbert/FPA_Core,johnjohndoe/spendb,pudo/spendb,nathanhilbert/FPA_Core,CivicVision/datahub,pudo/spendb,pudo/spendb
|
from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
+
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
- "at the moment."
+ "at the moment."
return True
+
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
- nonempty_string,
+ nonempty_string,
- valid_url
+ valid_url
- )))
+ )))
return schema
-
-
-
-
|
Fix PEP8 issues in openspending/ui/validation.
|
## Code Before:
from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
"at the moment."
return True
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
nonempty_string,
valid_url
)))
return schema
## Instruction:
Fix PEP8 issues in openspending/ui/validation.
## Code After:
from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
"at the moment."
return True
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
nonempty_string,
valid_url
)))
return schema
|
from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
+
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
- "at the moment."
? -
+ "at the moment."
return True
+
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
- nonempty_string,
? ----
+ nonempty_string,
- valid_url
? ----
+ valid_url
- )))
? ----
+ )))
return schema
-
-
-
-
|
3313d611d7cc66bf607a341a5d9a6a5d96dfbec5
|
clowder_server/emailer.py
|
clowder_server/emailer.py
|
import os
import requests
from django.core.mail import send_mail
from clowder_account.models import ClowderUser
ADMIN_EMAIL = '[email protected]'
def send_alert(company, name):
for user in ClowderUser.objects.filter(company=company, allow_email_notifications=True):
subject = 'FAILURE: %s' % (name)
body = subject
if user.company_id == 86:
slack_token = os.getenv('PARKME_SLACK_TOKEN')
url = 'https://hooks.slack.com/services/%s' % (slack_token)
payload = {"username": "devopsbot", "text": body, "icon_emoji": ":robot_face:"}
requests.post(url, json=payload)
send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
|
import os
import requests
from django.core.mail import send_mail
from clowder_account.models import ClowderUser
ADMIN_EMAIL = '[email protected]'
def send_alert(company, name):
slack_sent = False
for user in ClowderUser.objects.filter(company=company, allow_email_notifications=True):
subject = 'FAILURE: %s' % (name)
body = subject
if user.company_id == 86 and not slack_sent:
slack_token = os.getenv('PARKME_SLACK_TOKEN')
url = 'https://hooks.slack.com/services/%s' % (slack_token)
payload = {"username": "clowder", "text": body, "icon_emoji": ":clowder:"}
requests.post(url, json=payload)
slack_sent = True
send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
|
Rename bot and prevent channel spamming
|
Rename bot and prevent channel spamming
|
Python
|
agpl-3.0
|
keithhackbarth/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server
|
import os
import requests
from django.core.mail import send_mail
from clowder_account.models import ClowderUser
ADMIN_EMAIL = '[email protected]'
def send_alert(company, name):
+ slack_sent = False
for user in ClowderUser.objects.filter(company=company, allow_email_notifications=True):
subject = 'FAILURE: %s' % (name)
body = subject
- if user.company_id == 86:
+ if user.company_id == 86 and not slack_sent:
slack_token = os.getenv('PARKME_SLACK_TOKEN')
url = 'https://hooks.slack.com/services/%s' % (slack_token)
- payload = {"username": "devopsbot", "text": body, "icon_emoji": ":robot_face:"}
+ payload = {"username": "clowder", "text": body, "icon_emoji": ":clowder:"}
requests.post(url, json=payload)
+ slack_sent = True
send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
|
Rename bot and prevent channel spamming
|
## Code Before:
import os
import requests
from django.core.mail import send_mail
from clowder_account.models import ClowderUser
ADMIN_EMAIL = '[email protected]'
def send_alert(company, name):
for user in ClowderUser.objects.filter(company=company, allow_email_notifications=True):
subject = 'FAILURE: %s' % (name)
body = subject
if user.company_id == 86:
slack_token = os.getenv('PARKME_SLACK_TOKEN')
url = 'https://hooks.slack.com/services/%s' % (slack_token)
payload = {"username": "devopsbot", "text": body, "icon_emoji": ":robot_face:"}
requests.post(url, json=payload)
send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
## Instruction:
Rename bot and prevent channel spamming
## Code After:
import os
import requests
from django.core.mail import send_mail
from clowder_account.models import ClowderUser
ADMIN_EMAIL = '[email protected]'
def send_alert(company, name):
slack_sent = False
for user in ClowderUser.objects.filter(company=company, allow_email_notifications=True):
subject = 'FAILURE: %s' % (name)
body = subject
if user.company_id == 86 and not slack_sent:
slack_token = os.getenv('PARKME_SLACK_TOKEN')
url = 'https://hooks.slack.com/services/%s' % (slack_token)
payload = {"username": "clowder", "text": body, "icon_emoji": ":clowder:"}
requests.post(url, json=payload)
slack_sent = True
send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
|
import os
import requests
from django.core.mail import send_mail
from clowder_account.models import ClowderUser
ADMIN_EMAIL = '[email protected]'
def send_alert(company, name):
+ slack_sent = False
for user in ClowderUser.objects.filter(company=company, allow_email_notifications=True):
subject = 'FAILURE: %s' % (name)
body = subject
- if user.company_id == 86:
+ if user.company_id == 86 and not slack_sent:
? +++++++++++++++++++
slack_token = os.getenv('PARKME_SLACK_TOKEN')
url = 'https://hooks.slack.com/services/%s' % (slack_token)
- payload = {"username": "devopsbot", "text": body, "icon_emoji": ":robot_face:"}
? ^^^^^^^ ---------
+ payload = {"username": "clowder", "text": body, "icon_emoji": ":clowder:"}
? ++++ ^ ++++++
requests.post(url, json=payload)
+ slack_sent = True
send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
|
96e0f2621dafd691e4560afe9b59df21aad3d2a8
|
taskwiki/cache.py
|
taskwiki/cache.py
|
import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
return task
def __iter__(self):
# iterated_cache = {
while self.cache.keys():
for key in list(self.cache.keys()):
task = self.cache[key]
if all([t.line_number not in self.cache.keys()
for t in task.add_dependencies]):
del self.cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
|
import copy
import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.uuid_cache = dict()
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
if task.uuid:
self.uuid_cache[task.uuid] = task
return task
def __iter__(self):
iterated_cache = copy.copy(self.cache)
while iterated_cache.keys():
for key in list(iterated_cache.keys()):
task = iterated_cache[key]
if all([t.line_number not in iterated_cache.keys()
for t in task.add_dependencies]):
del iterated_cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
|
Index tasks by uuid as well as line number
|
Cache: Index tasks by uuid as well as line number
|
Python
|
mit
|
phha/taskwiki,Spirotot/taskwiki
|
+ import copy
import vim
from taskwiki.task import VimwikiTask
+
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
+ self.uuid_cache = dict()
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
+ if task.uuid:
+ self.uuid_cache[task.uuid] = task
+
return task
def __iter__(self):
- # iterated_cache = {
+ iterated_cache = copy.copy(self.cache)
- while self.cache.keys():
+ while iterated_cache.keys():
- for key in list(self.cache.keys()):
+ for key in list(iterated_cache.keys()):
- task = self.cache[key]
+ task = iterated_cache[key]
- if all([t.line_number not in self.cache.keys()
+ if all([t.line_number not in iterated_cache.keys()
for t in task.add_dependencies]):
- del self.cache[key]
+ del iterated_cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
|
Index tasks by uuid as well as line number
|
## Code Before:
import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
return task
def __iter__(self):
# iterated_cache = {
while self.cache.keys():
for key in list(self.cache.keys()):
task = self.cache[key]
if all([t.line_number not in self.cache.keys()
for t in task.add_dependencies]):
del self.cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
## Instruction:
Index tasks by uuid as well as line number
## Code After:
import copy
import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.uuid_cache = dict()
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
if task.uuid:
self.uuid_cache[task.uuid] = task
return task
def __iter__(self):
iterated_cache = copy.copy(self.cache)
while iterated_cache.keys():
for key in list(iterated_cache.keys()):
task = iterated_cache[key]
if all([t.line_number not in iterated_cache.keys()
for t in task.add_dependencies]):
del iterated_cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
|
+ import copy
import vim
from taskwiki.task import VimwikiTask
+
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
+ self.uuid_cache = dict()
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
+ if task.uuid:
+ self.uuid_cache[task.uuid] = task
+
return task
def __iter__(self):
- # iterated_cache = {
+ iterated_cache = copy.copy(self.cache)
- while self.cache.keys():
? ^ ^^^
+ while iterated_cache.keys():
? ^^ ^^^^^^
- for key in list(self.cache.keys()):
? ^ ^^^
+ for key in list(iterated_cache.keys()):
? ^^ ^^^^^^
- task = self.cache[key]
? ^ ^^^
+ task = iterated_cache[key]
? ^^ ^^^^^^
- if all([t.line_number not in self.cache.keys()
? ^ ^^^
+ if all([t.line_number not in iterated_cache.keys()
? ^^ ^^^^^^
for t in task.add_dependencies]):
- del self.cache[key]
? ^ ^^^
+ del iterated_cache[key]
? ^^ ^^^^^^
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
|
8ed94e1fb93252eed47239d8c6a5f28796802a36
|
src/cclib/__init__.py
|
src/cclib/__init__.py
|
__version__ = "1.3"
from . import parser
from . import progress
from . import method
from . import bridge
# The test module can be imported if it was installed with cclib.
try:
from . import test
except ImportError:
pass
|
# This file is part of cclib (http://cclib.sf.net), a library for parsing
# and interpreting the results of computational chemistry packages.
#
# Copyright (C) 2006-2014 the cclib development team
#
# The library is free software, distributed under the terms of
# the GNU Lesser General Public version 2.1 or later. You should have
# received a copy of the license along with cclib. You can also access
# the full license online at http://www.gnu.org/copyleft/lgpl.html.
__version__ = "1.3"
from . import parser
from . import progress
from . import method
from . import bridge
# The test module can be imported if it was installed with cclib.
try:
from . import test
except ImportError:
pass
|
Add a descriptive docstring to main cclib module
|
Add a descriptive docstring to main cclib module
|
Python
|
bsd-3-clause
|
berquist/cclib,jchodera/cclib,ghutchis/cclib,ben-albrecht/cclib,andersx/cclib,gaursagar/cclib,Clyde-fare/cclib,ghutchis/cclib,langner/cclib,andersx/cclib,cclib/cclib,Schamnad/cclib,ATenderholt/cclib,berquist/cclib,cclib/cclib,ATenderholt/cclib,langner/cclib,berquist/cclib,cclib/cclib,langner/cclib,gaursagar/cclib,jchodera/cclib,ben-albrecht/cclib,Schamnad/cclib,Clyde-fare/cclib
|
+
+ # This file is part of cclib (http://cclib.sf.net), a library for parsing
+ # and interpreting the results of computational chemistry packages.
+ #
+ # Copyright (C) 2006-2014 the cclib development team
+ #
+ # The library is free software, distributed under the terms of
+ # the GNU Lesser General Public version 2.1 or later. You should have
+ # received a copy of the license along with cclib. You can also access
+ # the full license online at http://www.gnu.org/copyleft/lgpl.html.
__version__ = "1.3"
from . import parser
from . import progress
from . import method
from . import bridge
# The test module can be imported if it was installed with cclib.
try:
from . import test
except ImportError:
pass
|
Add a descriptive docstring to main cclib module
|
## Code Before:
__version__ = "1.3"
from . import parser
from . import progress
from . import method
from . import bridge
# The test module can be imported if it was installed with cclib.
try:
from . import test
except ImportError:
pass
## Instruction:
Add a descriptive docstring to main cclib module
## Code After:
# This file is part of cclib (http://cclib.sf.net), a library for parsing
# and interpreting the results of computational chemistry packages.
#
# Copyright (C) 2006-2014 the cclib development team
#
# The library is free software, distributed under the terms of
# the GNU Lesser General Public version 2.1 or later. You should have
# received a copy of the license along with cclib. You can also access
# the full license online at http://www.gnu.org/copyleft/lgpl.html.
__version__ = "1.3"
from . import parser
from . import progress
from . import method
from . import bridge
# The test module can be imported if it was installed with cclib.
try:
from . import test
except ImportError:
pass
|
+
+ # This file is part of cclib (http://cclib.sf.net), a library for parsing
+ # and interpreting the results of computational chemistry packages.
+ #
+ # Copyright (C) 2006-2014 the cclib development team
+ #
+ # The library is free software, distributed under the terms of
+ # the GNU Lesser General Public version 2.1 or later. You should have
+ # received a copy of the license along with cclib. You can also access
+ # the full license online at http://www.gnu.org/copyleft/lgpl.html.
__version__ = "1.3"
from . import parser
from . import progress
from . import method
from . import bridge
# The test module can be imported if it was installed with cclib.
try:
from . import test
except ImportError:
pass
|
ba42df4296a02396e823ee9692fb84eb0deb8b7c
|
corehq/messaging/smsbackends/start_enterprise/views.py
|
corehq/messaging/smsbackends/start_enterprise/views.py
|
from __future__ import absolute_import
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
StartEnterpriseDeliveryReceipt,
)
from datetime import datetime
from django.http import HttpResponse, HttpResponseBadRequest
class StartEnterpriseDeliveryReceiptView(IncomingBackendView):
urlname = 'start_enterprise_dlr'
@property
def backend_class(self):
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
message_id = request.GET.get('msgid')
if not message_id:
return HttpResponseBadRequest("Missing 'msgid'")
message_id = message_id.strip()
try:
dlr = StartEnterpriseDeliveryReceipt.objects.get(message_id=message_id)
except StartEnterpriseDeliveryReceipt.DoesNotExist:
dlr = None
if dlr:
dlr.received_on = datetime.utcnow()
dlr.info = request.GET.dict()
dlr.save()
# Based on the documentation, a response of "1" acknowledges receipt of the DLR
return HttpResponse("1")
|
from __future__ import absolute_import
import logging
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
StartEnterpriseDeliveryReceipt,
)
from datetime import datetime
from django.http import HttpResponse, HttpResponseBadRequest
class StartEnterpriseDeliveryReceiptView(IncomingBackendView):
urlname = 'start_enterprise_dlr'
@property
def backend_class(self):
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
logging.info("Received Start Enterprise delivery receipt with items: %s" % request.GET.dict().keys())
message_id = request.GET.get('msgid')
if not message_id:
return HttpResponseBadRequest("Missing 'msgid'")
message_id = message_id.strip()
try:
dlr = StartEnterpriseDeliveryReceipt.objects.get(message_id=message_id)
except StartEnterpriseDeliveryReceipt.DoesNotExist:
dlr = None
if dlr:
dlr.received_on = datetime.utcnow()
dlr.info = request.GET.dict()
dlr.save()
# Based on the documentation, a response of "1" acknowledges receipt of the DLR
return HttpResponse("1")
|
Add logging to delivery receipt view
|
Add logging to delivery receipt view
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from __future__ import absolute_import
+ import logging
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
StartEnterpriseDeliveryReceipt,
)
from datetime import datetime
from django.http import HttpResponse, HttpResponseBadRequest
class StartEnterpriseDeliveryReceiptView(IncomingBackendView):
urlname = 'start_enterprise_dlr'
@property
def backend_class(self):
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
+ logging.info("Received Start Enterprise delivery receipt with items: %s" % request.GET.dict().keys())
+
message_id = request.GET.get('msgid')
if not message_id:
return HttpResponseBadRequest("Missing 'msgid'")
message_id = message_id.strip()
try:
dlr = StartEnterpriseDeliveryReceipt.objects.get(message_id=message_id)
except StartEnterpriseDeliveryReceipt.DoesNotExist:
dlr = None
if dlr:
dlr.received_on = datetime.utcnow()
dlr.info = request.GET.dict()
dlr.save()
# Based on the documentation, a response of "1" acknowledges receipt of the DLR
return HttpResponse("1")
|
Add logging to delivery receipt view
|
## Code Before:
from __future__ import absolute_import
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
StartEnterpriseDeliveryReceipt,
)
from datetime import datetime
from django.http import HttpResponse, HttpResponseBadRequest
class StartEnterpriseDeliveryReceiptView(IncomingBackendView):
urlname = 'start_enterprise_dlr'
@property
def backend_class(self):
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
message_id = request.GET.get('msgid')
if not message_id:
return HttpResponseBadRequest("Missing 'msgid'")
message_id = message_id.strip()
try:
dlr = StartEnterpriseDeliveryReceipt.objects.get(message_id=message_id)
except StartEnterpriseDeliveryReceipt.DoesNotExist:
dlr = None
if dlr:
dlr.received_on = datetime.utcnow()
dlr.info = request.GET.dict()
dlr.save()
# Based on the documentation, a response of "1" acknowledges receipt of the DLR
return HttpResponse("1")
## Instruction:
Add logging to delivery receipt view
## Code After:
from __future__ import absolute_import
import logging
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
StartEnterpriseDeliveryReceipt,
)
from datetime import datetime
from django.http import HttpResponse, HttpResponseBadRequest
class StartEnterpriseDeliveryReceiptView(IncomingBackendView):
urlname = 'start_enterprise_dlr'
@property
def backend_class(self):
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
logging.info("Received Start Enterprise delivery receipt with items: %s" % request.GET.dict().keys())
message_id = request.GET.get('msgid')
if not message_id:
return HttpResponseBadRequest("Missing 'msgid'")
message_id = message_id.strip()
try:
dlr = StartEnterpriseDeliveryReceipt.objects.get(message_id=message_id)
except StartEnterpriseDeliveryReceipt.DoesNotExist:
dlr = None
if dlr:
dlr.received_on = datetime.utcnow()
dlr.info = request.GET.dict()
dlr.save()
# Based on the documentation, a response of "1" acknowledges receipt of the DLR
return HttpResponse("1")
|
from __future__ import absolute_import
+ import logging
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
StartEnterpriseDeliveryReceipt,
)
from datetime import datetime
from django.http import HttpResponse, HttpResponseBadRequest
class StartEnterpriseDeliveryReceiptView(IncomingBackendView):
urlname = 'start_enterprise_dlr'
@property
def backend_class(self):
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
+ logging.info("Received Start Enterprise delivery receipt with items: %s" % request.GET.dict().keys())
+
message_id = request.GET.get('msgid')
if not message_id:
return HttpResponseBadRequest("Missing 'msgid'")
message_id = message_id.strip()
try:
dlr = StartEnterpriseDeliveryReceipt.objects.get(message_id=message_id)
except StartEnterpriseDeliveryReceipt.DoesNotExist:
dlr = None
if dlr:
dlr.received_on = datetime.utcnow()
dlr.info = request.GET.dict()
dlr.save()
# Based on the documentation, a response of "1" acknowledges receipt of the DLR
return HttpResponse("1")
|
5bf50c2f36e00004dac0bc9bd604ac99b77261df
|
rename_fotos/tests/test_rename_fotos.py
|
rename_fotos/tests/test_rename_fotos.py
|
import pytest
import ../__init__ as init
from selenium import webdriver
LOCAL_INSTANCE = "127.0.0.1:5000"
def test_is_running():
init.is_running()
# Firefox
driver = webdriver.Firefox()
driver.get(LOCAl_INSTANCE)
assert driver.body == "Flask is running"
|
import pytest
import rename_fotos as rfapp
LOCAL_INSTANCE = "127.0.0.1:5000"
@pytest.fixture
def client():
rfapp.app.config['TESTING'] = True
with rfapp.app.test_client() as client:
with rfapp.app.app_context():
rfapp.init_db()
yield client
def test_is_running(client):
response = client.get('/')
assert "FAIL" in response.data
|
Switch to flask built in tests
|
Switch to flask built in tests
|
Python
|
mit
|
daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various
|
import pytest
- import ../__init__ as init
- from selenium import webdriver
+ import rename_fotos as rfapp
+
LOCAL_INSTANCE = "127.0.0.1:5000"
+ @pytest.fixture
+ def client():
+ rfapp.app.config['TESTING'] = True
+ with rfapp.app.test_client() as client:
+ with rfapp.app.app_context():
+ rfapp.init_db()
+ yield client
+
+
- def test_is_running():
+ def test_is_running(client):
- init.is_running()
+ response = client.get('/')
+ assert "FAIL" in response.data
- # Firefox
- driver = webdriver.Firefox()
- driver.get(LOCAl_INSTANCE)
+
- assert driver.body == "Flask is running"
-
-
|
Switch to flask built in tests
|
## Code Before:
import pytest
import ../__init__ as init
from selenium import webdriver
LOCAL_INSTANCE = "127.0.0.1:5000"
def test_is_running():
init.is_running()
# Firefox
driver = webdriver.Firefox()
driver.get(LOCAl_INSTANCE)
assert driver.body == "Flask is running"
## Instruction:
Switch to flask built in tests
## Code After:
import pytest
import rename_fotos as rfapp
LOCAL_INSTANCE = "127.0.0.1:5000"
@pytest.fixture
def client():
rfapp.app.config['TESTING'] = True
with rfapp.app.test_client() as client:
with rfapp.app.app_context():
rfapp.init_db()
yield client
def test_is_running(client):
response = client.get('/')
assert "FAIL" in response.data
|
import pytest
- import ../__init__ as init
- from selenium import webdriver
+ import rename_fotos as rfapp
+
LOCAL_INSTANCE = "127.0.0.1:5000"
+ @pytest.fixture
+ def client():
+ rfapp.app.config['TESTING'] = True
+ with rfapp.app.test_client() as client:
+ with rfapp.app.app_context():
+ rfapp.init_db()
+ yield client
+
+
- def test_is_running():
+ def test_is_running(client):
? ++++++
- init.is_running()
+ response = client.get('/')
+ assert "FAIL" in response.data
- # Firefox
- driver = webdriver.Firefox()
- driver.get(LOCAl_INSTANCE)
- assert driver.body == "Flask is running"
-
-
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.