commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
3.18k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43k
| ndiff
stringlengths 52
3.32k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| fuzzy_diff
stringlengths 16
3.18k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
86066890322e3c3654946a49c8d1cd2e1a1c2980
|
celery/tests/test_backends/__init__.py
|
celery/tests/test_backends/__init__.py
|
import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.pyredis import RedisBackend
class TestBackends(unittest.TestCase):
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("redis", RedisBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
|
import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.database import DatabaseBackend
class TestBackends(unittest.TestCase):
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("database", DatabaseBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
|
Test using DatabaseBackend instead of RedisBackend, as the latter requires the redis module to be installed.
|
tests.backends: Test using DatabaseBackend instead of RedisBackend, as the latter requires the redis module to be installed.
|
Python
|
bsd-3-clause
|
WoLpH/celery,ask/celery,mitsuhiko/celery,mitsuhiko/celery,WoLpH/celery,frac/celery,cbrepo/celery,cbrepo/celery,frac/celery,ask/celery
|
import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
- from celery.backends.pyredis import RedisBackend
+ from celery.backends.database import DatabaseBackend
class TestBackends(unittest.TestCase):
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
- ("redis", RedisBackend)]
+ ("database", DatabaseBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
|
Test using DatabaseBackend instead of RedisBackend, as the latter requires the redis module to be installed.
|
## Code Before:
import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.pyredis import RedisBackend
class TestBackends(unittest.TestCase):
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("redis", RedisBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
## Instruction:
Test using DatabaseBackend instead of RedisBackend, as the latter requires the redis module to be installed.
## Code After:
import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.database import DatabaseBackend
class TestBackends(unittest.TestCase):
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("database", DatabaseBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
|
// ... existing code ...
from celery.backends.amqp import AMQPBackend
from celery.backends.database import DatabaseBackend
// ... modified code ...
expects = [("amqp", AMQPBackend),
("database", DatabaseBackend)]
for expect_name, expect_cls in expects:
// ... rest of the code ...
|
acab1af0e9bebeea011de1be472f298ddedd862b
|
src/pretix/control/views/global_settings.py
|
src/pretix/control/views/global_settings.py
|
from django.shortcuts import reverse
from django.views.generic import FormView
from pretix.control.forms.global_settings import GlobalSettingsForm
from pretix.control.permissions import AdministratorPermissionRequiredMixin
class GlobalSettingsView(AdministratorPermissionRequiredMixin, FormView):
template_name = 'pretixcontrol/global_settings.html'
form_class = GlobalSettingsForm
def form_valid(self, form):
form.save()
return super().form_valid(form)
def get_success_url(self):
return reverse('control:global-settings')
|
from django.contrib import messages
from django.shortcuts import reverse
from django.utils.translation import ugettext_lazy as _
from django.views.generic import FormView
from pretix.control.forms.global_settings import GlobalSettingsForm
from pretix.control.permissions import AdministratorPermissionRequiredMixin
class GlobalSettingsView(AdministratorPermissionRequiredMixin, FormView):
template_name = 'pretixcontrol/global_settings.html'
form_class = GlobalSettingsForm
def form_valid(self, form):
form.save()
messages.success(self.request, _('Your changes have been saved.'))
return super().form_valid(form)
def form_invalid(self, form):
messages.error(self.request, _('Your changes have not been saved, see below for errors.'))
return super().form_invalid(form)
def get_success_url(self):
return reverse('control:global-settings')
|
Add feedback to global settings
|
Add feedback to global settings
|
Python
|
apache-2.0
|
Flamacue/pretix,Flamacue/pretix,Flamacue/pretix,Flamacue/pretix
|
+ from django.contrib import messages
from django.shortcuts import reverse
+ from django.utils.translation import ugettext_lazy as _
from django.views.generic import FormView
from pretix.control.forms.global_settings import GlobalSettingsForm
from pretix.control.permissions import AdministratorPermissionRequiredMixin
class GlobalSettingsView(AdministratorPermissionRequiredMixin, FormView):
template_name = 'pretixcontrol/global_settings.html'
form_class = GlobalSettingsForm
def form_valid(self, form):
form.save()
+ messages.success(self.request, _('Your changes have been saved.'))
return super().form_valid(form)
+
+ def form_invalid(self, form):
+ messages.error(self.request, _('Your changes have not been saved, see below for errors.'))
+ return super().form_invalid(form)
def get_success_url(self):
return reverse('control:global-settings')
|
Add feedback to global settings
|
## Code Before:
from django.shortcuts import reverse
from django.views.generic import FormView
from pretix.control.forms.global_settings import GlobalSettingsForm
from pretix.control.permissions import AdministratorPermissionRequiredMixin
class GlobalSettingsView(AdministratorPermissionRequiredMixin, FormView):
template_name = 'pretixcontrol/global_settings.html'
form_class = GlobalSettingsForm
def form_valid(self, form):
form.save()
return super().form_valid(form)
def get_success_url(self):
return reverse('control:global-settings')
## Instruction:
Add feedback to global settings
## Code After:
from django.contrib import messages
from django.shortcuts import reverse
from django.utils.translation import ugettext_lazy as _
from django.views.generic import FormView
from pretix.control.forms.global_settings import GlobalSettingsForm
from pretix.control.permissions import AdministratorPermissionRequiredMixin
class GlobalSettingsView(AdministratorPermissionRequiredMixin, FormView):
template_name = 'pretixcontrol/global_settings.html'
form_class = GlobalSettingsForm
def form_valid(self, form):
form.save()
messages.success(self.request, _('Your changes have been saved.'))
return super().form_valid(form)
def form_invalid(self, form):
messages.error(self.request, _('Your changes have not been saved, see below for errors.'))
return super().form_invalid(form)
def get_success_url(self):
return reverse('control:global-settings')
|
// ... existing code ...
from django.contrib import messages
from django.shortcuts import reverse
from django.utils.translation import ugettext_lazy as _
from django.views.generic import FormView
// ... modified code ...
form.save()
messages.success(self.request, _('Your changes have been saved.'))
return super().form_valid(form)
def form_invalid(self, form):
messages.error(self.request, _('Your changes have not been saved, see below for errors.'))
return super().form_invalid(form)
// ... rest of the code ...
|
2d85ea5594cf1395fef6caafccc690ed9a5db472
|
crabigator/tests/test_wanikani.py
|
crabigator/tests/test_wanikani.py
|
"""Tests for crabigator.wanikani."""
from __future__ import print_function
from crabigator.wanikani import WaniKani, WaniKaniError
import os
from unittest import TestCase
# TestCase exposes too many public methods. Disable the pylint warning for it.
# pylint: disable=too-many-public-methods
class TestWaniKani(TestCase):
"""Unit test cases for the WaniKani API wrapper."""
@classmethod
def test_wanikani(cls):
"""Test all public methods in crabigator.wanikani."""
wanikani = WaniKani(os.environ['WANIKANI_API_KEY'])
print(wanikani.user_information)
print(wanikani.study_queue)
print(wanikani.level_progression)
print(wanikani.srs_distribution)
print(wanikani.recent_unlocks)
print(wanikani.get_recent_unlocks(3))
print(wanikani.critical_items)
print(wanikani.get_recent_unlocks(65))
print(wanikani.radicals)
print(wanikani.get_radicals([1, 2]))
print(wanikani.kanji)
print(wanikani.get_kanji([1, 2]))
print(wanikani.vocabulary)
print(wanikani.get_vocabulary([1, 2]))
try:
wanikani.get_vocabulary([9999])
except WaniKaniError as ex:
print(ex)
|
"""Tests for crabigator.wanikani."""
from __future__ import print_function
import os
from unittest import TestCase
from crabigator.wanikani import WaniKani, WaniKaniError
# TestCase exposes too many public methods. Disable the pylint warning for it.
# pylint: disable=too-many-public-methods
class TestWaniKani(TestCase):
"""Unit test cases for the WaniKani API wrapper."""
@classmethod
def test_wanikani(cls):
"""Test all public methods in crabigator.wanikani."""
wanikani = WaniKani(os.environ['WANIKANI_API_KEY'])
print(wanikani.user_information)
print(wanikani.study_queue)
print(wanikani.level_progression)
print(wanikani.srs_distribution)
print(wanikani.recent_unlocks)
print(wanikani.get_recent_unlocks(3))
print(wanikani.critical_items)
print(wanikani.get_recent_unlocks(65))
print(wanikani.radicals)
print(wanikani.get_radicals([1, 2]))
print(wanikani.kanji)
print(wanikani.get_kanji([1, 2]))
print(wanikani.vocabulary)
print(wanikani.get_vocabulary([1, 2]))
try:
wanikani.get_vocabulary([9999])
except WaniKaniError as ex:
print(ex)
|
Change import order to satisfy pylint.
|
Change import order to satisfy pylint.
|
Python
|
mit
|
jonesinator/crabigator
|
"""Tests for crabigator.wanikani."""
from __future__ import print_function
- from crabigator.wanikani import WaniKani, WaniKaniError
import os
from unittest import TestCase
+
+ from crabigator.wanikani import WaniKani, WaniKaniError
# TestCase exposes too many public methods. Disable the pylint warning for it.
# pylint: disable=too-many-public-methods
class TestWaniKani(TestCase):
"""Unit test cases for the WaniKani API wrapper."""
@classmethod
def test_wanikani(cls):
"""Test all public methods in crabigator.wanikani."""
wanikani = WaniKani(os.environ['WANIKANI_API_KEY'])
print(wanikani.user_information)
print(wanikani.study_queue)
print(wanikani.level_progression)
print(wanikani.srs_distribution)
print(wanikani.recent_unlocks)
print(wanikani.get_recent_unlocks(3))
print(wanikani.critical_items)
print(wanikani.get_recent_unlocks(65))
print(wanikani.radicals)
print(wanikani.get_radicals([1, 2]))
print(wanikani.kanji)
print(wanikani.get_kanji([1, 2]))
print(wanikani.vocabulary)
print(wanikani.get_vocabulary([1, 2]))
try:
wanikani.get_vocabulary([9999])
except WaniKaniError as ex:
print(ex)
|
Change import order to satisfy pylint.
|
## Code Before:
"""Tests for crabigator.wanikani."""
from __future__ import print_function
from crabigator.wanikani import WaniKani, WaniKaniError
import os
from unittest import TestCase
# TestCase exposes too many public methods. Disable the pylint warning for it.
# pylint: disable=too-many-public-methods
class TestWaniKani(TestCase):
"""Unit test cases for the WaniKani API wrapper."""
@classmethod
def test_wanikani(cls):
"""Test all public methods in crabigator.wanikani."""
wanikani = WaniKani(os.environ['WANIKANI_API_KEY'])
print(wanikani.user_information)
print(wanikani.study_queue)
print(wanikani.level_progression)
print(wanikani.srs_distribution)
print(wanikani.recent_unlocks)
print(wanikani.get_recent_unlocks(3))
print(wanikani.critical_items)
print(wanikani.get_recent_unlocks(65))
print(wanikani.radicals)
print(wanikani.get_radicals([1, 2]))
print(wanikani.kanji)
print(wanikani.get_kanji([1, 2]))
print(wanikani.vocabulary)
print(wanikani.get_vocabulary([1, 2]))
try:
wanikani.get_vocabulary([9999])
except WaniKaniError as ex:
print(ex)
## Instruction:
Change import order to satisfy pylint.
## Code After:
"""Tests for crabigator.wanikani."""
from __future__ import print_function
import os
from unittest import TestCase
from crabigator.wanikani import WaniKani, WaniKaniError
# TestCase exposes too many public methods. Disable the pylint warning for it.
# pylint: disable=too-many-public-methods
class TestWaniKani(TestCase):
"""Unit test cases for the WaniKani API wrapper."""
@classmethod
def test_wanikani(cls):
"""Test all public methods in crabigator.wanikani."""
wanikani = WaniKani(os.environ['WANIKANI_API_KEY'])
print(wanikani.user_information)
print(wanikani.study_queue)
print(wanikani.level_progression)
print(wanikani.srs_distribution)
print(wanikani.recent_unlocks)
print(wanikani.get_recent_unlocks(3))
print(wanikani.critical_items)
print(wanikani.get_recent_unlocks(65))
print(wanikani.radicals)
print(wanikani.get_radicals([1, 2]))
print(wanikani.kanji)
print(wanikani.get_kanji([1, 2]))
print(wanikani.vocabulary)
print(wanikani.get_vocabulary([1, 2]))
try:
wanikani.get_vocabulary([9999])
except WaniKaniError as ex:
print(ex)
|
# ... existing code ...
from __future__ import print_function
import os
# ... modified code ...
from unittest import TestCase
from crabigator.wanikani import WaniKani, WaniKaniError
# ... rest of the code ...
|
f3cdd316f9e0859f77389c68b073134a6076374b
|
ppp_datamodel_notation_parser/requesthandler.py
|
ppp_datamodel_notation_parser/requesthandler.py
|
"""Request handler of the module."""
from functools import partial
from ppp_datamodel import Sentence, TraceItem, Response
from ppp_datamodel.parsers import parse_triples, ParseError
def tree_to_response(measures, trace, tree):
trace = trace + [TraceItem('DatamodelNotationParser',
tree, measures)]
return Response('en', tree, measures, trace)
class RequestHandler:
def __init__(self, request):
self.request = request
def answer(self):
if not isinstance(self.request.tree, Sentence):
return []
try:
forest = parse_triples(self.request.tree.value)
except ParseError:
return []
measures = {'accuracy': 1, 'relevance': 0.5}
return map(partial(tree_to_response, measures, self.request.trace),
forest)
|
"""Request handler of the module."""
from functools import partial
from ppp_datamodel import Sentence, TraceItem, Response
from ppp_datamodel.parsers import parse_triples, ParseError
def tree_to_response(tree, measures, trace):
trace = trace + [TraceItem('DatamodelNotationParser',
tree, measures)]
return Response('en', tree, measures, trace)
class RequestHandler:
def __init__(self, request):
self.request = request
def answer(self):
if not isinstance(self.request.tree, Sentence):
return []
try:
tree = parse_triples(self.request.tree.value)
except ParseError:
return []
measures = {'accuracy': 1, 'relevance': 0.5}
return [tree_to_response(tree, measures, self.request.trace)]
|
Fix compatibility with new parser.
|
Fix compatibility with new parser.
|
Python
|
mit
|
ProjetPP/PPP-DatamodelNotationParser,ProjetPP/PPP-DatamodelNotationParser
|
"""Request handler of the module."""
from functools import partial
from ppp_datamodel import Sentence, TraceItem, Response
from ppp_datamodel.parsers import parse_triples, ParseError
- def tree_to_response(measures, trace, tree):
+ def tree_to_response(tree, measures, trace):
trace = trace + [TraceItem('DatamodelNotationParser',
tree, measures)]
return Response('en', tree, measures, trace)
class RequestHandler:
def __init__(self, request):
self.request = request
def answer(self):
if not isinstance(self.request.tree, Sentence):
return []
try:
- forest = parse_triples(self.request.tree.value)
+ tree = parse_triples(self.request.tree.value)
except ParseError:
return []
measures = {'accuracy': 1, 'relevance': 0.5}
- return map(partial(tree_to_response, measures, self.request.trace),
+ return [tree_to_response(tree, measures, self.request.trace)]
- forest)
|
Fix compatibility with new parser.
|
## Code Before:
"""Request handler of the module."""
from functools import partial
from ppp_datamodel import Sentence, TraceItem, Response
from ppp_datamodel.parsers import parse_triples, ParseError
def tree_to_response(measures, trace, tree):
trace = trace + [TraceItem('DatamodelNotationParser',
tree, measures)]
return Response('en', tree, measures, trace)
class RequestHandler:
def __init__(self, request):
self.request = request
def answer(self):
if not isinstance(self.request.tree, Sentence):
return []
try:
forest = parse_triples(self.request.tree.value)
except ParseError:
return []
measures = {'accuracy': 1, 'relevance': 0.5}
return map(partial(tree_to_response, measures, self.request.trace),
forest)
## Instruction:
Fix compatibility with new parser.
## Code After:
"""Request handler of the module."""
from functools import partial
from ppp_datamodel import Sentence, TraceItem, Response
from ppp_datamodel.parsers import parse_triples, ParseError
def tree_to_response(tree, measures, trace):
trace = trace + [TraceItem('DatamodelNotationParser',
tree, measures)]
return Response('en', tree, measures, trace)
class RequestHandler:
def __init__(self, request):
self.request = request
def answer(self):
if not isinstance(self.request.tree, Sentence):
return []
try:
tree = parse_triples(self.request.tree.value)
except ParseError:
return []
measures = {'accuracy': 1, 'relevance': 0.5}
return [tree_to_response(tree, measures, self.request.trace)]
|
# ... existing code ...
def tree_to_response(tree, measures, trace):
trace = trace + [TraceItem('DatamodelNotationParser',
# ... modified code ...
try:
tree = parse_triples(self.request.tree.value)
except ParseError:
...
measures = {'accuracy': 1, 'relevance': 0.5}
return [tree_to_response(tree, measures, self.request.trace)]
# ... rest of the code ...
|
e9171e8d77b457e2c96fca37c89d68c518bec5f7
|
src/urllib3/util/util.py
|
src/urllib3/util/util.py
|
from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
if isinstance(x, bytes):
return x
elif not isinstance(x, str):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.encode(encoding or "utf-8", errors=errors or "strict")
return x.encode()
def to_str(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
if isinstance(x, str):
return x
elif not isinstance(x, bytes):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.decode(encoding or "utf-8", errors=errors or "strict")
return x.decode()
def reraise(
tp: Optional[Type[BaseException]],
value: Optional[BaseException],
tb: Optional[TracebackType] = None,
) -> NoReturn:
try:
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
|
from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
if isinstance(x, bytes):
return x
elif not isinstance(x, str):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.encode(encoding or "utf-8", errors=errors or "strict")
return x.encode()
def to_str(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
if isinstance(x, str):
return x
elif not isinstance(x, bytes):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.decode(encoding or "utf-8", errors=errors or "strict")
return x.decode()
def reraise(
tp: Optional[Type[BaseException]],
value: Optional[BaseException],
tb: Optional[TracebackType] = None,
) -> NoReturn:
try:
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
|
Bring coverage back to 100%
|
Bring coverage back to 100%
All calls to reraise() are in branches where value is truthy, so we
can't reach that code.
|
Python
|
mit
|
sigmavirus24/urllib3,sigmavirus24/urllib3,urllib3/urllib3,urllib3/urllib3
|
from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
if isinstance(x, bytes):
return x
elif not isinstance(x, str):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.encode(encoding or "utf-8", errors=errors or "strict")
return x.encode()
def to_str(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
if isinstance(x, str):
return x
elif not isinstance(x, bytes):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.decode(encoding or "utf-8", errors=errors or "strict")
return x.decode()
def reraise(
tp: Optional[Type[BaseException]],
value: Optional[BaseException],
tb: Optional[TracebackType] = None,
) -> NoReturn:
try:
- if value is None:
- value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
|
Bring coverage back to 100%
|
## Code Before:
from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
if isinstance(x, bytes):
return x
elif not isinstance(x, str):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.encode(encoding or "utf-8", errors=errors or "strict")
return x.encode()
def to_str(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
if isinstance(x, str):
return x
elif not isinstance(x, bytes):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.decode(encoding or "utf-8", errors=errors or "strict")
return x.decode()
def reraise(
tp: Optional[Type[BaseException]],
value: Optional[BaseException],
tb: Optional[TracebackType] = None,
) -> NoReturn:
try:
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
## Instruction:
Bring coverage back to 100%
## Code After:
from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
if isinstance(x, bytes):
return x
elif not isinstance(x, str):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.encode(encoding or "utf-8", errors=errors or "strict")
return x.encode()
def to_str(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
if isinstance(x, str):
return x
elif not isinstance(x, bytes):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.decode(encoding or "utf-8", errors=errors or "strict")
return x.decode()
def reraise(
tp: Optional[Type[BaseException]],
value: Optional[BaseException],
tb: Optional[TracebackType] = None,
) -> NoReturn:
try:
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
|
# ... existing code ...
try:
if value.__traceback__ is not tb:
# ... rest of the code ...
|
d2fe3bbf4f97ef5ece4c6c69e531e261eeed75b1
|
stoq/plugins/archiver.py
|
stoq/plugins/archiver.py
|
from abc import abstractmethod
from typing import Optional
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
from stoq.plugins import BasePlugin
class ArchiverPlugin(BasePlugin):
@abstractmethod
def archive(
self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
pass
def get(self, task: str) -> Optional[Payload]:
pass
|
from abc import abstractmethod
from typing import Optional
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
from stoq.plugins import BasePlugin
class ArchiverPlugin(BasePlugin):
def archive(
self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
pass
def get(self, task: str) -> Optional[Payload]:
pass
|
Remove @abstracmethod from archive method as well
|
Remove @abstracmethod from archive method as well
|
Python
|
apache-2.0
|
PUNCH-Cyber/stoq
|
from abc import abstractmethod
from typing import Optional
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
from stoq.plugins import BasePlugin
class ArchiverPlugin(BasePlugin):
- @abstractmethod
def archive(
self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
pass
def get(self, task: str) -> Optional[Payload]:
pass
|
Remove @abstracmethod from archive method as well
|
## Code Before:
from abc import abstractmethod
from typing import Optional
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
from stoq.plugins import BasePlugin
class ArchiverPlugin(BasePlugin):
@abstractmethod
def archive(
self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
pass
def get(self, task: str) -> Optional[Payload]:
pass
## Instruction:
Remove @abstracmethod from archive method as well
## Code After:
from abc import abstractmethod
from typing import Optional
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
from stoq.plugins import BasePlugin
class ArchiverPlugin(BasePlugin):
def archive(
self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
pass
def get(self, task: str) -> Optional[Payload]:
pass
|
...
class ArchiverPlugin(BasePlugin):
def archive(
...
|
a06f586ba95148643561122f051087db7b63fecb
|
registries/views.py
|
registries/views.py
|
from django.shortcuts import render
from django.conf import settings
from django.http import HttpResponse
from rest_framework.generics import ListAPIView
from registries.models import Organization
from registries.serializers import DrillerListSerializer
class APIDrillerListView(ListAPIView):
queryset = Organization.objects.all()
serializer_class = DrillerListSerializer
# Create your views here.
def index(request):
return HttpResponse("TEST: Driller Register app home index.")
|
from django.shortcuts import render
from django.conf import settings
from django.http import HttpResponse
from rest_framework.generics import ListAPIView
from registries.models import Organization
from registries.serializers import DrillerListSerializer
class APIDrillerListView(ListAPIView):
queryset = Organization.objects.all().select_related('province_state')
serializer_class = DrillerListSerializer
# Create your views here.
def index(request):
return HttpResponse("TEST: Driller Register app home index.")
|
Add prefetch to reduce queries on province_state
|
Add prefetch to reduce queries on province_state
|
Python
|
apache-2.0
|
rstens/gwells,rstens/gwells,rstens/gwells,bcgov/gwells,bcgov/gwells,rstens/gwells,bcgov/gwells,bcgov/gwells
|
from django.shortcuts import render
from django.conf import settings
from django.http import HttpResponse
from rest_framework.generics import ListAPIView
from registries.models import Organization
from registries.serializers import DrillerListSerializer
class APIDrillerListView(ListAPIView):
- queryset = Organization.objects.all()
+ queryset = Organization.objects.all().select_related('province_state')
serializer_class = DrillerListSerializer
# Create your views here.
def index(request):
return HttpResponse("TEST: Driller Register app home index.")
+
|
Add prefetch to reduce queries on province_state
|
## Code Before:
from django.shortcuts import render
from django.conf import settings
from django.http import HttpResponse
from rest_framework.generics import ListAPIView
from registries.models import Organization
from registries.serializers import DrillerListSerializer
class APIDrillerListView(ListAPIView):
queryset = Organization.objects.all()
serializer_class = DrillerListSerializer
# Create your views here.
def index(request):
return HttpResponse("TEST: Driller Register app home index.")
## Instruction:
Add prefetch to reduce queries on province_state
## Code After:
from django.shortcuts import render
from django.conf import settings
from django.http import HttpResponse
from rest_framework.generics import ListAPIView
from registries.models import Organization
from registries.serializers import DrillerListSerializer
class APIDrillerListView(ListAPIView):
queryset = Organization.objects.all().select_related('province_state')
serializer_class = DrillerListSerializer
# Create your views here.
def index(request):
return HttpResponse("TEST: Driller Register app home index.")
|
// ... existing code ...
class APIDrillerListView(ListAPIView):
queryset = Organization.objects.all().select_related('province_state')
serializer_class = DrillerListSerializer
// ... rest of the code ...
|
d0ac312de9b48a78f92f9eb09e048131578483f5
|
giles/utils.py
|
giles/utils.py
|
def booleanize(msg):
# This returns:
# -1 for False
# 1 for True
# 0 for invalid input.
if type(msg) != str:
return 0
msg = msg.strip().lower()
if (msg == "on" or msg == "true" or msg == "yes" or msg == "y"
or msg == "t" or msg == "1"):
return 1
elif (msg == "off" or msg == "false" or msg == "no" or msg == "n"
or msg == "f" or msg == "0"):
return -1
return 0
|
class Struct(object):
# Empty class, useful for making "structs."
pass
def booleanize(msg):
# This returns:
# -1 for False
# 1 for True
# 0 for invalid input.
if type(msg) != str:
return 0
msg = msg.strip().lower()
if (msg == "on" or msg == "true" or msg == "yes" or msg == "y"
or msg == "t" or msg == "1"):
return 1
elif (msg == "off" or msg == "false" or msg == "no" or msg == "n"
or msg == "f" or msg == "0"):
return -1
return 0
|
Add my classic Struct "class."
|
Add my classic Struct "class."
That's right, I embrace the lazy.
|
Python
|
agpl-3.0
|
sunfall/giles
|
+
+ class Struct(object):
+ # Empty class, useful for making "structs."
+ pass
def booleanize(msg):
# This returns:
# -1 for False
# 1 for True
# 0 for invalid input.
if type(msg) != str:
return 0
msg = msg.strip().lower()
if (msg == "on" or msg == "true" or msg == "yes" or msg == "y"
or msg == "t" or msg == "1"):
return 1
elif (msg == "off" or msg == "false" or msg == "no" or msg == "n"
or msg == "f" or msg == "0"):
return -1
return 0
|
Add my classic Struct "class."
|
## Code Before:
def booleanize(msg):
# This returns:
# -1 for False
# 1 for True
# 0 for invalid input.
if type(msg) != str:
return 0
msg = msg.strip().lower()
if (msg == "on" or msg == "true" or msg == "yes" or msg == "y"
or msg == "t" or msg == "1"):
return 1
elif (msg == "off" or msg == "false" or msg == "no" or msg == "n"
or msg == "f" or msg == "0"):
return -1
return 0
## Instruction:
Add my classic Struct "class."
## Code After:
class Struct(object):
# Empty class, useful for making "structs."
pass
def booleanize(msg):
# This returns:
# -1 for False
# 1 for True
# 0 for invalid input.
if type(msg) != str:
return 0
msg = msg.strip().lower()
if (msg == "on" or msg == "true" or msg == "yes" or msg == "y"
or msg == "t" or msg == "1"):
return 1
elif (msg == "off" or msg == "false" or msg == "no" or msg == "n"
or msg == "f" or msg == "0"):
return -1
return 0
|
# ... existing code ...
class Struct(object):
# Empty class, useful for making "structs."
pass
# ... rest of the code ...
|
3701ab7e372d73c2076988954dabff82f0f16557
|
build/adama-app/adama-package/adama/store.py
|
build/adama-app/adama-package/adama/store.py
|
import collections
import pickle
import redis
from .serf import node
class Store(collections.MutableMapping):
def __init__(self, db=0):
host, port = node(role='redis', port=6379)
self._db = redis.StrictRedis(host=host, port=port, db=db)
def __getitem__(self, key):
obj = self._db.get(key)
if obj is None:
raise KeyError('"{}" not found'.format(key))
return pickle.loads(obj)
def __setitem__(self, key, value):
obj = pickle.dumps(value)
self._db.set(key, obj)
def __delitem__(self, key):
self._db.delete(key)
def __iter__(self):
return self._db.scan_iter()
def __len__(self):
return self._db.dbsize()
store = Store()
|
import collections
import pickle
import redis
from .tools import location
class Store(collections.MutableMapping):
def __init__(self, db=0):
host, port = location('redis', 6379)
self._db = redis.StrictRedis(host=host, port=port, db=db)
def __getitem__(self, key):
obj = self._db.get(key)
if obj is None:
raise KeyError('"{}" not found'.format(key))
return pickle.loads(obj)
def __setitem__(self, key, value):
obj = pickle.dumps(value)
self._db.set(key, obj)
def __delitem__(self, key):
self._db.delete(key)
def __iter__(self):
return self._db.scan_iter()
def __len__(self):
return self._db.dbsize()
store = Store()
|
Store is using serfnode service discovery
|
Store is using serfnode service discovery
|
Python
|
mit
|
waltermoreira/adama-app,waltermoreira/adama-app,waltermoreira/adama-app
|
import collections
import pickle
import redis
- from .serf import node
+ from .tools import location
class Store(collections.MutableMapping):
def __init__(self, db=0):
- host, port = node(role='redis', port=6379)
+ host, port = location('redis', 6379)
self._db = redis.StrictRedis(host=host, port=port, db=db)
def __getitem__(self, key):
obj = self._db.get(key)
if obj is None:
raise KeyError('"{}" not found'.format(key))
return pickle.loads(obj)
def __setitem__(self, key, value):
obj = pickle.dumps(value)
self._db.set(key, obj)
def __delitem__(self, key):
self._db.delete(key)
def __iter__(self):
return self._db.scan_iter()
def __len__(self):
return self._db.dbsize()
store = Store()
|
Store is using serfnode service discovery
|
## Code Before:
import collections
import pickle
import redis
from .serf import node
class Store(collections.MutableMapping):
def __init__(self, db=0):
host, port = node(role='redis', port=6379)
self._db = redis.StrictRedis(host=host, port=port, db=db)
def __getitem__(self, key):
obj = self._db.get(key)
if obj is None:
raise KeyError('"{}" not found'.format(key))
return pickle.loads(obj)
def __setitem__(self, key, value):
obj = pickle.dumps(value)
self._db.set(key, obj)
def __delitem__(self, key):
self._db.delete(key)
def __iter__(self):
return self._db.scan_iter()
def __len__(self):
return self._db.dbsize()
store = Store()
## Instruction:
Store is using serfnode service discovery
## Code After:
import collections
import pickle
import redis
from .tools import location
class Store(collections.MutableMapping):
def __init__(self, db=0):
host, port = location('redis', 6379)
self._db = redis.StrictRedis(host=host, port=port, db=db)
def __getitem__(self, key):
obj = self._db.get(key)
if obj is None:
raise KeyError('"{}" not found'.format(key))
return pickle.loads(obj)
def __setitem__(self, key, value):
obj = pickle.dumps(value)
self._db.set(key, obj)
def __delitem__(self, key):
self._db.delete(key)
def __iter__(self):
return self._db.scan_iter()
def __len__(self):
return self._db.dbsize()
store = Store()
|
...
from .tools import location
...
def __init__(self, db=0):
host, port = location('redis', 6379)
self._db = redis.StrictRedis(host=host, port=port, db=db)
...
|
d65643e1bb74210a458b370aca5343f5c7059022
|
wm_metrics/period.py
|
wm_metrics/period.py
|
"""Representation of a period of time."""
class Period(object):
def __init__(self, start, end):
self.start = start
self.end = end
def __repr__(self):
return "%s-%s" % (self.start, self.end)
|
"""Representation of a period of time."""
class Period(object):
def __init__(self, start, end):
self.start = start
self.end = end
def __repr__(self):
return "%s-%s" % (self.start, self.end)
def __eq__(self, other):
return ((other.start == self.start) and
(other.end == self.end))
|
Add __eq__ method to Period object
|
Add __eq__ method to Period object
Ultimately we probably want to reuse Python objects
like timestamps.
|
Python
|
mit
|
Commonists/wm_metrics,danmichaelo/wm_metrics,Commonists/wm_metrics,danmichaelo/wm_metrics,danmichaelo/wm_metrics,Commonists/wm_metrics,Commonists/wm_metrics,danmichaelo/wm_metrics
|
"""Representation of a period of time."""
class Period(object):
def __init__(self, start, end):
self.start = start
self.end = end
def __repr__(self):
return "%s-%s" % (self.start, self.end)
+ def __eq__(self, other):
+ return ((other.start == self.start) and
+ (other.end == self.end))
+
|
Add __eq__ method to Period object
|
## Code Before:
"""Representation of a period of time."""
class Period(object):
def __init__(self, start, end):
self.start = start
self.end = end
def __repr__(self):
return "%s-%s" % (self.start, self.end)
## Instruction:
Add __eq__ method to Period object
## Code After:
"""Representation of a period of time."""
class Period(object):
def __init__(self, start, end):
self.start = start
self.end = end
def __repr__(self):
return "%s-%s" % (self.start, self.end)
def __eq__(self, other):
return ((other.start == self.start) and
(other.end == self.end))
|
...
return "%s-%s" % (self.start, self.end)
def __eq__(self, other):
return ((other.start == self.start) and
(other.end == self.end))
...
|
0ec2c192a3f8428bb487add6a70aef100f02c036
|
segpy/portability.py
|
segpy/portability.py
|
import os
import sys
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
if sys.version_info >= (3, 0):
long_int = int
else:
long_int = long
if sys.version_info >= (3, 0):
def byte_string(integers):
return bytes(integers)
else:
def byte_string(integers):
return EMPTY_BYTE_STRING.join(chr(i) for i in integers)
if sys.version_info >= (3, 0):
import reprlib
reprlib = reprlib # Keep the static analyzer happy
else:
import repr as reprlib
if sys.version_info >= (3, 0):
izip = zip
from itertools import zip_longest as izip_longest
else:
from itertools import (izip, izip_longest)
izip = izip # Keep the static analyzer happy
izip_longest = izip_longest # Keep the static analyzer happy
if sys.version_info >= (3, 0):
def four_bytes(byte_str):
a, b, c, d = byte_str[:4]
return a, b, c, d
else:
def four_bytes(byte_str):
a = ord(byte_str[0])
b = ord(byte_str[1])
c = ord(byte_str[2])
d = ord(byte_str[3])
return a, b, c, d
if sys.version_info >= (3, 0):
unicode = str
else:
unicode = unicode
|
import os
import sys
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
if sys.version_info >= (3, 0):
def byte_string(integers):
return bytes(integers)
else:
def byte_string(integers):
return EMPTY_BYTE_STRING.join(chr(i) for i in integers)
if sys.version_info >= (3, 0):
import reprlib
reprlib = reprlib # Keep the static analyzer happy
else:
import repr as reprlib
if sys.version_info >= (3, 0):
izip = zip
from itertools import zip_longest as izip_longest
else:
from itertools import (izip, izip_longest)
izip = izip # Keep the static analyzer happy
izip_longest = izip_longest # Keep the static analyzer happy
if sys.version_info >= (3, 0):
def four_bytes(byte_str):
a, b, c, d = byte_str[:4]
return a, b, c, d
else:
def four_bytes(byte_str):
a = ord(byte_str[0])
b = ord(byte_str[1])
c = ord(byte_str[2])
d = ord(byte_str[3])
return a, b, c, d
if sys.version_info >= (3, 0):
unicode = str
else:
unicode = unicode
|
Remove Python 2.7 crutch for int/long
|
Remove Python 2.7 crutch for int/long
|
Python
|
agpl-3.0
|
hohogpb/segpy,abingham/segpy,kjellkongsvik/segpy,Kramer477/segpy,kwinkunks/segpy,stevejpurves/segpy,asbjorn/segpy
|
import os
import sys
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
-
-
- if sys.version_info >= (3, 0):
- long_int = int
- else:
- long_int = long
if sys.version_info >= (3, 0):
def byte_string(integers):
return bytes(integers)
else:
def byte_string(integers):
return EMPTY_BYTE_STRING.join(chr(i) for i in integers)
if sys.version_info >= (3, 0):
import reprlib
reprlib = reprlib # Keep the static analyzer happy
else:
import repr as reprlib
if sys.version_info >= (3, 0):
izip = zip
from itertools import zip_longest as izip_longest
else:
from itertools import (izip, izip_longest)
izip = izip # Keep the static analyzer happy
izip_longest = izip_longest # Keep the static analyzer happy
if sys.version_info >= (3, 0):
def four_bytes(byte_str):
a, b, c, d = byte_str[:4]
return a, b, c, d
else:
def four_bytes(byte_str):
a = ord(byte_str[0])
b = ord(byte_str[1])
c = ord(byte_str[2])
d = ord(byte_str[3])
return a, b, c, d
if sys.version_info >= (3, 0):
unicode = str
else:
unicode = unicode
|
Remove Python 2.7 crutch for int/long
|
## Code Before:
import os
import sys
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
if sys.version_info >= (3, 0):
long_int = int
else:
long_int = long
if sys.version_info >= (3, 0):
def byte_string(integers):
return bytes(integers)
else:
def byte_string(integers):
return EMPTY_BYTE_STRING.join(chr(i) for i in integers)
if sys.version_info >= (3, 0):
import reprlib
reprlib = reprlib # Keep the static analyzer happy
else:
import repr as reprlib
if sys.version_info >= (3, 0):
izip = zip
from itertools import zip_longest as izip_longest
else:
from itertools import (izip, izip_longest)
izip = izip # Keep the static analyzer happy
izip_longest = izip_longest # Keep the static analyzer happy
if sys.version_info >= (3, 0):
def four_bytes(byte_str):
a, b, c, d = byte_str[:4]
return a, b, c, d
else:
def four_bytes(byte_str):
a = ord(byte_str[0])
b = ord(byte_str[1])
c = ord(byte_str[2])
d = ord(byte_str[3])
return a, b, c, d
if sys.version_info >= (3, 0):
unicode = str
else:
unicode = unicode
## Instruction:
Remove Python 2.7 crutch for int/long
## Code After:
import os
import sys
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
if sys.version_info >= (3, 0):
def byte_string(integers):
return bytes(integers)
else:
def byte_string(integers):
return EMPTY_BYTE_STRING.join(chr(i) for i in integers)
if sys.version_info >= (3, 0):
import reprlib
reprlib = reprlib # Keep the static analyzer happy
else:
import repr as reprlib
if sys.version_info >= (3, 0):
izip = zip
from itertools import zip_longest as izip_longest
else:
from itertools import (izip, izip_longest)
izip = izip # Keep the static analyzer happy
izip_longest = izip_longest # Keep the static analyzer happy
if sys.version_info >= (3, 0):
def four_bytes(byte_str):
a, b, c, d = byte_str[:4]
return a, b, c, d
else:
def four_bytes(byte_str):
a = ord(byte_str[0])
b = ord(byte_str[1])
c = ord(byte_str[2])
d = ord(byte_str[3])
return a, b, c, d
if sys.version_info >= (3, 0):
unicode = str
else:
unicode = unicode
|
# ... existing code ...
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
# ... rest of the code ...
|
628d777e3751ec8e38f1b98f558799b28cda1569
|
src/services/TemperatureMonitor/TemperatureMonitor.py
|
src/services/TemperatureMonitor/TemperatureMonitor.py
|
import sys
from src.TemperatureMonitor import TemperatureMonitor
from src.temperature import TemperatureSensor
SENSOR_ADDRESS = 0x48
tempMonitor = TemperatureMonitor(TemperatureSensor(SENSOR_ADDRESS), observers=sys.argv[1:])
tempMonitor.run()
|
import sys
from src.TemperatureMonitor import TemperatureMonitor
from src.temperature import TemperatureSensor
import argparse
parser = argparse.ArgumentParser(description='Broadcast temperatures to URLs')
parser.add_argument('observers', metavar='N', type=str, nargs='+',
help='the observers', default=())
parser.add_argument("-i", "--interval", type=int, help="the period between testing the temperature", default=60)
parser.add_argument("-s", "--smoothing", type=int, help="the number of samples to average when broadcasting a result", default=60)
args = parser.parse_args()
SENSOR_ADDRESS = 0x48
tempMonitor = TemperatureMonitor(TemperatureSensor(SENSOR_ADDRESS),
interval=args.interval,
smoothing=args.smoothing,
observers=args.observers)
tempMonitor.run()
|
Allow Control of Interval and Observers
|
Allow Control of Interval and Observers
|
Python
|
mit
|
IAPark/PITherm
|
import sys
from src.TemperatureMonitor import TemperatureMonitor
from src.temperature import TemperatureSensor
+ import argparse
+
+ parser = argparse.ArgumentParser(description='Broadcast temperatures to URLs')
+ parser.add_argument('observers', metavar='N', type=str, nargs='+',
+ help='the observers', default=())
+ parser.add_argument("-i", "--interval", type=int, help="the period between testing the temperature", default=60)
+ parser.add_argument("-s", "--smoothing", type=int, help="the number of samples to average when broadcasting a result", default=60)
+
+ args = parser.parse_args()
+
SENSOR_ADDRESS = 0x48
- tempMonitor = TemperatureMonitor(TemperatureSensor(SENSOR_ADDRESS), observers=sys.argv[1:])
+ tempMonitor = TemperatureMonitor(TemperatureSensor(SENSOR_ADDRESS),
+ interval=args.interval,
+ smoothing=args.smoothing,
+ observers=args.observers)
tempMonitor.run()
|
Allow Control of Interval and Observers
|
## Code Before:
import sys
from src.TemperatureMonitor import TemperatureMonitor
from src.temperature import TemperatureSensor
SENSOR_ADDRESS = 0x48
tempMonitor = TemperatureMonitor(TemperatureSensor(SENSOR_ADDRESS), observers=sys.argv[1:])
tempMonitor.run()
## Instruction:
Allow Control of Interval and Observers
## Code After:
import sys
from src.TemperatureMonitor import TemperatureMonitor
from src.temperature import TemperatureSensor
import argparse
parser = argparse.ArgumentParser(description='Broadcast temperatures to URLs')
parser.add_argument('observers', metavar='N', type=str, nargs='+',
help='the observers', default=())
parser.add_argument("-i", "--interval", type=int, help="the period between testing the temperature", default=60)
parser.add_argument("-s", "--smoothing", type=int, help="the number of samples to average when broadcasting a result", default=60)
args = parser.parse_args()
SENSOR_ADDRESS = 0x48
tempMonitor = TemperatureMonitor(TemperatureSensor(SENSOR_ADDRESS),
interval=args.interval,
smoothing=args.smoothing,
observers=args.observers)
tempMonitor.run()
|
# ... existing code ...
import argparse
parser = argparse.ArgumentParser(description='Broadcast temperatures to URLs')
parser.add_argument('observers', metavar='N', type=str, nargs='+',
help='the observers', default=())
parser.add_argument("-i", "--interval", type=int, help="the period between testing the temperature", default=60)
parser.add_argument("-s", "--smoothing", type=int, help="the number of samples to average when broadcasting a result", default=60)
args = parser.parse_args()
SENSOR_ADDRESS = 0x48
# ... modified code ...
tempMonitor = TemperatureMonitor(TemperatureSensor(SENSOR_ADDRESS),
interval=args.interval,
smoothing=args.smoothing,
observers=args.observers)
tempMonitor.run()
# ... rest of the code ...
|
9ff346834a39605a707d66d4a2c6e3dc20dcdd78
|
markov_chain.py
|
markov_chain.py
|
from random import choice
class MarkovChain(object):
""" An interface for signle-word states Markov Chains """
def __init__(self, text=None):
self._states_map = {}
if text is not None:
self.add_text(text)
def add_text(self, text, separator=" "):
""" Adds text to the markov chain """
word_list = text.split(separator)
for i in range(0, len(word_list)-1):
self._states_map.setdefault(word_list[i], []).append(word_list[i+1])
return self
def get_word(self, key):
""" Returns a word from Markov Chain associated with the key """
values = self._states_map.get(key)
return choice(values) if values is not None else None
|
from random import choice
class MarkovChain(object):
""" An interface for signle-word states Markov Chains """
def __init__(self, text=None):
self._states_map = {}
if text is not None:
self.add_text(text)
def add_text(self, text, separator=" "):
""" Adds text to the markov chain """
word_list = text.split(separator)
for i in range(0, len(word_list)-1):
self._states_map.setdefault(word_list[i], []).append(word_list[i+1])
return self
def add_text_collection(self, text_col, separator=" "):
""" Adds a collection of text strings to the markov chain """
for line in text_col:
if line not in ["", "\n", None]:
self.add_text(line, separator)
def get_word(self, key):
""" Returns a word from Markov Chain associated with the key """
values = self._states_map.get(key)
return choice(values) if values is not None else None
|
Add Markov Chain representation class
|
Add Markov Chain representation class
|
Python
|
mit
|
iluxonchik/lyricist
|
from random import choice
class MarkovChain(object):
""" An interface for signle-word states Markov Chains """
def __init__(self, text=None):
self._states_map = {}
-
if text is not None:
self.add_text(text)
def add_text(self, text, separator=" "):
""" Adds text to the markov chain """
word_list = text.split(separator)
for i in range(0, len(word_list)-1):
self._states_map.setdefault(word_list[i], []).append(word_list[i+1])
return self
+ def add_text_collection(self, text_col, separator=" "):
+ """ Adds a collection of text strings to the markov chain """
+ for line in text_col:
+ if line not in ["", "\n", None]:
+ self.add_text(line, separator)
+
+
def get_word(self, key):
""" Returns a word from Markov Chain associated with the key """
values = self._states_map.get(key)
return choice(values) if values is not None else None
|
Add Markov Chain representation class
|
## Code Before:
from random import choice
class MarkovChain(object):
""" An interface for signle-word states Markov Chains """
def __init__(self, text=None):
self._states_map = {}
if text is not None:
self.add_text(text)
def add_text(self, text, separator=" "):
""" Adds text to the markov chain """
word_list = text.split(separator)
for i in range(0, len(word_list)-1):
self._states_map.setdefault(word_list[i], []).append(word_list[i+1])
return self
def get_word(self, key):
""" Returns a word from Markov Chain associated with the key """
values = self._states_map.get(key)
return choice(values) if values is not None else None
## Instruction:
Add Markov Chain representation class
## Code After:
from random import choice
class MarkovChain(object):
""" An interface for signle-word states Markov Chains """
def __init__(self, text=None):
self._states_map = {}
if text is not None:
self.add_text(text)
def add_text(self, text, separator=" "):
""" Adds text to the markov chain """
word_list = text.split(separator)
for i in range(0, len(word_list)-1):
self._states_map.setdefault(word_list[i], []).append(word_list[i+1])
return self
def add_text_collection(self, text_col, separator=" "):
""" Adds a collection of text strings to the markov chain """
for line in text_col:
if line not in ["", "\n", None]:
self.add_text(line, separator)
def get_word(self, key):
""" Returns a word from Markov Chain associated with the key """
values = self._states_map.get(key)
return choice(values) if values is not None else None
|
// ... existing code ...
self._states_map = {}
if text is not None:
// ... modified code ...
def add_text_collection(self, text_col, separator=" "):
""" Adds a collection of text strings to the markov chain """
for line in text_col:
if line not in ["", "\n", None]:
self.add_text(line, separator)
def get_word(self, key):
// ... rest of the code ...
|
605340f9c18f1591846e0a9b5f9c983c940d80c9
|
tests/models/test_repository.py
|
tests/models/test_repository.py
|
from nose.tools import eq_
from mock import MagicMock, patch
from pyolite.models.repository import Repository
class TestRepositoryModel(object):
def test_it_should_be_possible_to_retrieve_by_name_a_repo(self):
mocked_users = MagicMock()
mocked_file = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_file.isdir.return_value = False
mocked_file.__str__ = lambda x: 'tests/fixtures/get_repo_by_name.conf'
mocked_path.walk.return_value = [mocked_file, mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo.name, 'new_one')
eq_(repo.path, 'simple_path')
eq_(repo.git, 'git')
eq_(repo.users, mocked_users)
|
from nose.tools import eq_
from mock import MagicMock, patch
from pyolite.models.repository import Repository
class TestRepositoryModel(object):
def test_it_should_be_possible_to_retrieve_by_name_a_repo(self):
mocked_users = MagicMock()
mocked_file = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_file.isdir.return_value = False
mocked_file.__str__ = lambda x: 'tests/fixtures/get_repo_by_name.conf'
mocked_path.walk.return_value = [mocked_file, mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo.name, 'new_one')
eq_(repo.path, 'simple_path')
eq_(repo.git, 'git')
eq_(repo.users, mocked_users)
def test_if_we_find_only_directories_should_return_none(self):
mocked_users = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_path.walk.return_value = [mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo, None)
|
Test if we look after a non-existing repo
|
Test if we look after a non-existing repo
|
Python
|
bsd-2-clause
|
PressLabs/pyolite,shawkinsl/pyolite
|
from nose.tools import eq_
from mock import MagicMock, patch
from pyolite.models.repository import Repository
class TestRepositoryModel(object):
def test_it_should_be_possible_to_retrieve_by_name_a_repo(self):
mocked_users = MagicMock()
mocked_file = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_file.isdir.return_value = False
mocked_file.__str__ = lambda x: 'tests/fixtures/get_repo_by_name.conf'
mocked_path.walk.return_value = [mocked_file, mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo.name, 'new_one')
eq_(repo.path, 'simple_path')
eq_(repo.git, 'git')
eq_(repo.users, mocked_users)
+ def test_if_we_find_only_directories_should_return_none(self):
+ mocked_users = MagicMock()
+ mocked_dir = MagicMock()
+ mocked_path = MagicMock()
+
+ mocked_dir.isdir.return_value = True
+
+ mocked_path.walk.return_value = [mocked_dir]
+
+ with patch.multiple('pyolite.models.repository',
+ Path=MagicMock(return_value=mocked_path),
+ ListUsers=MagicMock(return_value=mocked_users)):
+ repo = Repository.get_by_name('new_one', 'simple_path', 'git')
+ eq_(repo, None)
+
|
Test if we look after a non-existing repo
|
## Code Before:
from nose.tools import eq_
from mock import MagicMock, patch
from pyolite.models.repository import Repository
class TestRepositoryModel(object):
def test_it_should_be_possible_to_retrieve_by_name_a_repo(self):
mocked_users = MagicMock()
mocked_file = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_file.isdir.return_value = False
mocked_file.__str__ = lambda x: 'tests/fixtures/get_repo_by_name.conf'
mocked_path.walk.return_value = [mocked_file, mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo.name, 'new_one')
eq_(repo.path, 'simple_path')
eq_(repo.git, 'git')
eq_(repo.users, mocked_users)
## Instruction:
Test if we look after a non-existing repo
## Code After:
from nose.tools import eq_
from mock import MagicMock, patch
from pyolite.models.repository import Repository
class TestRepositoryModel(object):
def test_it_should_be_possible_to_retrieve_by_name_a_repo(self):
mocked_users = MagicMock()
mocked_file = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_file.isdir.return_value = False
mocked_file.__str__ = lambda x: 'tests/fixtures/get_repo_by_name.conf'
mocked_path.walk.return_value = [mocked_file, mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo.name, 'new_one')
eq_(repo.path, 'simple_path')
eq_(repo.git, 'git')
eq_(repo.users, mocked_users)
def test_if_we_find_only_directories_should_return_none(self):
mocked_users = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_path.walk.return_value = [mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo, None)
|
...
eq_(repo.users, mocked_users)
def test_if_we_find_only_directories_should_return_none(self):
mocked_users = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_path.walk.return_value = [mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo, None)
...
|
9178e4d6a8fb54c6124c192765a9ff8cc3a582c6
|
docs/recipe_bridge.py
|
docs/recipe_bridge.py
|
import time
from picraft import World, Vector, Block
world = World(ignore_errors=True)
world.say('Auto-bridge active')
last_pos = None
while True:
this_pos = world.player.pos
if last_pos is not None:
# Has the player moved more than 0.2 units in a horizontal direction?
movement = (this_pos - last_pos).replace(y=0.0)
if movement.magnitude > 0.1:
# Find the next tile they're going to step on
next_pos = (this_pos + movement.unit).floor() - Vector(y=1)
if world.blocks[next_pos] == Block('air'):
world.blocks[next_pos] = Block('diamond_block')
last_pos = this_pos
time.sleep(0.01)
|
from __future__ import unicode_literals
import time
from picraft import World, Vector, Block
from collections import deque
world = World(ignore_errors=True)
world.say('Auto-bridge active')
try:
bridge = deque()
last_pos = None
while True:
this_pos = world.player.pos
if last_pos is not None:
# Has the player moved more than 0.1 units in a horizontal direction?
movement = (this_pos - last_pos).replace(y=0.0)
if movement.magnitude > 0.1:
# Find the next tile they're going to step on
next_pos = (this_pos + movement.unit).floor() - Vector(y=1)
if world.blocks[next_pos] == Block('air'):
with world.connection.batch_start():
bridge.append(next_pos)
world.blocks[next_pos] = Block('diamond_block')
while len(bridge) > 10:
world.blocks[bridge.popleft()] = Block('air')
last_pos = this_pos
time.sleep(0.01)
except KeyboardInterrupt:
world.say('Auto-bridge deactivated')
with world.connection.batch_start():
while bridge:
world.blocks[bridge.popleft()] = Block('air')
|
Update bridge recipe to limit length
|
Update bridge recipe to limit length
Also removes bridge at the end
|
Python
|
bsd-3-clause
|
waveform80/picraft,radames/picraft
|
+
+ from __future__ import unicode_literals
import time
from picraft import World, Vector, Block
+ from collections import deque
world = World(ignore_errors=True)
world.say('Auto-bridge active')
-
+ try:
+ bridge = deque()
- last_pos = None
+ last_pos = None
- while True:
+ while True:
- this_pos = world.player.pos
+ this_pos = world.player.pos
- if last_pos is not None:
+ if last_pos is not None:
- # Has the player moved more than 0.2 units in a horizontal direction?
+ # Has the player moved more than 0.1 units in a horizontal direction?
- movement = (this_pos - last_pos).replace(y=0.0)
+ movement = (this_pos - last_pos).replace(y=0.0)
- if movement.magnitude > 0.1:
+ if movement.magnitude > 0.1:
- # Find the next tile they're going to step on
+ # Find the next tile they're going to step on
- next_pos = (this_pos + movement.unit).floor() - Vector(y=1)
+ next_pos = (this_pos + movement.unit).floor() - Vector(y=1)
- if world.blocks[next_pos] == Block('air'):
+ if world.blocks[next_pos] == Block('air'):
+ with world.connection.batch_start():
+ bridge.append(next_pos)
- world.blocks[next_pos] = Block('diamond_block')
+ world.blocks[next_pos] = Block('diamond_block')
+ while len(bridge) > 10:
+ world.blocks[bridge.popleft()] = Block('air')
- last_pos = this_pos
+ last_pos = this_pos
- time.sleep(0.01)
+ time.sleep(0.01)
+ except KeyboardInterrupt:
+ world.say('Auto-bridge deactivated')
+ with world.connection.batch_start():
+ while bridge:
+ world.blocks[bridge.popleft()] = Block('air')
|
Update bridge recipe to limit length
|
## Code Before:
import time
from picraft import World, Vector, Block
world = World(ignore_errors=True)
world.say('Auto-bridge active')
last_pos = None
while True:
this_pos = world.player.pos
if last_pos is not None:
# Has the player moved more than 0.2 units in a horizontal direction?
movement = (this_pos - last_pos).replace(y=0.0)
if movement.magnitude > 0.1:
# Find the next tile they're going to step on
next_pos = (this_pos + movement.unit).floor() - Vector(y=1)
if world.blocks[next_pos] == Block('air'):
world.blocks[next_pos] = Block('diamond_block')
last_pos = this_pos
time.sleep(0.01)
## Instruction:
Update bridge recipe to limit length
## Code After:
from __future__ import unicode_literals
import time
from picraft import World, Vector, Block
from collections import deque
world = World(ignore_errors=True)
world.say('Auto-bridge active')
try:
bridge = deque()
last_pos = None
while True:
this_pos = world.player.pos
if last_pos is not None:
# Has the player moved more than 0.1 units in a horizontal direction?
movement = (this_pos - last_pos).replace(y=0.0)
if movement.magnitude > 0.1:
# Find the next tile they're going to step on
next_pos = (this_pos + movement.unit).floor() - Vector(y=1)
if world.blocks[next_pos] == Block('air'):
with world.connection.batch_start():
bridge.append(next_pos)
world.blocks[next_pos] = Block('diamond_block')
while len(bridge) > 10:
world.blocks[bridge.popleft()] = Block('air')
last_pos = this_pos
time.sleep(0.01)
except KeyboardInterrupt:
world.say('Auto-bridge deactivated')
with world.connection.batch_start():
while bridge:
world.blocks[bridge.popleft()] = Block('air')
|
...
from __future__ import unicode_literals
...
from picraft import World, Vector, Block
from collections import deque
...
world.say('Auto-bridge active')
try:
bridge = deque()
last_pos = None
while True:
this_pos = world.player.pos
if last_pos is not None:
# Has the player moved more than 0.1 units in a horizontal direction?
movement = (this_pos - last_pos).replace(y=0.0)
if movement.magnitude > 0.1:
# Find the next tile they're going to step on
next_pos = (this_pos + movement.unit).floor() - Vector(y=1)
if world.blocks[next_pos] == Block('air'):
with world.connection.batch_start():
bridge.append(next_pos)
world.blocks[next_pos] = Block('diamond_block')
while len(bridge) > 10:
world.blocks[bridge.popleft()] = Block('air')
last_pos = this_pos
time.sleep(0.01)
except KeyboardInterrupt:
world.say('Auto-bridge deactivated')
with world.connection.batch_start():
while bridge:
world.blocks[bridge.popleft()] = Block('air')
...
|
39a0094f87bf03229eacb81c5bc86b55c8893ceb
|
serving.py
|
serving.py
|
"""Extend werkzeug request handler to suit our needs."""
import time
from werkzeug.serving import BaseRequestHandler
class ShRequestHandler(BaseRequestHandler):
"""Extend werkzeug request handler to suit our needs."""
def handle(self):
self.shRequestStarted = time.time()
rv = super(ShRequestHandler, self).handle()
return rv
def send_response(self, *args, **kw):
self.shRequestProcessed = time.time()
super(ShRequestHandler, self).send_response(*args, **kw)
def log_request(self, code='-', size='-'):
duration = int((self.shRequestProcessed - self.shRequestStarted) * 1000)
self.log('info', '"{0}" {1} {2} [{3}ms]'.format(self.requestline, code, size, duration))
|
"""Extend werkzeug request handler to suit our needs."""
import time
from werkzeug.serving import BaseRequestHandler
class ShRequestHandler(BaseRequestHandler):
"""Extend werkzeug request handler to suit our needs."""
def handle(self):
self.shRequestStarted = time.time()
rv = super(ShRequestHandler, self).handle()
return rv
def send_response(self, *args, **kw):
self.shRequestProcessed = time.time()
super(ShRequestHandler, self).send_response(*args, **kw)
def log_request(self, code='-', size='-'):
duration = int((self.shRequestProcessed - self.shRequestStarted) * 1000)
self.log('info', u'"{0}" {1} {2} [{3}ms]'.format(self.requestline.replace('%', '%%'), code, size, duration))
|
Handle logging when a '%' character is in the URL.
|
Handle logging when a '%' character is in the URL.
|
Python
|
bsd-3-clause
|
Sendhub/flashk_util
|
"""Extend werkzeug request handler to suit our needs."""
import time
from werkzeug.serving import BaseRequestHandler
class ShRequestHandler(BaseRequestHandler):
"""Extend werkzeug request handler to suit our needs."""
def handle(self):
self.shRequestStarted = time.time()
rv = super(ShRequestHandler, self).handle()
return rv
def send_response(self, *args, **kw):
self.shRequestProcessed = time.time()
super(ShRequestHandler, self).send_response(*args, **kw)
def log_request(self, code='-', size='-'):
duration = int((self.shRequestProcessed - self.shRequestStarted) * 1000)
- self.log('info', '"{0}" {1} {2} [{3}ms]'.format(self.requestline, code, size, duration))
+ self.log('info', u'"{0}" {1} {2} [{3}ms]'.format(self.requestline.replace('%', '%%'), code, size, duration))
|
Handle logging when a '%' character is in the URL.
|
## Code Before:
"""Extend werkzeug request handler to suit our needs."""
import time
from werkzeug.serving import BaseRequestHandler
class ShRequestHandler(BaseRequestHandler):
"""Extend werkzeug request handler to suit our needs."""
def handle(self):
self.shRequestStarted = time.time()
rv = super(ShRequestHandler, self).handle()
return rv
def send_response(self, *args, **kw):
self.shRequestProcessed = time.time()
super(ShRequestHandler, self).send_response(*args, **kw)
def log_request(self, code='-', size='-'):
duration = int((self.shRequestProcessed - self.shRequestStarted) * 1000)
self.log('info', '"{0}" {1} {2} [{3}ms]'.format(self.requestline, code, size, duration))
## Instruction:
Handle logging when a '%' character is in the URL.
## Code After:
"""Extend werkzeug request handler to suit our needs."""
import time
from werkzeug.serving import BaseRequestHandler
class ShRequestHandler(BaseRequestHandler):
"""Extend werkzeug request handler to suit our needs."""
def handle(self):
self.shRequestStarted = time.time()
rv = super(ShRequestHandler, self).handle()
return rv
def send_response(self, *args, **kw):
self.shRequestProcessed = time.time()
super(ShRequestHandler, self).send_response(*args, **kw)
def log_request(self, code='-', size='-'):
duration = int((self.shRequestProcessed - self.shRequestStarted) * 1000)
self.log('info', u'"{0}" {1} {2} [{3}ms]'.format(self.requestline.replace('%', '%%'), code, size, duration))
|
# ... existing code ...
duration = int((self.shRequestProcessed - self.shRequestStarted) * 1000)
self.log('info', u'"{0}" {1} {2} [{3}ms]'.format(self.requestline.replace('%', '%%'), code, size, duration))
# ... rest of the code ...
|
3f31234454949e7dca3b91d9884568da57ab9fcd
|
conftest.py
|
conftest.py
|
import pytest
import json
import os.path
from fixture.application import Application
fixture = None
target = None
@pytest.fixture
def app(request):
global fixture
global target
browser = request.config.getoption("--browser")
if target is None:
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), request.config.getoption("--target"))
with open(config_file) as f:
target = json.load(f)
if fixture is None or not fixture.is_valid():
fixture = Application(browser=browser, base_url=target['baseUrl'])
fixture.session.ensure_login(username=target['username'], password=target['password'])
return fixture
@pytest.fixture(scope="session", autouse=True)
def stop(request):
def fin():
fixture.session.ensure_logout()
fixture.destroy()
request.addfinalizer(fin)
return fixture
def pytest_addoption(parser):
parser.addoption("--browser", action="store", default="firefox")
parser.addoption("--target", action="store", default="target.json")
|
import pytest
import json
import os.path
import importlib
import jsonpickle
from fixture.application import Application
fixture = None
target = None
@pytest.fixture
def app(request):
global fixture
global target
browser = request.config.getoption("--browser")
if target is None:
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), request.config.getoption("--target"))
with open(config_file) as f:
target = json.load(f)
if fixture is None or not fixture.is_valid():
fixture = Application(browser=browser, base_url=target['baseUrl'])
fixture.session.ensure_login(username=target['username'], password=target['password'])
return fixture
@pytest.fixture(scope="session", autouse=True)
def stop(request):
def fin():
fixture.session.ensure_logout()
fixture.destroy()
request.addfinalizer(fin)
return fixture
def pytest_addoption(parser):
parser.addoption("--browser", action="store", default="firefox")
parser.addoption("--target", action="store", default="target.json")
def pytest_generate_tests(metafunc):
for fixture in metafunc.fixturenames:
if fixture.startswith("data_"):
testdata = load_from_module(fixture[5:])
metafunc.parametrize(fixture, testdata, ids=[str(x) for x in testdata])
elif fixture.startswith("json_"):
testdata = load_from_json(fixture[5:])
metafunc.parametrize(fixture, testdata, ids=[str(x) for x in testdata])
def load_from_module(module):
return importlib.import_module("data.%s" % module).testdata
def load_from_json(file):
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "data/%s.json" % file)) as f:
return jsonpickle.decode(f.read())
|
Add test data loading from file and test data parametrization
|
Add test data loading from file and test data parametrization
|
Python
|
apache-2.0
|
ujilia/python_training2,ujilia/python_training2,ujilia/python_training2
|
import pytest
import json
import os.path
+ import importlib
+ import jsonpickle
from fixture.application import Application
fixture = None
target = None
@pytest.fixture
def app(request):
global fixture
global target
browser = request.config.getoption("--browser")
if target is None:
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), request.config.getoption("--target"))
with open(config_file) as f:
target = json.load(f)
if fixture is None or not fixture.is_valid():
fixture = Application(browser=browser, base_url=target['baseUrl'])
fixture.session.ensure_login(username=target['username'], password=target['password'])
return fixture
@pytest.fixture(scope="session", autouse=True)
def stop(request):
def fin():
fixture.session.ensure_logout()
fixture.destroy()
request.addfinalizer(fin)
return fixture
def pytest_addoption(parser):
parser.addoption("--browser", action="store", default="firefox")
parser.addoption("--target", action="store", default="target.json")
+
+ def pytest_generate_tests(metafunc):
+ for fixture in metafunc.fixturenames:
+ if fixture.startswith("data_"):
+ testdata = load_from_module(fixture[5:])
+ metafunc.parametrize(fixture, testdata, ids=[str(x) for x in testdata])
+ elif fixture.startswith("json_"):
+ testdata = load_from_json(fixture[5:])
+ metafunc.parametrize(fixture, testdata, ids=[str(x) for x in testdata])
+
+
+ def load_from_module(module):
+ return importlib.import_module("data.%s" % module).testdata
+
+
+ def load_from_json(file):
+ with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "data/%s.json" % file)) as f:
+ return jsonpickle.decode(f.read())
+
|
Add test data loading from file and test data parametrization
|
## Code Before:
import pytest
import json
import os.path
from fixture.application import Application
fixture = None
target = None
@pytest.fixture
def app(request):
global fixture
global target
browser = request.config.getoption("--browser")
if target is None:
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), request.config.getoption("--target"))
with open(config_file) as f:
target = json.load(f)
if fixture is None or not fixture.is_valid():
fixture = Application(browser=browser, base_url=target['baseUrl'])
fixture.session.ensure_login(username=target['username'], password=target['password'])
return fixture
@pytest.fixture(scope="session", autouse=True)
def stop(request):
def fin():
fixture.session.ensure_logout()
fixture.destroy()
request.addfinalizer(fin)
return fixture
def pytest_addoption(parser):
parser.addoption("--browser", action="store", default="firefox")
parser.addoption("--target", action="store", default="target.json")
## Instruction:
Add test data loading from file and test data parametrization
## Code After:
import pytest
import json
import os.path
import importlib
import jsonpickle
from fixture.application import Application
fixture = None
target = None
@pytest.fixture
def app(request):
global fixture
global target
browser = request.config.getoption("--browser")
if target is None:
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), request.config.getoption("--target"))
with open(config_file) as f:
target = json.load(f)
if fixture is None or not fixture.is_valid():
fixture = Application(browser=browser, base_url=target['baseUrl'])
fixture.session.ensure_login(username=target['username'], password=target['password'])
return fixture
@pytest.fixture(scope="session", autouse=True)
def stop(request):
def fin():
fixture.session.ensure_logout()
fixture.destroy()
request.addfinalizer(fin)
return fixture
def pytest_addoption(parser):
parser.addoption("--browser", action="store", default="firefox")
parser.addoption("--target", action="store", default="target.json")
def pytest_generate_tests(metafunc):
for fixture in metafunc.fixturenames:
if fixture.startswith("data_"):
testdata = load_from_module(fixture[5:])
metafunc.parametrize(fixture, testdata, ids=[str(x) for x in testdata])
elif fixture.startswith("json_"):
testdata = load_from_json(fixture[5:])
metafunc.parametrize(fixture, testdata, ids=[str(x) for x in testdata])
def load_from_module(module):
return importlib.import_module("data.%s" % module).testdata
def load_from_json(file):
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "data/%s.json" % file)) as f:
return jsonpickle.decode(f.read())
|
// ... existing code ...
import os.path
import importlib
import jsonpickle
from fixture.application import Application
// ... modified code ...
parser.addoption("--target", action="store", default="target.json")
def pytest_generate_tests(metafunc):
for fixture in metafunc.fixturenames:
if fixture.startswith("data_"):
testdata = load_from_module(fixture[5:])
metafunc.parametrize(fixture, testdata, ids=[str(x) for x in testdata])
elif fixture.startswith("json_"):
testdata = load_from_json(fixture[5:])
metafunc.parametrize(fixture, testdata, ids=[str(x) for x in testdata])
def load_from_module(module):
return importlib.import_module("data.%s" % module).testdata
def load_from_json(file):
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "data/%s.json" % file)) as f:
return jsonpickle.decode(f.read())
// ... rest of the code ...
|
cd0f4758bcb8eacab0a6a1f21f3c4287b2d24995
|
vumi/blinkenlights/heartbeat/__init__.py
|
vumi/blinkenlights/heartbeat/__init__.py
|
from vumi.blinkenlights.heartbeat.publisher import (HeartBeatMessage,
HeartBeatPublisher)
from vumi.blinkenlights.heartbeat.monitor import HeartBeatMonitor
__all__ = ["HeartBeatMessage", "HeartBeatPublisher", "HeartBeatMonitor"]
|
from vumi.blinkenlights.heartbeat.publisher import (HeartBeatMessage,
HeartBeatPublisher)
__all__ = ["HeartBeatMessage", "HeartBeatPublisher"]
|
Resolve a cyclical dependency issue
|
Resolve a cyclical dependency issue
|
Python
|
bsd-3-clause
|
vishwaprakashmishra/xmatrix,TouK/vumi,harrissoerja/vumi,TouK/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi
|
from vumi.blinkenlights.heartbeat.publisher import (HeartBeatMessage,
HeartBeatPublisher)
- from vumi.blinkenlights.heartbeat.monitor import HeartBeatMonitor
+ __all__ = ["HeartBeatMessage", "HeartBeatPublisher"]
- __all__ = ["HeartBeatMessage", "HeartBeatPublisher", "HeartBeatMonitor"]
-
|
Resolve a cyclical dependency issue
|
## Code Before:
from vumi.blinkenlights.heartbeat.publisher import (HeartBeatMessage,
HeartBeatPublisher)
from vumi.blinkenlights.heartbeat.monitor import HeartBeatMonitor
__all__ = ["HeartBeatMessage", "HeartBeatPublisher", "HeartBeatMonitor"]
## Instruction:
Resolve a cyclical dependency issue
## Code After:
from vumi.blinkenlights.heartbeat.publisher import (HeartBeatMessage,
HeartBeatPublisher)
__all__ = ["HeartBeatMessage", "HeartBeatPublisher"]
|
...
HeartBeatPublisher)
__all__ = ["HeartBeatMessage", "HeartBeatPublisher"]
...
|
fbd168ae6a2b2733bec9ffa1eec4c56fbfdbc97b
|
modoboa/admin/migrations/0002_migrate_from_modoboa_admin.py
|
modoboa/admin/migrations/0002_migrate_from_modoboa_admin.py
|
from __future__ import unicode_literals
from django.db import models, migrations
def rename_and_clean(apps, schema_editor):
"""Rename old content types if necessary, remove permissions."""
ContentType = apps.get_model("contenttypes", "ContentType")
for ct in ContentType.objects.filter(app_label="admin"):
try:
old_ct = ContentType.objects.get(
app_label="modoboa_admin", model=ct.model)
except ContentType.DoesNotExist:
continue
old_ct.app_label = "admin"
ct.delete()
old_ct.save()
# Remove DomainAlias permissions from DomainAdmins group
Group = apps.get_model("auth", "Group")
Permission = apps.get_model("auth", "Permission")
group = Group.objects.get(name="DomainAdmins")
ct = ContentType.objects.get(app_label="admin", model="domainalias")
for permission in Permission.objects.filter(content_type=ct):
group.permissions.remove(permission)
class Migration(migrations.Migration):
dependencies = [
('admin', '0001_initial'),
]
operations = [
migrations.RunPython(rename_and_clean),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
def rename_and_clean(apps, schema_editor):
"""Rename old content types if necessary, remove permissions."""
ContentType = apps.get_model("contenttypes", "ContentType")
for ct in ContentType.objects.filter(app_label="admin"):
try:
old_ct = ContentType.objects.get(
app_label="modoboa_admin", model=ct.model)
except ContentType.DoesNotExist:
continue
old_ct.app_label = "admin"
ct.delete()
old_ct.save()
# Remove DomainAlias permissions from DomainAdmins group
Group = apps.get_model("auth", "Group")
try:
group = Group.objects.get(name="DomainAdmins")
except Group.DoesNotExist:
return
Permission = apps.get_model("auth", "Permission")
ct = ContentType.objects.get(app_label="admin", model="domainalias")
for permission in Permission.objects.filter(content_type=ct):
group.permissions.remove(permission)
class Migration(migrations.Migration):
dependencies = [
('admin', '0001_initial'),
]
operations = [
migrations.RunPython(rename_and_clean),
]
|
Handle the fresh install case.
|
Handle the fresh install case.
|
Python
|
isc
|
modoboa/modoboa,bearstech/modoboa,carragom/modoboa,carragom/modoboa,tonioo/modoboa,bearstech/modoboa,bearstech/modoboa,bearstech/modoboa,tonioo/modoboa,carragom/modoboa,modoboa/modoboa,tonioo/modoboa,modoboa/modoboa,modoboa/modoboa
|
from __future__ import unicode_literals
from django.db import models, migrations
def rename_and_clean(apps, schema_editor):
"""Rename old content types if necessary, remove permissions."""
ContentType = apps.get_model("contenttypes", "ContentType")
for ct in ContentType.objects.filter(app_label="admin"):
try:
old_ct = ContentType.objects.get(
app_label="modoboa_admin", model=ct.model)
except ContentType.DoesNotExist:
continue
old_ct.app_label = "admin"
ct.delete()
old_ct.save()
# Remove DomainAlias permissions from DomainAdmins group
Group = apps.get_model("auth", "Group")
+ try:
+ group = Group.objects.get(name="DomainAdmins")
+ except Group.DoesNotExist:
+ return
Permission = apps.get_model("auth", "Permission")
- group = Group.objects.get(name="DomainAdmins")
ct = ContentType.objects.get(app_label="admin", model="domainalias")
for permission in Permission.objects.filter(content_type=ct):
group.permissions.remove(permission)
class Migration(migrations.Migration):
dependencies = [
('admin', '0001_initial'),
]
operations = [
migrations.RunPython(rename_and_clean),
]
|
Handle the fresh install case.
|
## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
def rename_and_clean(apps, schema_editor):
"""Rename old content types if necessary, remove permissions."""
ContentType = apps.get_model("contenttypes", "ContentType")
for ct in ContentType.objects.filter(app_label="admin"):
try:
old_ct = ContentType.objects.get(
app_label="modoboa_admin", model=ct.model)
except ContentType.DoesNotExist:
continue
old_ct.app_label = "admin"
ct.delete()
old_ct.save()
# Remove DomainAlias permissions from DomainAdmins group
Group = apps.get_model("auth", "Group")
Permission = apps.get_model("auth", "Permission")
group = Group.objects.get(name="DomainAdmins")
ct = ContentType.objects.get(app_label="admin", model="domainalias")
for permission in Permission.objects.filter(content_type=ct):
group.permissions.remove(permission)
class Migration(migrations.Migration):
dependencies = [
('admin', '0001_initial'),
]
operations = [
migrations.RunPython(rename_and_clean),
]
## Instruction:
Handle the fresh install case.
## Code After:
from __future__ import unicode_literals
from django.db import models, migrations
def rename_and_clean(apps, schema_editor):
"""Rename old content types if necessary, remove permissions."""
ContentType = apps.get_model("contenttypes", "ContentType")
for ct in ContentType.objects.filter(app_label="admin"):
try:
old_ct = ContentType.objects.get(
app_label="modoboa_admin", model=ct.model)
except ContentType.DoesNotExist:
continue
old_ct.app_label = "admin"
ct.delete()
old_ct.save()
# Remove DomainAlias permissions from DomainAdmins group
Group = apps.get_model("auth", "Group")
try:
group = Group.objects.get(name="DomainAdmins")
except Group.DoesNotExist:
return
Permission = apps.get_model("auth", "Permission")
ct = ContentType.objects.get(app_label="admin", model="domainalias")
for permission in Permission.objects.filter(content_type=ct):
group.permissions.remove(permission)
class Migration(migrations.Migration):
dependencies = [
('admin', '0001_initial'),
]
operations = [
migrations.RunPython(rename_and_clean),
]
|
// ... existing code ...
Group = apps.get_model("auth", "Group")
try:
group = Group.objects.get(name="DomainAdmins")
except Group.DoesNotExist:
return
Permission = apps.get_model("auth", "Permission")
ct = ContentType.objects.get(app_label="admin", model="domainalias")
// ... rest of the code ...
|
838895500f8046b06718c184a4e8b12b42add516
|
wp2hugo.py
|
wp2hugo.py
|
import sys
from pprint import pprint
from lxml import etree
import html2text
from wp_parser import WordpressXMLParser
from hugo_printer import HugoPrinter
def main():
wp_xml_parser = WordpressXMLParser(sys.argv[1])
meta = wp_xml_parser.get_meta()
cats = wp_xml_parser.get_categories()
tags = wp_xml_parser.get_tags()
posts = wp_xml_parser.get_public_posts()
drafts = wp_xml_parser.get_drafts()
pprint(posts[-1])
if __name__ == '__main__':
main()
|
import sys
from pprint import pprint
from lxml import etree
import html2text
from wp_parser import WordpressXMLParser
from hugo_printer import HugoPrinter
def main():
wp_xml_parser = WordpressXMLParser(sys.argv[1])
wp_site_info = {
"meta": wp_xml_parser.get_meta(),
"cats": wp_xml_parser.get_categories(),
"tags": wp_xml_parser.get_tags(),
"posts": wp_xml_parser.get_public_posts(),
"drafts": wp_xml_parser.get_drafts(),
}
hugo_printer = HugoPrinter(**wp_site_info)
hugo_printer.gen_config()
if __name__ == '__main__':
main()
|
Call HugoPrinter to save config file
|
Call HugoPrinter to save config file
|
Python
|
mit
|
hzmangel/wp2hugo
|
import sys
from pprint import pprint
from lxml import etree
import html2text
from wp_parser import WordpressXMLParser
from hugo_printer import HugoPrinter
def main():
wp_xml_parser = WordpressXMLParser(sys.argv[1])
+ wp_site_info = {
- meta = wp_xml_parser.get_meta()
+ "meta": wp_xml_parser.get_meta(),
- cats = wp_xml_parser.get_categories()
+ "cats": wp_xml_parser.get_categories(),
- tags = wp_xml_parser.get_tags()
+ "tags": wp_xml_parser.get_tags(),
- posts = wp_xml_parser.get_public_posts()
+ "posts": wp_xml_parser.get_public_posts(),
- drafts = wp_xml_parser.get_drafts()
+ "drafts": wp_xml_parser.get_drafts(),
+ }
- pprint(posts[-1])
+ hugo_printer = HugoPrinter(**wp_site_info)
+ hugo_printer.gen_config()
if __name__ == '__main__':
main()
|
Call HugoPrinter to save config file
|
## Code Before:
import sys
from pprint import pprint
from lxml import etree
import html2text
from wp_parser import WordpressXMLParser
from hugo_printer import HugoPrinter
def main():
wp_xml_parser = WordpressXMLParser(sys.argv[1])
meta = wp_xml_parser.get_meta()
cats = wp_xml_parser.get_categories()
tags = wp_xml_parser.get_tags()
posts = wp_xml_parser.get_public_posts()
drafts = wp_xml_parser.get_drafts()
pprint(posts[-1])
if __name__ == '__main__':
main()
## Instruction:
Call HugoPrinter to save config file
## Code After:
import sys
from pprint import pprint
from lxml import etree
import html2text
from wp_parser import WordpressXMLParser
from hugo_printer import HugoPrinter
def main():
wp_xml_parser = WordpressXMLParser(sys.argv[1])
wp_site_info = {
"meta": wp_xml_parser.get_meta(),
"cats": wp_xml_parser.get_categories(),
"tags": wp_xml_parser.get_tags(),
"posts": wp_xml_parser.get_public_posts(),
"drafts": wp_xml_parser.get_drafts(),
}
hugo_printer = HugoPrinter(**wp_site_info)
hugo_printer.gen_config()
if __name__ == '__main__':
main()
|
// ... existing code ...
wp_site_info = {
"meta": wp_xml_parser.get_meta(),
"cats": wp_xml_parser.get_categories(),
"tags": wp_xml_parser.get_tags(),
"posts": wp_xml_parser.get_public_posts(),
"drafts": wp_xml_parser.get_drafts(),
}
hugo_printer = HugoPrinter(**wp_site_info)
hugo_printer.gen_config()
// ... rest of the code ...
|
8f094e1c3d4a64942cadf5603ce5b23706381fac
|
nubes/cmd/__init__.py
|
nubes/cmd/__init__.py
|
import openstack
def main():
print("Hello Clouds!")
|
import argparse
from nubes import dispatcher
def main():
parser = argparse.ArgumentParser(description='Universal IaaS CLI')
parser.add_argument('connector', help='IaaS Name')
parser.add_argument('resource', help='Resource to perform action')
parser.add_argument('action', help='Action to perform on resource')
parser.add_argument('--auth-url')
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--project-name')
args = parser.parse_args()
dispatch = dispatcher.Dispatcher(args.connector, args.auth_url,
args.username, args.password,
args.project_name)
resource = args.resource
if args.action == 'list':
# make plural
resource = args.resource + 's'
method_name = '_'.join([args.action, resource])
return getattr(dispatch, method_name)()
|
Make crude CLI commands work
|
Make crude CLI commands work
This is mainly as an example to show what it can look like.
|
Python
|
apache-2.0
|
omninubes/nubes
|
- import openstack
+ import argparse
+
+ from nubes import dispatcher
def main():
- print("Hello Clouds!")
+ parser = argparse.ArgumentParser(description='Universal IaaS CLI')
+ parser.add_argument('connector', help='IaaS Name')
+ parser.add_argument('resource', help='Resource to perform action')
+ parser.add_argument('action', help='Action to perform on resource')
+ parser.add_argument('--auth-url')
+ parser.add_argument('--username')
+ parser.add_argument('--password')
+ parser.add_argument('--project-name')
+ args = parser.parse_args()
+ dispatch = dispatcher.Dispatcher(args.connector, args.auth_url,
+ args.username, args.password,
+ args.project_name)
+ resource = args.resource
+ if args.action == 'list':
+ # make plural
+ resource = args.resource + 's'
+ method_name = '_'.join([args.action, resource])
+ return getattr(dispatch, method_name)()
+
|
Make crude CLI commands work
|
## Code Before:
import openstack
def main():
print("Hello Clouds!")
## Instruction:
Make crude CLI commands work
## Code After:
import argparse
from nubes import dispatcher
def main():
parser = argparse.ArgumentParser(description='Universal IaaS CLI')
parser.add_argument('connector', help='IaaS Name')
parser.add_argument('resource', help='Resource to perform action')
parser.add_argument('action', help='Action to perform on resource')
parser.add_argument('--auth-url')
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--project-name')
args = parser.parse_args()
dispatch = dispatcher.Dispatcher(args.connector, args.auth_url,
args.username, args.password,
args.project_name)
resource = args.resource
if args.action == 'list':
# make plural
resource = args.resource + 's'
method_name = '_'.join([args.action, resource])
return getattr(dispatch, method_name)()
|
// ... existing code ...
import argparse
from nubes import dispatcher
// ... modified code ...
def main():
parser = argparse.ArgumentParser(description='Universal IaaS CLI')
parser.add_argument('connector', help='IaaS Name')
parser.add_argument('resource', help='Resource to perform action')
parser.add_argument('action', help='Action to perform on resource')
parser.add_argument('--auth-url')
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--project-name')
args = parser.parse_args()
dispatch = dispatcher.Dispatcher(args.connector, args.auth_url,
args.username, args.password,
args.project_name)
resource = args.resource
if args.action == 'list':
# make plural
resource = args.resource + 's'
method_name = '_'.join([args.action, resource])
return getattr(dispatch, method_name)()
// ... rest of the code ...
|
5254e31d2309aa21b347d854293084eefddaa465
|
virtool/error_pages.py
|
virtool/error_pages.py
|
from aiohttp import web
from mako.template import Template
from virtool.utils import get_static_hash
template_500 = Template(filename="virtool/templates/error_500.html")
async def middleware_factory(app, handler):
async def middleware_handler(request):
try:
response = await handler(request)
if not request.path.startswith("/api"):
if response.status == 404:
return handle_404()
return response
except web.HTTPException as ex:
if ex.status == 404:
return handle_404()
raise
return middleware_handler
def handle_404():
html = Template(filename="virtool/templates/error_404.html").render(hash=get_static_hash())
return web.Response(body=html, content_type="text/html")
|
from aiohttp import web
from mako.template import Template
from virtool.utils import get_static_hash
template_500 = Template(filename="virtool/templates/error_500.html")
async def middleware_factory(app, handler):
async def middleware_handler(request):
is_api_call = request.path.startswith("/api")
try:
response = await handler(request)
if not is_api_call:
if response.status == 404:
return handle_404()
return response
except web.HTTPException as ex:
if not is_api_call and ex.status == 404:
return handle_404()
raise
return middleware_handler
def handle_404():
html = Template(filename="virtool/templates/error_404.html").render(hash=get_static_hash())
return web.Response(body=html, content_type="text/html")
|
Make HTTPExceptions return errors for /api calls
|
Make HTTPExceptions return errors for /api calls
|
Python
|
mit
|
virtool/virtool,virtool/virtool,igboyes/virtool,igboyes/virtool
|
from aiohttp import web
from mako.template import Template
from virtool.utils import get_static_hash
template_500 = Template(filename="virtool/templates/error_500.html")
async def middleware_factory(app, handler):
async def middleware_handler(request):
+ is_api_call = request.path.startswith("/api")
+
try:
response = await handler(request)
- if not request.path.startswith("/api"):
+ if not is_api_call:
if response.status == 404:
return handle_404()
return response
except web.HTTPException as ex:
- if ex.status == 404:
+ if not is_api_call and ex.status == 404:
return handle_404()
raise
return middleware_handler
def handle_404():
html = Template(filename="virtool/templates/error_404.html").render(hash=get_static_hash())
return web.Response(body=html, content_type="text/html")
|
Make HTTPExceptions return errors for /api calls
|
## Code Before:
from aiohttp import web
from mako.template import Template
from virtool.utils import get_static_hash
template_500 = Template(filename="virtool/templates/error_500.html")
async def middleware_factory(app, handler):
async def middleware_handler(request):
try:
response = await handler(request)
if not request.path.startswith("/api"):
if response.status == 404:
return handle_404()
return response
except web.HTTPException as ex:
if ex.status == 404:
return handle_404()
raise
return middleware_handler
def handle_404():
html = Template(filename="virtool/templates/error_404.html").render(hash=get_static_hash())
return web.Response(body=html, content_type="text/html")
## Instruction:
Make HTTPExceptions return errors for /api calls
## Code After:
from aiohttp import web
from mako.template import Template
from virtool.utils import get_static_hash
template_500 = Template(filename="virtool/templates/error_500.html")
async def middleware_factory(app, handler):
async def middleware_handler(request):
is_api_call = request.path.startswith("/api")
try:
response = await handler(request)
if not is_api_call:
if response.status == 404:
return handle_404()
return response
except web.HTTPException as ex:
if not is_api_call and ex.status == 404:
return handle_404()
raise
return middleware_handler
def handle_404():
html = Template(filename="virtool/templates/error_404.html").render(hash=get_static_hash())
return web.Response(body=html, content_type="text/html")
|
// ... existing code ...
async def middleware_handler(request):
is_api_call = request.path.startswith("/api")
try:
// ... modified code ...
if not is_api_call:
if response.status == 404:
...
except web.HTTPException as ex:
if not is_api_call and ex.status == 404:
return handle_404()
// ... rest of the code ...
|
a35a25732159e4c8b5655755ce31ec4c3e6e7975
|
dummy_robot/dummy_robot_bringup/launch/dummy_robot_bringup.launch.py
|
dummy_robot/dummy_robot_bringup/launch/dummy_robot_bringup.launch.py
|
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch_ros.actions import Node
def generate_launch_description():
# TODO(wjwwood): Use a substitution to find share directory once this is implemented in launch
urdf = os.path.join(get_package_share_directory('dummy_robot_bringup'),
'launch', 'single_rrbot.urdf')
return LaunchDescription([
Node(package='dummy_map_server', node_executable='dummy_map_server', output='screen'),
Node(package='robot_state_publisher', node_executable='robot_state_publisher',
output='screen', arguments=[urdf]),
Node(package='dummy_sensors', node_executable='dummy_joint_states', output='screen'),
Node(package='dummy_sensors', node_executable='dummy_laser', output='screen')
])
|
import os
from launch import LaunchDescription
from launch_ros.actions import Node
from launch_ros.substitutions import FindPackageShare
def generate_launch_description():
pkg_share = FindPackageShare('dummy_robot_bringup').find('dummy_robot_bringup')
urdf_file = os.path.join(pkg_share, 'launch', 'single_rrbot.urdf')
with open(urdf_file, 'r') as infp:
robot_desc = infp.read()
rsp_params = {'robot_description': robot_desc}
return LaunchDescription([
Node(package='dummy_map_server', node_executable='dummy_map_server', output='screen'),
Node(package='robot_state_publisher', node_executable='robot_state_publisher_node',
output='screen', parameters=[rsp_params]),
Node(package='dummy_sensors', node_executable='dummy_joint_states', output='screen'),
Node(package='dummy_sensors', node_executable='dummy_laser', output='screen')
])
|
Switch dummy_robot_bringup to use parameter for rsp.
|
Switch dummy_robot_bringup to use parameter for rsp.
Signed-off-by: Chris Lalancette <[email protected]>
|
Python
|
apache-2.0
|
ros2/demos,ros2/demos,ros2/demos,ros2/demos
|
import os
- from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch_ros.actions import Node
+ from launch_ros.substitutions import FindPackageShare
def generate_launch_description():
- # TODO(wjwwood): Use a substitution to find share directory once this is implemented in launch
- urdf = os.path.join(get_package_share_directory('dummy_robot_bringup'),
- 'launch', 'single_rrbot.urdf')
+ pkg_share = FindPackageShare('dummy_robot_bringup').find('dummy_robot_bringup')
+ urdf_file = os.path.join(pkg_share, 'launch', 'single_rrbot.urdf')
+ with open(urdf_file, 'r') as infp:
+ robot_desc = infp.read()
+ rsp_params = {'robot_description': robot_desc}
+
return LaunchDescription([
Node(package='dummy_map_server', node_executable='dummy_map_server', output='screen'),
- Node(package='robot_state_publisher', node_executable='robot_state_publisher',
+ Node(package='robot_state_publisher', node_executable='robot_state_publisher_node',
- output='screen', arguments=[urdf]),
+ output='screen', parameters=[rsp_params]),
Node(package='dummy_sensors', node_executable='dummy_joint_states', output='screen'),
Node(package='dummy_sensors', node_executable='dummy_laser', output='screen')
])
|
Switch dummy_robot_bringup to use parameter for rsp.
|
## Code Before:
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch_ros.actions import Node
def generate_launch_description():
# TODO(wjwwood): Use a substitution to find share directory once this is implemented in launch
urdf = os.path.join(get_package_share_directory('dummy_robot_bringup'),
'launch', 'single_rrbot.urdf')
return LaunchDescription([
Node(package='dummy_map_server', node_executable='dummy_map_server', output='screen'),
Node(package='robot_state_publisher', node_executable='robot_state_publisher',
output='screen', arguments=[urdf]),
Node(package='dummy_sensors', node_executable='dummy_joint_states', output='screen'),
Node(package='dummy_sensors', node_executable='dummy_laser', output='screen')
])
## Instruction:
Switch dummy_robot_bringup to use parameter for rsp.
## Code After:
import os
from launch import LaunchDescription
from launch_ros.actions import Node
from launch_ros.substitutions import FindPackageShare
def generate_launch_description():
pkg_share = FindPackageShare('dummy_robot_bringup').find('dummy_robot_bringup')
urdf_file = os.path.join(pkg_share, 'launch', 'single_rrbot.urdf')
with open(urdf_file, 'r') as infp:
robot_desc = infp.read()
rsp_params = {'robot_description': robot_desc}
return LaunchDescription([
Node(package='dummy_map_server', node_executable='dummy_map_server', output='screen'),
Node(package='robot_state_publisher', node_executable='robot_state_publisher_node',
output='screen', parameters=[rsp_params]),
Node(package='dummy_sensors', node_executable='dummy_joint_states', output='screen'),
Node(package='dummy_sensors', node_executable='dummy_laser', output='screen')
])
|
...
from launch import LaunchDescription
...
from launch_ros.actions import Node
from launch_ros.substitutions import FindPackageShare
...
def generate_launch_description():
pkg_share = FindPackageShare('dummy_robot_bringup').find('dummy_robot_bringup')
urdf_file = os.path.join(pkg_share, 'launch', 'single_rrbot.urdf')
with open(urdf_file, 'r') as infp:
robot_desc = infp.read()
rsp_params = {'robot_description': robot_desc}
return LaunchDescription([
...
Node(package='dummy_map_server', node_executable='dummy_map_server', output='screen'),
Node(package='robot_state_publisher', node_executable='robot_state_publisher_node',
output='screen', parameters=[rsp_params]),
Node(package='dummy_sensors', node_executable='dummy_joint_states', output='screen'),
...
|
0158579b9a6c729e7af9a543caeef25018e07834
|
conda_build/ldd.py
|
conda_build/ldd.py
|
from __future__ import absolute_import, division, print_function
import re
import subprocess
from conda_build import post
LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)')
LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found')
def ldd(path):
"thin wrapper around ldd"
lines = subprocess.check_output(['ldd', path]).decode('utf-8').splitlines()
res = []
for line in lines:
if '=>' not in line:
continue
assert line[0] == '\t', (path, line)
m = LDD_RE.match(line)
if m:
res.append(m.groups())
continue
m = LDD_NOT_FOUND_RE.match(line)
if m:
res.append((m.group(1), 'not found'))
continue
if 'ld-linux' in line:
continue
raise RuntimeError("Unexpected output from ldd: %s" % line)
return res
|
from __future__ import absolute_import, division, print_function
import re
import subprocess
import json
from os.path import join
from conda.install import rm_rf
from conda_build import post
from conda_build.config import config
from conda_build.build import create_env
LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)')
LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found')
def ldd(path):
"thin wrapper around ldd"
lines = subprocess.check_output(['ldd', path]).decode('utf-8').splitlines()
res = []
for line in lines:
if '=>' not in line:
continue
assert line[0] == '\t', (path, line)
m = LDD_RE.match(line)
if m:
res.append(m.groups())
continue
m = LDD_NOT_FOUND_RE.match(line)
if m:
res.append((m.group(1), 'not found'))
continue
if 'ld-linux' in line:
continue
raise RuntimeError("Unexpected output from ldd: %s" % line)
return res
def get_package_linkages(pkg):
rm_rf(config.test_prefix)
specs = ['%s %s %s' % (pkg.rsplit('.tar.bz2', 1)[0].rsplit('-', 2))]
create_env(config.test_prefix, specs)
res = {}
with open(join(config.test_prefix, 'conda-meta', '-'.join(specs[0]) +
'.json')) as f:
data = json.load(f)
files = data['files']
for f in files:
if post.is_obj(f):
res[f] = ldd(f)
return res
|
Add first pass at a get_package_linkages function
|
Add first pass at a get_package_linkages function
|
Python
|
bsd-3-clause
|
takluyver/conda-build,takluyver/conda-build,sandhujasmine/conda-build,frol/conda-build,frol/conda-build,ilastik/conda-build,dan-blanchard/conda-build,mwcraig/conda-build,rmcgibbo/conda-build,dan-blanchard/conda-build,sandhujasmine/conda-build,ilastik/conda-build,ilastik/conda-build,shastings517/conda-build,sandhujasmine/conda-build,rmcgibbo/conda-build,mwcraig/conda-build,dan-blanchard/conda-build,shastings517/conda-build,rmcgibbo/conda-build,takluyver/conda-build,shastings517/conda-build,mwcraig/conda-build,frol/conda-build
|
from __future__ import absolute_import, division, print_function
import re
import subprocess
+ import json
+ from os.path import join
+
+ from conda.install import rm_rf
from conda_build import post
+ from conda_build.config import config
+ from conda_build.build import create_env
+
LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)')
LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found')
def ldd(path):
"thin wrapper around ldd"
lines = subprocess.check_output(['ldd', path]).decode('utf-8').splitlines()
res = []
for line in lines:
if '=>' not in line:
continue
assert line[0] == '\t', (path, line)
m = LDD_RE.match(line)
if m:
res.append(m.groups())
continue
m = LDD_NOT_FOUND_RE.match(line)
if m:
res.append((m.group(1), 'not found'))
continue
if 'ld-linux' in line:
continue
raise RuntimeError("Unexpected output from ldd: %s" % line)
return res
+ def get_package_linkages(pkg):
+ rm_rf(config.test_prefix)
+ specs = ['%s %s %s' % (pkg.rsplit('.tar.bz2', 1)[0].rsplit('-', 2))]
+
+ create_env(config.test_prefix, specs)
+
+ res = {}
+
+ with open(join(config.test_prefix, 'conda-meta', '-'.join(specs[0]) +
+ '.json')) as f:
+ data = json.load(f)
+
+ files = data['files']
+ for f in files:
+ if post.is_obj(f):
+ res[f] = ldd(f)
+
+ return res
+
|
Add first pass at a get_package_linkages function
|
## Code Before:
from __future__ import absolute_import, division, print_function
import re
import subprocess
from conda_build import post
LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)')
LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found')
def ldd(path):
"thin wrapper around ldd"
lines = subprocess.check_output(['ldd', path]).decode('utf-8').splitlines()
res = []
for line in lines:
if '=>' not in line:
continue
assert line[0] == '\t', (path, line)
m = LDD_RE.match(line)
if m:
res.append(m.groups())
continue
m = LDD_NOT_FOUND_RE.match(line)
if m:
res.append((m.group(1), 'not found'))
continue
if 'ld-linux' in line:
continue
raise RuntimeError("Unexpected output from ldd: %s" % line)
return res
## Instruction:
Add first pass at a get_package_linkages function
## Code After:
from __future__ import absolute_import, division, print_function
import re
import subprocess
import json
from os.path import join
from conda.install import rm_rf
from conda_build import post
from conda_build.config import config
from conda_build.build import create_env
LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)')
LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found')
def ldd(path):
"thin wrapper around ldd"
lines = subprocess.check_output(['ldd', path]).decode('utf-8').splitlines()
res = []
for line in lines:
if '=>' not in line:
continue
assert line[0] == '\t', (path, line)
m = LDD_RE.match(line)
if m:
res.append(m.groups())
continue
m = LDD_NOT_FOUND_RE.match(line)
if m:
res.append((m.group(1), 'not found'))
continue
if 'ld-linux' in line:
continue
raise RuntimeError("Unexpected output from ldd: %s" % line)
return res
def get_package_linkages(pkg):
rm_rf(config.test_prefix)
specs = ['%s %s %s' % (pkg.rsplit('.tar.bz2', 1)[0].rsplit('-', 2))]
create_env(config.test_prefix, specs)
res = {}
with open(join(config.test_prefix, 'conda-meta', '-'.join(specs[0]) +
'.json')) as f:
data = json.load(f)
files = data['files']
for f in files:
if post.is_obj(f):
res[f] = ldd(f)
return res
|
// ... existing code ...
import subprocess
import json
from os.path import join
from conda.install import rm_rf
// ... modified code ...
from conda_build import post
from conda_build.config import config
from conda_build.build import create_env
...
return res
def get_package_linkages(pkg):
rm_rf(config.test_prefix)
specs = ['%s %s %s' % (pkg.rsplit('.tar.bz2', 1)[0].rsplit('-', 2))]
create_env(config.test_prefix, specs)
res = {}
with open(join(config.test_prefix, 'conda-meta', '-'.join(specs[0]) +
'.json')) as f:
data = json.load(f)
files = data['files']
for f in files:
if post.is_obj(f):
res[f] = ldd(f)
return res
// ... rest of the code ...
|
8dd41fab9a43ef43d5f2dc27e11bdbda3c23bc56
|
soapbox/tests/urls.py
|
soapbox/tests/urls.py
|
from django.conf.urls import patterns, url
from django.views.generic import TemplateView
urlpatterns = patterns(
'',
url(r'^$',
TemplateView.as_view(
template_name='soapboxtest/test_success.html')),
url(r'^foo/$',
TemplateView.as_view(
template_name='soapboxtest/test_success.html')),
url(r'^foo/bar/$',
TemplateView.as_view(
template_name='soapboxtest/test_success.html')),
url(r'^foo/bar/baz/$',
TemplateView.as_view(
template_name='soapboxtest/test_context_processor.html')),
url(r'^fail/$',
TemplateView.as_view(
template_name='soapboxtest/test_fail_syntax.html')),
url(r'^bad-url-var/$',
TemplateView.as_view(
template_name='soapboxtest/test_bad_variable.html')),
)
|
from django.conf.urls import url
from django.views.generic import TemplateView
urlpatterns = [
url(r'^$',
TemplateView.as_view(
template_name='soapboxtest/test_success.html')),
url(r'^foo/$',
TemplateView.as_view(
template_name='soapboxtest/test_success.html')),
url(r'^foo/bar/$',
TemplateView.as_view(
template_name='soapboxtest/test_success.html')),
url(r'^foo/bar/baz/$',
TemplateView.as_view(
template_name='soapboxtest/test_context_processor.html')),
url(r'^fail/$',
TemplateView.as_view(
template_name='soapboxtest/test_fail_syntax.html')),
url(r'^bad-url-var/$',
TemplateView.as_view(
template_name='soapboxtest/test_bad_variable.html')),
]
|
Stop using patterns() in the test URLs.
|
Stop using patterns() in the test URLs.
|
Python
|
bsd-3-clause
|
ubernostrum/django-soapbox,ubernostrum/django-soapbox
|
- from django.conf.urls import patterns, url
+ from django.conf.urls import url
from django.views.generic import TemplateView
+ urlpatterns = [
- urlpatterns = patterns(
- '',
url(r'^$',
TemplateView.as_view(
template_name='soapboxtest/test_success.html')),
url(r'^foo/$',
TemplateView.as_view(
template_name='soapboxtest/test_success.html')),
url(r'^foo/bar/$',
TemplateView.as_view(
template_name='soapboxtest/test_success.html')),
url(r'^foo/bar/baz/$',
TemplateView.as_view(
template_name='soapboxtest/test_context_processor.html')),
url(r'^fail/$',
TemplateView.as_view(
template_name='soapboxtest/test_fail_syntax.html')),
url(r'^bad-url-var/$',
TemplateView.as_view(
template_name='soapboxtest/test_bad_variable.html')),
- )
+ ]
|
Stop using patterns() in the test URLs.
|
## Code Before:
from django.conf.urls import patterns, url
from django.views.generic import TemplateView
urlpatterns = patterns(
'',
url(r'^$',
TemplateView.as_view(
template_name='soapboxtest/test_success.html')),
url(r'^foo/$',
TemplateView.as_view(
template_name='soapboxtest/test_success.html')),
url(r'^foo/bar/$',
TemplateView.as_view(
template_name='soapboxtest/test_success.html')),
url(r'^foo/bar/baz/$',
TemplateView.as_view(
template_name='soapboxtest/test_context_processor.html')),
url(r'^fail/$',
TemplateView.as_view(
template_name='soapboxtest/test_fail_syntax.html')),
url(r'^bad-url-var/$',
TemplateView.as_view(
template_name='soapboxtest/test_bad_variable.html')),
)
## Instruction:
Stop using patterns() in the test URLs.
## Code After:
from django.conf.urls import url
from django.views.generic import TemplateView
urlpatterns = [
url(r'^$',
TemplateView.as_view(
template_name='soapboxtest/test_success.html')),
url(r'^foo/$',
TemplateView.as_view(
template_name='soapboxtest/test_success.html')),
url(r'^foo/bar/$',
TemplateView.as_view(
template_name='soapboxtest/test_success.html')),
url(r'^foo/bar/baz/$',
TemplateView.as_view(
template_name='soapboxtest/test_context_processor.html')),
url(r'^fail/$',
TemplateView.as_view(
template_name='soapboxtest/test_fail_syntax.html')),
url(r'^bad-url-var/$',
TemplateView.as_view(
template_name='soapboxtest/test_bad_variable.html')),
]
|
# ... existing code ...
from django.conf.urls import url
from django.views.generic import TemplateView
# ... modified code ...
urlpatterns = [
url(r'^$',
...
template_name='soapboxtest/test_bad_variable.html')),
]
# ... rest of the code ...
|
c769b66c546ad3fd9d04c0607506a49e9d3bff4a
|
fortdepend/preprocessor.py
|
fortdepend/preprocessor.py
|
import io
import pcpp
class FortranPreprocessor(pcpp.Preprocessor):
def __init__(self):
super().__init__()
def parse_to_string_lines(self, text):
with io.StringIO() as f:
self.parse(text)
self.write(f)
f.seek(0)
result = f.readlines()
return result
|
import io
import pcpp
class FortranPreprocessor(pcpp.Preprocessor):
def __init__(self):
super(pcpp.Preprocessor, self).__init__()
def parse_to_string_lines(self, text):
with io.StringIO() as f:
self.parse(text)
self.write(f)
f.seek(0)
result = f.readlines()
return result
|
Fix super() call for py2.7
|
Fix super() call for py2.7
|
Python
|
mit
|
ZedThree/fort_depend.py,ZedThree/fort_depend.py
|
import io
import pcpp
class FortranPreprocessor(pcpp.Preprocessor):
def __init__(self):
- super().__init__()
+ super(pcpp.Preprocessor, self).__init__()
def parse_to_string_lines(self, text):
with io.StringIO() as f:
self.parse(text)
self.write(f)
f.seek(0)
result = f.readlines()
return result
|
Fix super() call for py2.7
|
## Code Before:
import io
import pcpp
class FortranPreprocessor(pcpp.Preprocessor):
def __init__(self):
super().__init__()
def parse_to_string_lines(self, text):
with io.StringIO() as f:
self.parse(text)
self.write(f)
f.seek(0)
result = f.readlines()
return result
## Instruction:
Fix super() call for py2.7
## Code After:
import io
import pcpp
class FortranPreprocessor(pcpp.Preprocessor):
def __init__(self):
super(pcpp.Preprocessor, self).__init__()
def parse_to_string_lines(self, text):
with io.StringIO() as f:
self.parse(text)
self.write(f)
f.seek(0)
result = f.readlines()
return result
|
// ... existing code ...
def __init__(self):
super(pcpp.Preprocessor, self).__init__()
// ... rest of the code ...
|
eb9b1cc747dc807a52ee7d0dec0992eb70005840
|
cacao_app/configuracion/models.py
|
cacao_app/configuracion/models.py
|
from django.db import models
from solo.models import SingletonModel
from ckeditor.fields import RichTextField
class Contacto(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_contacto = RichTextField('Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
class Meta:
verbose_name = "Configuracion Contacto"
class Acerca(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_bienvenida = RichTextField('Informacion de Bienvenida', config_name='default')
class Meta:
verbose_name = "Configuracion de \"Acerca de\""
class Application(SingletonModel):
title = models.CharField('Titulo', max_length=250)
sub_title = models.CharField('Sub Titulo', max_length=250)
logo = models.ImageField('Imagen', upload_to='cacao/')
class Meta:
verbose_name = "Configuracion de Aplicación"
|
from django.db import models
from solo.models import SingletonModel
from ckeditor.fields import RichTextField
class Contacto(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_contacto = RichTextField(
'Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
class Meta:
verbose_name = "Configuracion Contacto"
class Acerca(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_bienvenida = RichTextField(
'Informacion de Bienvenida', config_name='default')
class Meta:
verbose_name = "Configuracion de \"Acerca de\""
class Application(SingletonModel):
title = models.CharField('Titulo', max_length=250)
sub_title = models.CharField('Sub Titulo', max_length=250)
logo = models.ImageField('Imagen',
upload_to='cacao/',
help_text="Formato PNG transparente y 512x512 pixels de tamaño") # noqa
class Meta:
verbose_name = "Configuracion de Aplicación"
|
Set help text for app logo
|
Set help text for app logo
|
Python
|
bsd-3-clause
|
CacaoMovil/guia-de-cacao-django,CacaoMovil/guia-de-cacao-django,CacaoMovil/guia-de-cacao-django
|
from django.db import models
from solo.models import SingletonModel
from ckeditor.fields import RichTextField
+
class Contacto(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
- informacion_contacto = RichTextField('Informacion de Contacto', config_name='default')
+ informacion_contacto = RichTextField(
+ 'Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
class Meta:
verbose_name = "Configuracion Contacto"
+
class Acerca(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
- informacion_bienvenida = RichTextField('Informacion de Bienvenida', config_name='default')
+ informacion_bienvenida = RichTextField(
+ 'Informacion de Bienvenida', config_name='default')
class Meta:
verbose_name = "Configuracion de \"Acerca de\""
+
class Application(SingletonModel):
title = models.CharField('Titulo', max_length=250)
sub_title = models.CharField('Sub Titulo', max_length=250)
- logo = models.ImageField('Imagen', upload_to='cacao/')
+ logo = models.ImageField('Imagen',
+ upload_to='cacao/',
+ help_text="Formato PNG transparente y 512x512 pixels de tamaño") # noqa
class Meta:
verbose_name = "Configuracion de Aplicación"
|
Set help text for app logo
|
## Code Before:
from django.db import models
from solo.models import SingletonModel
from ckeditor.fields import RichTextField
class Contacto(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_contacto = RichTextField('Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
class Meta:
verbose_name = "Configuracion Contacto"
class Acerca(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_bienvenida = RichTextField('Informacion de Bienvenida', config_name='default')
class Meta:
verbose_name = "Configuracion de \"Acerca de\""
class Application(SingletonModel):
title = models.CharField('Titulo', max_length=250)
sub_title = models.CharField('Sub Titulo', max_length=250)
logo = models.ImageField('Imagen', upload_to='cacao/')
class Meta:
verbose_name = "Configuracion de Aplicación"
## Instruction:
Set help text for app logo
## Code After:
from django.db import models
from solo.models import SingletonModel
from ckeditor.fields import RichTextField
class Contacto(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_contacto = RichTextField(
'Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
class Meta:
verbose_name = "Configuracion Contacto"
class Acerca(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_bienvenida = RichTextField(
'Informacion de Bienvenida', config_name='default')
class Meta:
verbose_name = "Configuracion de \"Acerca de\""
class Application(SingletonModel):
title = models.CharField('Titulo', max_length=250)
sub_title = models.CharField('Sub Titulo', max_length=250)
logo = models.ImageField('Imagen',
upload_to='cacao/',
help_text="Formato PNG transparente y 512x512 pixels de tamaño") # noqa
class Meta:
verbose_name = "Configuracion de Aplicación"
|
// ... existing code ...
from ckeditor.fields import RichTextField
// ... modified code ...
"""
informacion_contacto = RichTextField(
'Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
...
verbose_name = "Configuracion Contacto"
...
"""
informacion_bienvenida = RichTextField(
'Informacion de Bienvenida', config_name='default')
...
class Application(SingletonModel):
...
sub_title = models.CharField('Sub Titulo', max_length=250)
logo = models.ImageField('Imagen',
upload_to='cacao/',
help_text="Formato PNG transparente y 512x512 pixels de tamaño") # noqa
// ... rest of the code ...
|
32f1ce16ce9df1f4615a0403ed56bf6fd7dbbef4
|
slackbotpry/event.py
|
slackbotpry/event.py
|
class Event:
def __init__(self, bot, data):
self.bot = bot
self.data = data
def post_message(self, text, channel=None):
if channel is None:
channel = self.data['channel']
self.bot.post_message(text, channel)
def add_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
self.bot.add_reaction(emoji, channel, timestamp)
def remove_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
self.bot.remove_reaction(emoji, channel, timestamp)
|
class Event:
def __init__(self, bot, data):
self.bot = bot
self.data = data
def post_message(self, text, channel=None):
if channel is None:
channel = self.data['channel']
return self.bot.post_message(text, channel)
def add_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
return self.bot.add_reaction(emoji, channel, timestamp)
def remove_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
return self.bot.remove_reaction(emoji, channel, timestamp)
|
Add missing return of Event methods
|
Add missing return of Event methods
|
Python
|
mit
|
rokurosatp/slackbotpry
|
class Event:
def __init__(self, bot, data):
self.bot = bot
self.data = data
def post_message(self, text, channel=None):
if channel is None:
channel = self.data['channel']
- self.bot.post_message(text, channel)
+ return self.bot.post_message(text, channel)
def add_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
- self.bot.add_reaction(emoji, channel, timestamp)
+ return self.bot.add_reaction(emoji, channel, timestamp)
def remove_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
- self.bot.remove_reaction(emoji, channel, timestamp)
+ return self.bot.remove_reaction(emoji, channel, timestamp)
|
Add missing return of Event methods
|
## Code Before:
class Event:
def __init__(self, bot, data):
self.bot = bot
self.data = data
def post_message(self, text, channel=None):
if channel is None:
channel = self.data['channel']
self.bot.post_message(text, channel)
def add_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
self.bot.add_reaction(emoji, channel, timestamp)
def remove_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
self.bot.remove_reaction(emoji, channel, timestamp)
## Instruction:
Add missing return of Event methods
## Code After:
class Event:
def __init__(self, bot, data):
self.bot = bot
self.data = data
def post_message(self, text, channel=None):
if channel is None:
channel = self.data['channel']
return self.bot.post_message(text, channel)
def add_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
return self.bot.add_reaction(emoji, channel, timestamp)
def remove_reaction(self, emoji, channel=None, timestamp=None):
if channel is None:
channel = self.data['channel']
if timestamp is None:
timestamp = self.data['ts']
return self.bot.remove_reaction(emoji, channel, timestamp)
|
...
channel = self.data['channel']
return self.bot.post_message(text, channel)
def add_reaction(self, emoji, channel=None, timestamp=None):
...
timestamp = self.data['ts']
return self.bot.add_reaction(emoji, channel, timestamp)
def remove_reaction(self, emoji, channel=None, timestamp=None):
...
timestamp = self.data['ts']
return self.bot.remove_reaction(emoji, channel, timestamp)
...
|
e861def07da1f0dea7f5273d06e7dc674a79025f
|
adventure/urls.py
|
adventure/urls.py
|
from django.conf.urls import url, include
from rest_framework import routers
from . import views
from .views import PlayerViewSet, AdventureViewSet, RoomViewSet, ArtifactViewSet, EffectViewSet, MonsterViewSet
router = routers.DefaultRouter(trailing_slash=False)
router.register(r'players', PlayerViewSet)
router.register(r'adventures', AdventureViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/rooms$', RoomViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/artifacts$', ArtifactViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/effects$', EffectViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/monsters$', MonsterViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^$', views.index, name='index'),
url(r'^adventure/(?P<adventure_id>[\w-]+)/$', views.adventure, name='adventure'),
# this route is a catch-all for compatibility with the Angular routes. It must be last in the list.
# NOTE: non-existent URLs won't 404 with this in place. They will be sent into the Angular app.
url(r'^(?P<path>.*)/$', views.index),
]
|
from django.conf.urls import url, include
from rest_framework import routers
from . import views
from .views import PlayerViewSet, AdventureViewSet, RoomViewSet, ArtifactViewSet, EffectViewSet, MonsterViewSet
router = routers.DefaultRouter(trailing_slash=False)
router.register(r'players', PlayerViewSet)
router.register(r'adventures', AdventureViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/rooms$', RoomViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/artifacts$', ArtifactViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/effects$', EffectViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/monsters$', MonsterViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^$', views.index, name='index'),
url(r'^adventure/(?P<adventure_id>[\w-]+)/$', views.adventure, name='adventure'),
# this route is a catch-all for compatibility with the Angular routes. It must be last in the list.
# NOTE: this currently matches URLs without a . in them, so .js files and broken images will still 404.
# NOTE: non-existent URLs won't 404 with this in place. They will be sent into the Angular app.
url(r'^(?P<path>[^\.]*)/$', views.index),
]
|
Update Django catch-all URL path to not catch URLs with a . in them.
|
Update Django catch-all URL path to not catch URLs with a . in them.
This makes missing JS files 404 properly instead of returning the HTML 404 page which confuses the parser.
|
Python
|
mit
|
kdechant/eamon,kdechant/eamon,kdechant/eamon,kdechant/eamon
|
from django.conf.urls import url, include
from rest_framework import routers
from . import views
from .views import PlayerViewSet, AdventureViewSet, RoomViewSet, ArtifactViewSet, EffectViewSet, MonsterViewSet
router = routers.DefaultRouter(trailing_slash=False)
router.register(r'players', PlayerViewSet)
router.register(r'adventures', AdventureViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/rooms$', RoomViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/artifacts$', ArtifactViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/effects$', EffectViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/monsters$', MonsterViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^$', views.index, name='index'),
url(r'^adventure/(?P<adventure_id>[\w-]+)/$', views.adventure, name='adventure'),
# this route is a catch-all for compatibility with the Angular routes. It must be last in the list.
+ # NOTE: this currently matches URLs without a . in them, so .js files and broken images will still 404.
# NOTE: non-existent URLs won't 404 with this in place. They will be sent into the Angular app.
- url(r'^(?P<path>.*)/$', views.index),
+ url(r'^(?P<path>[^\.]*)/$', views.index),
]
|
Update Django catch-all URL path to not catch URLs with a . in them.
|
## Code Before:
from django.conf.urls import url, include
from rest_framework import routers
from . import views
from .views import PlayerViewSet, AdventureViewSet, RoomViewSet, ArtifactViewSet, EffectViewSet, MonsterViewSet
router = routers.DefaultRouter(trailing_slash=False)
router.register(r'players', PlayerViewSet)
router.register(r'adventures', AdventureViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/rooms$', RoomViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/artifacts$', ArtifactViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/effects$', EffectViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/monsters$', MonsterViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^$', views.index, name='index'),
url(r'^adventure/(?P<adventure_id>[\w-]+)/$', views.adventure, name='adventure'),
# this route is a catch-all for compatibility with the Angular routes. It must be last in the list.
# NOTE: non-existent URLs won't 404 with this in place. They will be sent into the Angular app.
url(r'^(?P<path>.*)/$', views.index),
]
## Instruction:
Update Django catch-all URL path to not catch URLs with a . in them.
## Code After:
from django.conf.urls import url, include
from rest_framework import routers
from . import views
from .views import PlayerViewSet, AdventureViewSet, RoomViewSet, ArtifactViewSet, EffectViewSet, MonsterViewSet
router = routers.DefaultRouter(trailing_slash=False)
router.register(r'players', PlayerViewSet)
router.register(r'adventures', AdventureViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/rooms$', RoomViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/artifacts$', ArtifactViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/effects$', EffectViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/monsters$', MonsterViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^$', views.index, name='index'),
url(r'^adventure/(?P<adventure_id>[\w-]+)/$', views.adventure, name='adventure'),
# this route is a catch-all for compatibility with the Angular routes. It must be last in the list.
# NOTE: this currently matches URLs without a . in them, so .js files and broken images will still 404.
# NOTE: non-existent URLs won't 404 with this in place. They will be sent into the Angular app.
url(r'^(?P<path>[^\.]*)/$', views.index),
]
|
// ... existing code ...
# this route is a catch-all for compatibility with the Angular routes. It must be last in the list.
# NOTE: this currently matches URLs without a . in them, so .js files and broken images will still 404.
# NOTE: non-existent URLs won't 404 with this in place. They will be sent into the Angular app.
url(r'^(?P<path>[^\.]*)/$', views.index),
]
// ... rest of the code ...
|
4520360a0bbf223805cc963d58409626be2bd728
|
capstone/mdp/fixed_game_mdp.py
|
capstone/mdp/fixed_game_mdp.py
|
from .mdp import MDP
from .game_mdp import GameMDP
from ..utils import utility
class FixedGameMDP(GameMDP):
def __init__(self, game, opp_player, opp_idx):
'''
opp_player: the opponent player
opp_idx: the idx of the opponent player in the game
'''
self._game = game
self._opp_player = opp_player
self._opp_idx = opp_idx
self._agent_idx = opp_idx ^ 1
self._states = {}
def reward(self, game, move, next_game):
return utility(next_game, self._agent_idx) if next_game.is_over() else 0
def start_state(self):
new_game = self._game.copy()
if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
chosen_move = self._opp_player.choose_move(new_game)
new_game.make_move(chosen_move)
return new_game
def transitions(self, game, move):
if game.is_over():
return []
new_game = game.copy().make_move(move)
if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
chosen_move = self._opp_player.choose_move(new_game)
new_game.make_move(chosen_move)
return [(new_game, 1.0)]
|
from .mdp import MDP
from .game_mdp import GameMDP
from ..utils import utility
class FixedGameMDP(GameMDP):
def __init__(self, game, opp_player, opp_idx):
'''
opp_player: the opponent player
opp_idx: the idx of the opponent player in the game
'''
super(FixedGameMDP, self).__init__(game)
self._opp_player = opp_player
self._opp_idx = opp_idx
self._agent_idx = opp_idx ^ 1
def reward(self, game, move, next_game):
return utility(next_game, self._agent_idx) if next_game.is_over() else 0
def start_state(self):
new_game = self._game.copy()
if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
chosen_move = self._opp_player.choose_move(new_game)
new_game.make_move(chosen_move)
return new_game
def transitions(self, game, move):
if game.is_over():
return []
new_game = game.copy().make_move(move)
if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
chosen_move = self._opp_player.choose_move(new_game)
new_game.make_move(chosen_move)
return [(new_game, 1.0)]
|
Call super __init__ in GameMDP
|
Call super __init__ in GameMDP
|
Python
|
mit
|
davidrobles/mlnd-capstone-code
|
from .mdp import MDP
from .game_mdp import GameMDP
from ..utils import utility
class FixedGameMDP(GameMDP):
def __init__(self, game, opp_player, opp_idx):
'''
opp_player: the opponent player
opp_idx: the idx of the opponent player in the game
'''
- self._game = game
+ super(FixedGameMDP, self).__init__(game)
self._opp_player = opp_player
self._opp_idx = opp_idx
self._agent_idx = opp_idx ^ 1
- self._states = {}
def reward(self, game, move, next_game):
return utility(next_game, self._agent_idx) if next_game.is_over() else 0
def start_state(self):
new_game = self._game.copy()
if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
chosen_move = self._opp_player.choose_move(new_game)
new_game.make_move(chosen_move)
return new_game
def transitions(self, game, move):
if game.is_over():
return []
new_game = game.copy().make_move(move)
if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
chosen_move = self._opp_player.choose_move(new_game)
new_game.make_move(chosen_move)
return [(new_game, 1.0)]
|
Call super __init__ in GameMDP
|
## Code Before:
from .mdp import MDP
from .game_mdp import GameMDP
from ..utils import utility
class FixedGameMDP(GameMDP):
def __init__(self, game, opp_player, opp_idx):
'''
opp_player: the opponent player
opp_idx: the idx of the opponent player in the game
'''
self._game = game
self._opp_player = opp_player
self._opp_idx = opp_idx
self._agent_idx = opp_idx ^ 1
self._states = {}
def reward(self, game, move, next_game):
return utility(next_game, self._agent_idx) if next_game.is_over() else 0
def start_state(self):
new_game = self._game.copy()
if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
chosen_move = self._opp_player.choose_move(new_game)
new_game.make_move(chosen_move)
return new_game
def transitions(self, game, move):
if game.is_over():
return []
new_game = game.copy().make_move(move)
if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
chosen_move = self._opp_player.choose_move(new_game)
new_game.make_move(chosen_move)
return [(new_game, 1.0)]
## Instruction:
Call super __init__ in GameMDP
## Code After:
from .mdp import MDP
from .game_mdp import GameMDP
from ..utils import utility
class FixedGameMDP(GameMDP):
def __init__(self, game, opp_player, opp_idx):
'''
opp_player: the opponent player
opp_idx: the idx of the opponent player in the game
'''
super(FixedGameMDP, self).__init__(game)
self._opp_player = opp_player
self._opp_idx = opp_idx
self._agent_idx = opp_idx ^ 1
def reward(self, game, move, next_game):
return utility(next_game, self._agent_idx) if next_game.is_over() else 0
def start_state(self):
new_game = self._game.copy()
if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
chosen_move = self._opp_player.choose_move(new_game)
new_game.make_move(chosen_move)
return new_game
def transitions(self, game, move):
if game.is_over():
return []
new_game = game.copy().make_move(move)
if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
chosen_move = self._opp_player.choose_move(new_game)
new_game.make_move(chosen_move)
return [(new_game, 1.0)]
|
...
'''
super(FixedGameMDP, self).__init__(game)
self._opp_player = opp_player
...
self._agent_idx = opp_idx ^ 1
...
|
e435592d64dbd4f75a7cc9d1ac8bb17ab4177a2b
|
erpnext/patches/v4_2/default_website_style.py
|
erpnext/patches/v4_2/default_website_style.py
|
import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
|
import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
frappe.reload_doc('website', 'doctype', 'style_settings')
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
|
Fix default website style patch (reload doc)
|
[minor] Fix default website style patch (reload doc)
|
Python
|
agpl-3.0
|
gangadharkadam/saloon_erp,hatwar/buyback-erpnext,gangadharkadam/v6_erp,indictranstech/Das_Erpnext,gangadharkadam/vlinkerp,shft117/SteckerApp,sheafferusa/erpnext,mahabuber/erpnext,hernad/erpnext,suyashphadtare/gd-erp,gangadharkadam/letzerp,indictranstech/internal-erpnext,indictranstech/buyback-erp,4commerce-technologies-AG/erpnext,indictranstech/buyback-erp,shitolepriya/test-erp,rohitwaghchaure/New_Theme_Erp,indictranstech/trufil-erpnext,gangadharkadam/v5_erp,mahabuber/erpnext,indictranstech/Das_Erpnext,suyashphadtare/vestasi-erp-jan-end,gangadhar-kadam/verve_test_erp,SPKian/Testing2,mbauskar/omnitech-demo-erpnext,gsnbng/erpnext,hernad/erpnext,sheafferusa/erpnext,mbauskar/phrerp,indictranstech/trufil-erpnext,mbauskar/omnitech-demo-erpnext,Tejal011089/huntercamp_erpnext,netfirms/erpnext,MartinEnder/erpnext-de,gangadhar-kadam/latestchurcherp,hatwar/Das_erpnext,indictranstech/fbd_erpnext,gangadharkadam/saloon_erp,hanselke/erpnext-1,njmube/erpnext,Tejal011089/trufil-erpnext,fuhongliang/erpnext,gangadhar-kadam/helpdesk-erpnext,SPKian/Testing2,pombredanne/erpnext,sagar30051991/ozsmart-erp,gangadharkadam/verveerp,gangadharkadam/v4_erp,gangadhar-kadam/verve_test_erp,mbauskar/phrerp,indictranstech/focal-erpnext,rohitwaghchaure/GenieManager-erpnext,rohitwaghchaure/New_Theme_Erp,mbauskar/Das_Erpnext,mbauskar/helpdesk-erpnext,suyashphadtare/sajil-erp,gangadhar-kadam/verve_test_erp,susuchina/ERPNEXT,gangadharkadam/verveerp,ShashaQin/erpnext,netfirms/erpnext,njmube/erpnext,SPKian/Testing,mbauskar/Das_Erpnext,mbauskar/sapphire-erpnext,gangadhar-kadam/verve_erp,suyashphadtare/vestasi-erp-1,gangadharkadam/vlinkerp,Tejal011089/paypal_erpnext,indictranstech/reciphergroup-erpnext,treejames/erpnext,rohitwaghchaure/erpnext_smart,gangadharkadam/v4_erp,tmimori/erpnext,suyashphadtare/vestasi-erp-final,mahabuber/erpnext,indictranstech/osmosis-erpnext,hatwar/focal-erpnext,hatwar/buyback-erpnext,treejames/erpnext,suyashphadtare/vestasi-update-erp,gangadharkadam/contributionerp,geekroot/erpnext,
shitolepriya/test-erp,Tejal011089/trufil-erpnext,ThiagoGarciaAlves/erpnext,rohitwaghchaure/erpnext_smart,hernad/erpnext,ThiagoGarciaAlves/erpnext,rohitwaghchaure/digitales_erpnext,rohitwaghchaure/erpnext-receipher,BhupeshGupta/erpnext,gangadhar-kadam/helpdesk-erpnext,anandpdoshi/erpnext,meisterkleister/erpnext,suyashphadtare/gd-erp,mbauskar/omnitech-erpnext,susuchina/ERPNEXT,pombredanne/erpnext,Tejal011089/fbd_erpnext,gangadhar-kadam/verve_test_erp,Tejal011089/trufil-erpnext,indictranstech/erpnext,gsnbng/erpnext,indictranstech/trufil-erpnext,indictranstech/vestasi-erpnext,Suninus/erpnext,mbauskar/alec_frappe5_erpnext,dieface/erpnext,indictranstech/focal-erpnext,aruizramon/alec_erpnext,gangadharkadam/saloon_erp_install,sagar30051991/ozsmart-erp,indictranstech/vestasi-erpnext,mbauskar/alec_frappe5_erpnext,indictranstech/biggift-erpnext,gmarke/erpnext,gangadhar-kadam/latestchurcherp,gangadhar-kadam/verve-erp,ShashaQin/erpnext,suyashphadtare/gd-erp,suyashphadtare/sajil-final-erp,geekroot/erpnext,Drooids/erpnext,geekroot/erpnext,gangadharkadam/saloon_erp_install,indictranstech/focal-erpnext,suyashphadtare/sajil-erp,gangadharkadam/verveerp,saurabh6790/test-erp,hanselke/erpnext-1,hatwar/Das_erpnext,gangadharkadam/letzerp,gangadhar-kadam/helpdesk-erpnext,indictranstech/Das_Erpnext,mbauskar/Das_Erpnext,mbauskar/helpdesk-erpnext,Tejal011089/digitales_erpnext,anandpdoshi/erpnext,indictranstech/osmosis-erpnext,gmarke/erpnext,gangadharkadam/v4_erp,MartinEnder/erpnext-de,mbauskar/omnitech-erpnext,Tejal011089/fbd_erpnext,saurabh6790/test-erp,indictranstech/buyback-erp,mbauskar/phrerp,indictranstech/phrerp,indictranstech/trufil-erpnext,Suninus/erpnext,gsnbng/erpnext,rohitwaghchaure/digitales_erpnext,Drooids/erpnext,fuhongliang/erpnext,sheafferusa/erpnext,gangadharkadam/saloon_erp,mbauskar/sapphire-erpnext,Tejal011089/huntercamp_erpnext,indictranstech/internal-erpnext,mbauskar/sapphire-erpnext,indictranstech/fbd_erpnext,hanselke/erpnext-1,sheafferusa/erpnext,gangadharkadam/v6_erp,Ma
rtinEnder/erpnext-de,4commerce-technologies-AG/erpnext,indictranstech/biggift-erpnext,mbauskar/helpdesk-erpnext,anandpdoshi/erpnext,gangadharkadam/vlinkerp,indictranstech/reciphergroup-erpnext,gangadhar-kadam/verve-erp,mbauskar/helpdesk-erpnext,geekroot/erpnext,gangadharkadam/v5_erp,Tejal011089/digitales_erpnext,indictranstech/tele-erpnext,ThiagoGarciaAlves/erpnext,suyashphadtare/vestasi-erp-final,Tejal011089/huntercamp_erpnext,indictranstech/reciphergroup-erpnext,suyashphadtare/vestasi-erp-jan-end,indictranstech/biggift-erpnext,rohitwaghchaure/GenieManager-erpnext,mbauskar/omnitech-erpnext,suyashphadtare/vestasi-erp-final,indictranstech/vestasi-erpnext,saurabh6790/test-erp,treejames/erpnext,pawaranand/phrerp,gangadhar-kadam/verve_erp,mbauskar/sapphire-erpnext,gangadharkadam/letzerp,aruizramon/alec_erpnext,suyashphadtare/vestasi-update-erp,gangadharkadam/contributionerp,fuhongliang/erpnext,netfirms/erpnext,indictranstech/osmosis-erpnext,SPKian/Testing,meisterkleister/erpnext,gangadhar-kadam/verve_live_erp,tmimori/erpnext,hatwar/focal-erpnext,rohitwaghchaure/GenieManager-erpnext,indictranstech/tele-erpnext,gangadharkadam/saloon_erp_install,indictranstech/fbd_erpnext,pawaranand/phrerp,gangadharkadam/v6_erp,suyashphadtare/sajil-final-erp,indictranstech/buyback-erp,treejames/erpnext,suyashphadtare/test,mbauskar/alec_frappe5_erpnext,suyashphadtare/vestasi-erp-jan-end,SPKian/Testing,tmimori/erpnext,gangadharkadam/v4_erp,suyashphadtare/vestasi-erp-1,indictranstech/phrerp,suyashphadtare/sajil-final-erp,netfirms/erpnext,gmarke/erpnext,BhupeshGupta/erpnext,indictranstech/tele-erpnext,Tejal011089/osmosis_erpnext,hatwar/focal-erpnext,Tejal011089/digitales_erpnext,suyashphadtare/vestasi-erp-jan-end,MartinEnder/erpnext-de,Suninus/erpnext,indictranstech/Das_Erpnext,Tejal011089/osmosis_erpnext,rohitwaghchaure/GenieManager-erpnext,gangadhar-kadam/verve_live_erp,rohitwaghchaure/erpnext-receipher,Tejal011089/digitales_erpnext,shitolepriya/test-erp,shft117/SteckerApp,indictranstech/osm
osis-erpnext,Tejal011089/fbd_erpnext,gangadhar-kadam/verve_live_erp,hatwar/buyback-erpnext,shft117/SteckerApp,tmimori/erpnext,dieface/erpnext,mbauskar/alec_frappe5_erpnext,gangadharkadam/v5_erp,indictranstech/phrerp,gangadhar-kadam/verve_erp,indictranstech/internal-erpnext,hatwar/buyback-erpnext,gangadharkadam/verveerp,njmube/erpnext,rohitwaghchaure/erpnext-receipher,Aptitudetech/ERPNext,aruizramon/alec_erpnext,gsnbng/erpnext,susuchina/ERPNEXT,gangadharkadam/v5_erp,hanselke/erpnext-1,rohitwaghchaure/digitales_erpnext,rohitwaghchaure/digitales_erpnext,Drooids/erpnext,susuchina/ERPNEXT,njmube/erpnext,mbauskar/omnitech-demo-erpnext,gangadharkadam/v6_erp,dieface/erpnext,gangadharkadam/contributionerp,ShashaQin/erpnext,saurabh6790/test-erp,suyashphadtare/vestasi-erp-1,SPKian/Testing,suyashphadtare/test,rohitwaghchaure/New_Theme_Erp,hernad/erpnext,rohitwaghchaure/New_Theme_Erp,meisterkleister/erpnext,hatwar/Das_erpnext,4commerce-technologies-AG/erpnext,Tejal011089/osmosis_erpnext,mahabuber/erpnext,suyashphadtare/vestasi-update-erp,pombredanne/erpnext,Tejal011089/trufil-erpnext,gangadharkadam/saloon_erp_install,SPKian/Testing2,mbauskar/Das_Erpnext,indictranstech/reciphergroup-erpnext,ThiagoGarciaAlves/erpnext,gangadharkadam/contributionerp,suyashphadtare/gd-erp,shitolepriya/test-erp,gangadhar-kadam/verve_erp,gangadhar-kadam/helpdesk-erpnext,Tejal011089/fbd_erpnext,pombredanne/erpnext,dieface/erpnext,hatwar/focal-erpnext,ShashaQin/erpnext,indictranstech/vestasi-erpnext,gangadhar-kadam/latestchurcherp,gangadhar-kadam/verve-erp,gangadhar-kadam/latestchurcherp,indictranstech/biggift-erpnext,fuhongliang/erpnext,suyashphadtare/sajil-erp,shft117/SteckerApp,indictranstech/erpnext,mbauskar/phrerp,BhupeshGupta/erpnext,indictranstech/tele-erpnext,SPKian/Testing2,aruizramon/alec_erpnext,indictranstech/fbd_erpnext,rohitwaghchaure/erpnext_smart,hatwar/Das_erpnext,pawaranand/phrerp,pawaranand/phrerp,Tejal011089/osmosis_erpnext,sagar30051991/ozsmart-erp,anandpdoshi/erpnext,suyashphadtare/
test,indictranstech/phrerp,indictranstech/erpnext,Tejal011089/paypal_erpnext,indictranstech/internal-erpnext,gangadharkadam/saloon_erp,gmarke/erpnext,Tejal011089/paypal_erpnext,sagar30051991/ozsmart-erp,BhupeshGupta/erpnext,Drooids/erpnext,Suninus/erpnext,gangadharkadam/vlinkerp,gangadhar-kadam/verve_live_erp,meisterkleister/erpnext,mbauskar/omnitech-erpnext,gangadharkadam/letzerp,mbauskar/omnitech-demo-erpnext,rohitwaghchaure/erpnext-receipher,indictranstech/focal-erpnext,Tejal011089/paypal_erpnext,Tejal011089/huntercamp_erpnext,indictranstech/erpnext
|
import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
+ frappe.reload_doc('website', 'doctype', 'style_settings')
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
|
Fix default website style patch (reload doc)
|
## Code Before:
import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
## Instruction:
Fix default website style patch (reload doc)
## Code After:
import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
frappe.reload_doc('website', 'doctype', 'style_settings')
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
|
...
def execute():
frappe.reload_doc('website', 'doctype', 'style_settings')
style_settings = frappe.get_doc("Style Settings", "Style Settings")
...
|
53aa7104616944f6206f8e2cd3684b0084158a11
|
fabfile.py
|
fabfile.py
|
from fabric.api import env, cd, run, shell_env, sudo
env.hosts = ['134.213.147.235']
env.user = 'root'
env.key_filename = '~/.ssh/id_di'
env.forward_agent = True
def deploy():
with cd('/srv/venv/mysite'):
run('git pull')
with cd('mysite'), shell_env(DJANGO_CONFIGURATION='Production'):
sudo(' ../../bin/python manage.py collectstatic --noinput',
user="pydev")
run('apachectl graceful')
|
from fabric.api import env, cd, run, shell_env, sudo
env.hosts = ['134.213.147.235']
env.user = 'root'
env.key_filename = '~/.ssh/id_di'
env.forward_agent = True
def deploy():
with cd('/srv/venv/mysite'):
run('git pull')
with cd('mysite'), shell_env(DJANGO_CONFIGURATION='Production'):
sudo(' ../../bin/python manage.py collectstatic --noinput',
user="pydev")
run('apachectl graceful')
def builddb():
with cd('/srv/venv/mysite/mysite'):
with shell_env(DJANGO_CONFIGURATION='Production'):
sudo("echo 'drop database app_data;create database app_data' | ../../bin/python manage.py dbshell",
user="pydev")
sudo(' ../../bin/python manage.py migrate',
user="pydev")
sudo(' ../../bin/python manage.py runscript setup_test_data',
user="pydev")
|
Add fab task to rebuild production database
|
Add fab task to rebuild production database
|
Python
|
bsd-3-clause
|
Kvoti/ditto,Kvoti/ditto,Kvoti/ditto,Kvoti/ditto,Kvoti/ditto
|
from fabric.api import env, cd, run, shell_env, sudo
env.hosts = ['134.213.147.235']
env.user = 'root'
env.key_filename = '~/.ssh/id_di'
env.forward_agent = True
def deploy():
with cd('/srv/venv/mysite'):
run('git pull')
with cd('mysite'), shell_env(DJANGO_CONFIGURATION='Production'):
sudo(' ../../bin/python manage.py collectstatic --noinput',
user="pydev")
run('apachectl graceful')
+
+ def builddb():
+ with cd('/srv/venv/mysite/mysite'):
+ with shell_env(DJANGO_CONFIGURATION='Production'):
+ sudo("echo 'drop database app_data;create database app_data' | ../../bin/python manage.py dbshell",
+ user="pydev")
+ sudo(' ../../bin/python manage.py migrate',
+ user="pydev")
+ sudo(' ../../bin/python manage.py runscript setup_test_data',
+ user="pydev")
+
|
Add fab task to rebuild production database
|
## Code Before:
from fabric.api import env, cd, run, shell_env, sudo
env.hosts = ['134.213.147.235']
env.user = 'root'
env.key_filename = '~/.ssh/id_di'
env.forward_agent = True
def deploy():
with cd('/srv/venv/mysite'):
run('git pull')
with cd('mysite'), shell_env(DJANGO_CONFIGURATION='Production'):
sudo(' ../../bin/python manage.py collectstatic --noinput',
user="pydev")
run('apachectl graceful')
## Instruction:
Add fab task to rebuild production database
## Code After:
from fabric.api import env, cd, run, shell_env, sudo
env.hosts = ['134.213.147.235']
env.user = 'root'
env.key_filename = '~/.ssh/id_di'
env.forward_agent = True
def deploy():
with cd('/srv/venv/mysite'):
run('git pull')
with cd('mysite'), shell_env(DJANGO_CONFIGURATION='Production'):
sudo(' ../../bin/python manage.py collectstatic --noinput',
user="pydev")
run('apachectl graceful')
def builddb():
with cd('/srv/venv/mysite/mysite'):
with shell_env(DJANGO_CONFIGURATION='Production'):
sudo("echo 'drop database app_data;create database app_data' | ../../bin/python manage.py dbshell",
user="pydev")
sudo(' ../../bin/python manage.py migrate',
user="pydev")
sudo(' ../../bin/python manage.py runscript setup_test_data',
user="pydev")
|
...
run('apachectl graceful')
def builddb():
with cd('/srv/venv/mysite/mysite'):
with shell_env(DJANGO_CONFIGURATION='Production'):
sudo("echo 'drop database app_data;create database app_data' | ../../bin/python manage.py dbshell",
user="pydev")
sudo(' ../../bin/python manage.py migrate',
user="pydev")
sudo(' ../../bin/python manage.py runscript setup_test_data',
user="pydev")
...
|
8c819a1cb9df54c00b7246a07e2ba832b763876d
|
stream_django/templatetags/activity_tags.py
|
stream_django/templatetags/activity_tags.py
|
from django import template
from django.template import Context, loader
from stream_django.exceptions import MissingDataException
import logging
logger = logging.getLogger(__name__)
register = template.Library()
LOG = 'warn'
IGNORE = 'ignore'
FAIL = 'fail'
missing_data_policies = [LOG, IGNORE, FAIL]
def handle_not_enriched_data(activity, policy):
message = 'could not enrich field(s) %r for activity #%s' % (activity.not_enriched_data, activity.get('id'))
if policy == IGNORE:
pass
elif policy == FAIL:
raise MissingDataException(message)
elif policy == LOG:
logger.warn(message)
else:
raise TypeError('%s is not a valid missing_data_policy' % policy)
def render_activity(context, activity, template_prefix='', missing_data_policy=LOG):
if hasattr(activity, 'enriched') and not activity.enriched:
handle_not_enriched_data(activity, missing_data_policy)
return ''
if template_prefix != '':
template_prefix = '%s_' % template_prefix
if 'activities' in activity:
template_name = "activity/aggregated/%s%s.html" % (template_prefix, activity['verb'])
else:
template_name = "activity/%s%s.html" % (template_prefix, activity['verb'])
tmpl = loader.get_template(template_name)
context['activity'] = activity
context = Context(context)
return tmpl.render(context)
register.simple_tag(takes_context=True)(render_activity)
|
from django import template
from django.template import loader
from stream_django.exceptions import MissingDataException
import logging
logger = logging.getLogger(__name__)
register = template.Library()
LOG = 'warn'
IGNORE = 'ignore'
FAIL = 'fail'
missing_data_policies = [LOG, IGNORE, FAIL]
def handle_not_enriched_data(activity, policy):
message = 'could not enrich field(s) %r for activity #%s' % (activity.not_enriched_data, activity.get('id'))
if policy == IGNORE:
pass
elif policy == FAIL:
raise MissingDataException(message)
elif policy == LOG:
logger.warn(message)
else:
raise TypeError('%s is not a valid missing_data_policy' % policy)
def render_activity(context, activity, template_prefix='', missing_data_policy=LOG):
if hasattr(activity, 'enriched') and not activity.enriched:
handle_not_enriched_data(activity, missing_data_policy)
return ''
if template_prefix != '':
template_prefix = '%s_' % template_prefix
if 'activities' in activity:
template_name = "activity/aggregated/%s%s.html" % (template_prefix, activity['verb'])
else:
template_name = "activity/%s%s.html" % (template_prefix, activity['verb'])
tmpl = loader.get_template(template_name)
context['activity'] = activity
return tmpl.render(context)
register.simple_tag(takes_context=True)(render_activity)
|
Use dict as a context object for Django 1.11 compatibility
|
Use dict as a context object for Django 1.11 compatibility
Django’s template rendering in 1.11 needs a dictionary as the context
instead of the object Context, otherwise the following error is raised:
context must be a dict rather than Context.
|
Python
|
bsd-3-clause
|
GetStream/stream-django,GetStream/stream-django
|
from django import template
- from django.template import Context, loader
+ from django.template import loader
from stream_django.exceptions import MissingDataException
import logging
logger = logging.getLogger(__name__)
register = template.Library()
LOG = 'warn'
IGNORE = 'ignore'
FAIL = 'fail'
missing_data_policies = [LOG, IGNORE, FAIL]
def handle_not_enriched_data(activity, policy):
message = 'could not enrich field(s) %r for activity #%s' % (activity.not_enriched_data, activity.get('id'))
if policy == IGNORE:
pass
elif policy == FAIL:
raise MissingDataException(message)
elif policy == LOG:
logger.warn(message)
else:
raise TypeError('%s is not a valid missing_data_policy' % policy)
def render_activity(context, activity, template_prefix='', missing_data_policy=LOG):
if hasattr(activity, 'enriched') and not activity.enriched:
handle_not_enriched_data(activity, missing_data_policy)
return ''
if template_prefix != '':
template_prefix = '%s_' % template_prefix
if 'activities' in activity:
template_name = "activity/aggregated/%s%s.html" % (template_prefix, activity['verb'])
else:
template_name = "activity/%s%s.html" % (template_prefix, activity['verb'])
tmpl = loader.get_template(template_name)
context['activity'] = activity
- context = Context(context)
return tmpl.render(context)
register.simple_tag(takes_context=True)(render_activity)
|
Use dict as a context object for Django 1.11 compatibility
|
## Code Before:
from django import template
from django.template import Context, loader
from stream_django.exceptions import MissingDataException
import logging
logger = logging.getLogger(__name__)
register = template.Library()
LOG = 'warn'
IGNORE = 'ignore'
FAIL = 'fail'
missing_data_policies = [LOG, IGNORE, FAIL]
def handle_not_enriched_data(activity, policy):
message = 'could not enrich field(s) %r for activity #%s' % (activity.not_enriched_data, activity.get('id'))
if policy == IGNORE:
pass
elif policy == FAIL:
raise MissingDataException(message)
elif policy == LOG:
logger.warn(message)
else:
raise TypeError('%s is not a valid missing_data_policy' % policy)
def render_activity(context, activity, template_prefix='', missing_data_policy=LOG):
if hasattr(activity, 'enriched') and not activity.enriched:
handle_not_enriched_data(activity, missing_data_policy)
return ''
if template_prefix != '':
template_prefix = '%s_' % template_prefix
if 'activities' in activity:
template_name = "activity/aggregated/%s%s.html" % (template_prefix, activity['verb'])
else:
template_name = "activity/%s%s.html" % (template_prefix, activity['verb'])
tmpl = loader.get_template(template_name)
context['activity'] = activity
context = Context(context)
return tmpl.render(context)
register.simple_tag(takes_context=True)(render_activity)
## Instruction:
Use dict as a context object for Django 1.11 compatibility
## Code After:
from django import template
from django.template import loader
from stream_django.exceptions import MissingDataException
import logging
logger = logging.getLogger(__name__)
register = template.Library()
LOG = 'warn'
IGNORE = 'ignore'
FAIL = 'fail'
missing_data_policies = [LOG, IGNORE, FAIL]
def handle_not_enriched_data(activity, policy):
message = 'could not enrich field(s) %r for activity #%s' % (activity.not_enriched_data, activity.get('id'))
if policy == IGNORE:
pass
elif policy == FAIL:
raise MissingDataException(message)
elif policy == LOG:
logger.warn(message)
else:
raise TypeError('%s is not a valid missing_data_policy' % policy)
def render_activity(context, activity, template_prefix='', missing_data_policy=LOG):
if hasattr(activity, 'enriched') and not activity.enriched:
handle_not_enriched_data(activity, missing_data_policy)
return ''
if template_prefix != '':
template_prefix = '%s_' % template_prefix
if 'activities' in activity:
template_name = "activity/aggregated/%s%s.html" % (template_prefix, activity['verb'])
else:
template_name = "activity/%s%s.html" % (template_prefix, activity['verb'])
tmpl = loader.get_template(template_name)
context['activity'] = activity
return tmpl.render(context)
register.simple_tag(takes_context=True)(render_activity)
|
...
from django import template
from django.template import loader
from stream_django.exceptions import MissingDataException
...
context['activity'] = activity
return tmpl.render(context)
...
|
04557bbff362ae3b89e7dd98a1fb11e0aaeba50e
|
common/djangoapps/student/migrations/0010_auto_20170207_0458.py
|
common/djangoapps/student/migrations/0010_auto_20170207_0458.py
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('student', '0009_auto_20170111_0422'),
]
# This migration was to add a constraint that we lost in the Django
# 1.4->1.8 upgrade. But since the constraint used to be created, production
# would already have the constraint even before running the migration, and
# running the migration would fail. We needed to make the migration
# idempotent. Instead of reverting this migration while we did that, we
# edited it to be a SQL no-op, so that people who had already applied it
# wouldn't end up with a ghost migration.
# It had been:
#
# migrations.RunSQL(
# "create unique index email on auth_user (email);",
# "drop index email on auth_user;"
# )
operations = [
migrations.RunSQL(
# Do nothing:
"select 1",
"select 1"
)
]
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('student', '0009_auto_20170111_0422'),
]
# This migration was to add a constraint that we lost in the Django
# 1.4->1.8 upgrade. But since the constraint used to be created, production
# would already have the constraint even before running the migration, and
# running the migration would fail. We needed to make the migration
# idempotent. Instead of reverting this migration while we did that, we
# edited it to be a SQL no-op, so that people who had already applied it
# wouldn't end up with a ghost migration.
# It had been:
#
# migrations.RunSQL(
# "create unique index email on auth_user (email);",
# "drop index email on auth_user;"
# )
operations = [
# Nothing to do.
]
|
Make this no-op migration be a true no-op.
|
Make this no-op migration be a true no-op.
|
Python
|
agpl-3.0
|
philanthropy-u/edx-platform,lduarte1991/edx-platform,eduNEXT/edx-platform,msegado/edx-platform,cpennington/edx-platform,hastexo/edx-platform,a-parhom/edx-platform,ESOedX/edx-platform,angelapper/edx-platform,gymnasium/edx-platform,eduNEXT/edunext-platform,pepeportela/edx-platform,ESOedX/edx-platform,pepeportela/edx-platform,miptliot/edx-platform,BehavioralInsightsTeam/edx-platform,appsembler/edx-platform,msegado/edx-platform,appsembler/edx-platform,eduNEXT/edx-platform,TeachAtTUM/edx-platform,gymnasium/edx-platform,Lektorium-LLC/edx-platform,miptliot/edx-platform,edx/edx-platform,procangroup/edx-platform,mitocw/edx-platform,romain-li/edx-platform,miptliot/edx-platform,CredoReference/edx-platform,Lektorium-LLC/edx-platform,cpennington/edx-platform,proversity-org/edx-platform,fintech-circle/edx-platform,jolyonb/edx-platform,ahmedaljazzar/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform,stvstnfrd/edx-platform,BehavioralInsightsTeam/edx-platform,gsehub/edx-platform,edx-solutions/edx-platform,teltek/edx-platform,cpennington/edx-platform,pepeportela/edx-platform,BehavioralInsightsTeam/edx-platform,arbrandes/edx-platform,raccoongang/edx-platform,stvstnfrd/edx-platform,ahmedaljazzar/edx-platform,hastexo/edx-platform,arbrandes/edx-platform,ESOedX/edx-platform,CredoReference/edx-platform,eduNEXT/edunext-platform,mitocw/edx-platform,stvstnfrd/edx-platform,fintech-circle/edx-platform,mitocw/edx-platform,pabloborrego93/edx-platform,gsehub/edx-platform,kmoocdev2/edx-platform,angelapper/edx-platform,jolyonb/edx-platform,gymnasium/edx-platform,eduNEXT/edx-platform,gsehub/edx-platform,msegado/edx-platform,teltek/edx-platform,TeachAtTUM/edx-platform,appsembler/edx-platform,pepeportela/edx-platform,Edraak/edraak-platform,pabloborrego93/edx-platform,a-parhom/edx-platform,fintech-circle/edx-platform,pabloborrego93/edx-platform,appsembler/edx-platform,lduarte1991/edx-platform,jolyonb/edx-platform,eduNEXT/edunext-platform,edx-solutions/edx-platform,teltek/edx-platform
,proversity-org/edx-platform,proversity-org/edx-platform,edx-solutions/edx-platform,eduNEXT/edx-platform,Lektorium-LLC/edx-platform,edx/edx-platform,kmoocdev2/edx-platform,gsehub/edx-platform,lduarte1991/edx-platform,miptliot/edx-platform,procangroup/edx-platform,CredoReference/edx-platform,teltek/edx-platform,procangroup/edx-platform,philanthropy-u/edx-platform,arbrandes/edx-platform,EDUlib/edx-platform,Edraak/edraak-platform,Edraak/edraak-platform,jolyonb/edx-platform,raccoongang/edx-platform,gymnasium/edx-platform,fintech-circle/edx-platform,raccoongang/edx-platform,pabloborrego93/edx-platform,Stanford-Online/edx-platform,stvstnfrd/edx-platform,edx/edx-platform,kmoocdev2/edx-platform,proversity-org/edx-platform,CredoReference/edx-platform,ahmedaljazzar/edx-platform,lduarte1991/edx-platform,romain-li/edx-platform,philanthropy-u/edx-platform,ahmedaljazzar/edx-platform,kmoocdev2/edx-platform,romain-li/edx-platform,Edraak/edraak-platform,EDUlib/edx-platform,romain-li/edx-platform,cpennington/edx-platform,eduNEXT/edunext-platform,ESOedX/edx-platform,edx/edx-platform,Stanford-Online/edx-platform,romain-li/edx-platform,philanthropy-u/edx-platform,angelapper/edx-platform,edx-solutions/edx-platform,Lektorium-LLC/edx-platform,hastexo/edx-platform,hastexo/edx-platform,a-parhom/edx-platform,procangroup/edx-platform,BehavioralInsightsTeam/edx-platform,mitocw/edx-platform,kmoocdev2/edx-platform,msegado/edx-platform,EDUlib/edx-platform,arbrandes/edx-platform,TeachAtTUM/edx-platform,msegado/edx-platform,TeachAtTUM/edx-platform,angelapper/edx-platform,EDUlib/edx-platform,raccoongang/edx-platform,a-parhom/edx-platform
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('student', '0009_auto_20170111_0422'),
]
# This migration was to add a constraint that we lost in the Django
# 1.4->1.8 upgrade. But since the constraint used to be created, production
# would already have the constraint even before running the migration, and
# running the migration would fail. We needed to make the migration
# idempotent. Instead of reverting this migration while we did that, we
# edited it to be a SQL no-op, so that people who had already applied it
# wouldn't end up with a ghost migration.
# It had been:
#
# migrations.RunSQL(
# "create unique index email on auth_user (email);",
# "drop index email on auth_user;"
# )
operations = [
+ # Nothing to do.
- migrations.RunSQL(
- # Do nothing:
- "select 1",
- "select 1"
- )
]
|
Make this no-op migration be a true no-op.
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('student', '0009_auto_20170111_0422'),
]
# This migration was to add a constraint that we lost in the Django
# 1.4->1.8 upgrade. But since the constraint used to be created, production
# would already have the constraint even before running the migration, and
# running the migration would fail. We needed to make the migration
# idempotent. Instead of reverting this migration while we did that, we
# edited it to be a SQL no-op, so that people who had already applied it
# wouldn't end up with a ghost migration.
# It had been:
#
# migrations.RunSQL(
# "create unique index email on auth_user (email);",
# "drop index email on auth_user;"
# )
operations = [
migrations.RunSQL(
# Do nothing:
"select 1",
"select 1"
)
]
## Instruction:
Make this no-op migration be a true no-op.
## Code After:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('student', '0009_auto_20170111_0422'),
]
# This migration was to add a constraint that we lost in the Django
# 1.4->1.8 upgrade. But since the constraint used to be created, production
# would already have the constraint even before running the migration, and
# running the migration would fail. We needed to make the migration
# idempotent. Instead of reverting this migration while we did that, we
# edited it to be a SQL no-op, so that people who had already applied it
# wouldn't end up with a ghost migration.
# It had been:
#
# migrations.RunSQL(
# "create unique index email on auth_user (email);",
# "drop index email on auth_user;"
# )
operations = [
# Nothing to do.
]
|
...
operations = [
# Nothing to do.
]
...
|
e35ff2f0e45289c40a57c9488156829c60f9d3a0
|
vumi_http_proxy/clickme.py
|
vumi_http_proxy/clickme.py
|
import click
from vumi_http_proxy import http_proxy
@click.command()
@click.option('--interface', default="0.0.0.0", help='eg 0.0.0.0')
@click.option('--port', default=8080, help='eg 80')
def cli(interface, port):
cli.interface = str(interface)
cli.port = port
"""This script runs vumi-http-proxy on <interface>:<port>"""
click.echo("Starting connection to %s:%d" % (interface, port))
i = http_proxy.Initialize(["asdf.com"], interface, port)
i.main()
if __name__ == '__main__':
cli()
|
import click
from vumi_http_proxy import http_proxy
@click.command()
@click.option('--interface', default="0.0.0.0", help='eg 0.0.0.0')
@click.option('--port', default=8080, help='eg 80')
def cli(interface, port):
"""This script runs vumi-http-proxy on <interface>:<port>"""
interface = str(interface)
click.echo("Starting connection to %s:%d" % (interface, port))
i = http_proxy.Initialize(["asdf.com"], interface, port)
i.main()
if __name__ == '__main__':
cli()
|
Change unicode ip to string
|
Change unicode ip to string
|
Python
|
bsd-3-clause
|
praekelt/vumi-http-proxy,praekelt/vumi-http-proxy
|
import click
from vumi_http_proxy import http_proxy
@click.command()
@click.option('--interface', default="0.0.0.0", help='eg 0.0.0.0')
@click.option('--port', default=8080, help='eg 80')
def cli(interface, port):
- cli.interface = str(interface)
- cli.port = port
"""This script runs vumi-http-proxy on <interface>:<port>"""
+ interface = str(interface)
click.echo("Starting connection to %s:%d" % (interface, port))
i = http_proxy.Initialize(["asdf.com"], interface, port)
i.main()
if __name__ == '__main__':
cli()
|
Change unicode ip to string
|
## Code Before:
import click
from vumi_http_proxy import http_proxy
@click.command()
@click.option('--interface', default="0.0.0.0", help='eg 0.0.0.0')
@click.option('--port', default=8080, help='eg 80')
def cli(interface, port):
cli.interface = str(interface)
cli.port = port
"""This script runs vumi-http-proxy on <interface>:<port>"""
click.echo("Starting connection to %s:%d" % (interface, port))
i = http_proxy.Initialize(["asdf.com"], interface, port)
i.main()
if __name__ == '__main__':
cli()
## Instruction:
Change unicode ip to string
## Code After:
import click
from vumi_http_proxy import http_proxy
@click.command()
@click.option('--interface', default="0.0.0.0", help='eg 0.0.0.0')
@click.option('--port', default=8080, help='eg 80')
def cli(interface, port):
"""This script runs vumi-http-proxy on <interface>:<port>"""
interface = str(interface)
click.echo("Starting connection to %s:%d" % (interface, port))
i = http_proxy.Initialize(["asdf.com"], interface, port)
i.main()
if __name__ == '__main__':
cli()
|
// ... existing code ...
def cli(interface, port):
"""This script runs vumi-http-proxy on <interface>:<port>"""
interface = str(interface)
click.echo("Starting connection to %s:%d" % (interface, port))
// ... rest of the code ...
|
200027f73a99f18eeeae4395be9622c65590916f
|
fireplace/cards/gvg/neutral_epic.py
|
fireplace/cards/gvg/neutral_epic.py
|
from ..utils import *
##
# Minions
# Hobgoblin
class GVG_104:
events = [
OWN_MINION_PLAY.on(
lambda self, player, card, *args: card.atk == 1 and [Buff(card, "GVG_104a")] or []
)
]
# Piloted Sky Golem
class GVG_105:
def deathrattle(self):
return [Summon(CONTROLLER, randomCollectible(type=CardType.MINION, cost=4))]
# Junkbot
class GVG_106:
events = [
Death(FRIENDLY + MECH).on(Buff(SELF, "GVG_106e"))
]
# Enhance-o Mechano
class GVG_107:
def action(self):
for target in self.controller.field:
tag = random.choice((GameTag.WINDFURY, GameTag.TAUNT, GameTag.DIVINE_SHIELD))
yield SetTag(target, {tag: True})
# Recombobulator
class GVG_108:
def action(self, target):
choice = randomCollectible(type=CardType.MINION, cost=target.cost)
return [Morph(TARGET, choice)]
# Clockwork Giant
class GVG_121:
def cost(self, value):
return value - len(self.controller.opponent.hand)
|
from ..utils import *
##
# Minions
# Hobgoblin
class GVG_104:
events = [
OWN_MINION_PLAY.on(
lambda self, player, card, *args: card.atk == 1 and [Buff(card, "GVG_104a")] or []
)
]
# Piloted Sky Golem
class GVG_105:
def deathrattle(self):
return [Summon(CONTROLLER, randomCollectible(type=CardType.MINION, cost=4))]
# Junkbot
class GVG_106:
events = [
Death(FRIENDLY + MECH).on(Buff(SELF, "GVG_106e"))
]
# Enhance-o Mechano
class GVG_107:
def action(self):
for target in self.controller.field.exclude(self):
tag = random.choice((GameTag.WINDFURY, GameTag.TAUNT, GameTag.DIVINE_SHIELD))
yield SetTag(target, {tag: True})
# Recombobulator
class GVG_108:
def action(self, target):
choice = randomCollectible(type=CardType.MINION, cost=target.cost)
return [Morph(TARGET, choice)]
# Clockwork Giant
class GVG_121:
def cost(self, value):
return value - len(self.controller.opponent.hand)
|
Exclude Enhance-o Mechano from its own buff targets
|
Exclude Enhance-o Mechano from its own buff targets
|
Python
|
agpl-3.0
|
oftc-ftw/fireplace,smallnamespace/fireplace,butozerca/fireplace,jleclanche/fireplace,Ragowit/fireplace,oftc-ftw/fireplace,liujimj/fireplace,Ragowit/fireplace,liujimj/fireplace,Meerkov/fireplace,Meerkov/fireplace,NightKev/fireplace,butozerca/fireplace,amw2104/fireplace,beheh/fireplace,smallnamespace/fireplace,amw2104/fireplace
|
from ..utils import *
##
# Minions
# Hobgoblin
class GVG_104:
events = [
OWN_MINION_PLAY.on(
lambda self, player, card, *args: card.atk == 1 and [Buff(card, "GVG_104a")] or []
)
]
# Piloted Sky Golem
class GVG_105:
def deathrattle(self):
return [Summon(CONTROLLER, randomCollectible(type=CardType.MINION, cost=4))]
# Junkbot
class GVG_106:
events = [
Death(FRIENDLY + MECH).on(Buff(SELF, "GVG_106e"))
]
# Enhance-o Mechano
class GVG_107:
def action(self):
- for target in self.controller.field:
+ for target in self.controller.field.exclude(self):
tag = random.choice((GameTag.WINDFURY, GameTag.TAUNT, GameTag.DIVINE_SHIELD))
yield SetTag(target, {tag: True})
# Recombobulator
class GVG_108:
def action(self, target):
choice = randomCollectible(type=CardType.MINION, cost=target.cost)
return [Morph(TARGET, choice)]
# Clockwork Giant
class GVG_121:
def cost(self, value):
return value - len(self.controller.opponent.hand)
|
Exclude Enhance-o Mechano from its own buff targets
|
## Code Before:
from ..utils import *
##
# Minions
# Hobgoblin
class GVG_104:
events = [
OWN_MINION_PLAY.on(
lambda self, player, card, *args: card.atk == 1 and [Buff(card, "GVG_104a")] or []
)
]
# Piloted Sky Golem
class GVG_105:
def deathrattle(self):
return [Summon(CONTROLLER, randomCollectible(type=CardType.MINION, cost=4))]
# Junkbot
class GVG_106:
events = [
Death(FRIENDLY + MECH).on(Buff(SELF, "GVG_106e"))
]
# Enhance-o Mechano
class GVG_107:
def action(self):
for target in self.controller.field:
tag = random.choice((GameTag.WINDFURY, GameTag.TAUNT, GameTag.DIVINE_SHIELD))
yield SetTag(target, {tag: True})
# Recombobulator
class GVG_108:
def action(self, target):
choice = randomCollectible(type=CardType.MINION, cost=target.cost)
return [Morph(TARGET, choice)]
# Clockwork Giant
class GVG_121:
def cost(self, value):
return value - len(self.controller.opponent.hand)
## Instruction:
Exclude Enhance-o Mechano from its own buff targets
## Code After:
from ..utils import *
##
# Minions
# Hobgoblin
class GVG_104:
events = [
OWN_MINION_PLAY.on(
lambda self, player, card, *args: card.atk == 1 and [Buff(card, "GVG_104a")] or []
)
]
# Piloted Sky Golem
class GVG_105:
def deathrattle(self):
return [Summon(CONTROLLER, randomCollectible(type=CardType.MINION, cost=4))]
# Junkbot
class GVG_106:
events = [
Death(FRIENDLY + MECH).on(Buff(SELF, "GVG_106e"))
]
# Enhance-o Mechano
class GVG_107:
def action(self):
for target in self.controller.field.exclude(self):
tag = random.choice((GameTag.WINDFURY, GameTag.TAUNT, GameTag.DIVINE_SHIELD))
yield SetTag(target, {tag: True})
# Recombobulator
class GVG_108:
def action(self, target):
choice = randomCollectible(type=CardType.MINION, cost=target.cost)
return [Morph(TARGET, choice)]
# Clockwork Giant
class GVG_121:
def cost(self, value):
return value - len(self.controller.opponent.hand)
|
// ... existing code ...
def action(self):
for target in self.controller.field.exclude(self):
tag = random.choice((GameTag.WINDFURY, GameTag.TAUNT, GameTag.DIVINE_SHIELD))
// ... rest of the code ...
|
e6210531dac1d7efd5fd4d343dcac74a0b74515e
|
request_profiler/settings.py
|
request_profiler/settings.py
|
from django.conf import settings
# cache key used to store enabled rulesets.
RULESET_CACHE_KEY = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_KEY', "request_profiler__rulesets") # noqa
# how long to cache them for - defaults to 10s
RULESET_CACHE_TIMEOUT = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_TIMEOUT', 10) # noqa
# This is a function that can be used to override all rules to exclude requests from profiling
# e.g. you can use this to ignore staff, or search engine bots, etc.
GLOBAL_EXCLUDE_FUNC = getattr(settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC', lambda r: True)
|
from django.conf import settings
# cache key used to store enabled rulesets.
RULESET_CACHE_KEY = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_KEY', "request_profiler__rulesets") # noqa
# how long to cache them for - defaults to 10s
RULESET_CACHE_TIMEOUT = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_TIMEOUT', 10) # noqa
# This is a function that can be used to override all rules to exclude requests from profiling
# e.g. you can use this to ignore staff, or search engine bots, etc.
GLOBAL_EXCLUDE_FUNC = getattr(
settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC',
lambda r: not (hasattr(r, 'user') and r.user.is_staff)
)
|
Update GLOBAL_EXCLUDE_FUNC default to exclude admins
|
Update GLOBAL_EXCLUDE_FUNC default to exclude admins
|
Python
|
mit
|
yunojuno/django-request-profiler,yunojuno/django-request-profiler,sigshen/django-request-profiler,sigshen/django-request-profiler
|
from django.conf import settings
# cache key used to store enabled rulesets.
RULESET_CACHE_KEY = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_KEY', "request_profiler__rulesets") # noqa
# how long to cache them for - defaults to 10s
RULESET_CACHE_TIMEOUT = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_TIMEOUT', 10) # noqa
# This is a function that can be used to override all rules to exclude requests from profiling
# e.g. you can use this to ignore staff, or search engine bots, etc.
- GLOBAL_EXCLUDE_FUNC = getattr(settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC', lambda r: True)
+ GLOBAL_EXCLUDE_FUNC = getattr(
+ settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC',
+ lambda r: not (hasattr(r, 'user') and r.user.is_staff)
+ )
|
Update GLOBAL_EXCLUDE_FUNC default to exclude admins
|
## Code Before:
from django.conf import settings
# cache key used to store enabled rulesets.
RULESET_CACHE_KEY = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_KEY', "request_profiler__rulesets") # noqa
# how long to cache them for - defaults to 10s
RULESET_CACHE_TIMEOUT = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_TIMEOUT', 10) # noqa
# This is a function that can be used to override all rules to exclude requests from profiling
# e.g. you can use this to ignore staff, or search engine bots, etc.
GLOBAL_EXCLUDE_FUNC = getattr(settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC', lambda r: True)
## Instruction:
Update GLOBAL_EXCLUDE_FUNC default to exclude admins
## Code After:
from django.conf import settings
# cache key used to store enabled rulesets.
RULESET_CACHE_KEY = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_KEY', "request_profiler__rulesets") # noqa
# how long to cache them for - defaults to 10s
RULESET_CACHE_TIMEOUT = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_TIMEOUT', 10) # noqa
# This is a function that can be used to override all rules to exclude requests from profiling
# e.g. you can use this to ignore staff, or search engine bots, etc.
GLOBAL_EXCLUDE_FUNC = getattr(
settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC',
lambda r: not (hasattr(r, 'user') and r.user.is_staff)
)
|
// ... existing code ...
# e.g. you can use this to ignore staff, or search engine bots, etc.
GLOBAL_EXCLUDE_FUNC = getattr(
settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC',
lambda r: not (hasattr(r, 'user') and r.user.is_staff)
)
// ... rest of the code ...
|
cfe4148feac51a9be6ff74e978a22f1493adff8b
|
doajtest/unit/test_tasks_sitemap.py
|
doajtest/unit/test_tasks_sitemap.py
|
from doajtest.helpers import DoajTestCase
from portality.core import app
from portality.tasks import sitemap
from portality.background import BackgroundApi
import os, shutil, time
from portality.lib import paths
from portality.store import StoreFactory
class TestSitemap(DoajTestCase):
store_impl = None
@classmethod
def setUpClass(cls) -> None:
super(TestSitemap, cls).setUpClass()
cls.store_impl = app.config["STORE_IMPL"]
app.config["STORE_IMPL"] = "portality.store.StoreLocal"
@classmethod
def tearDownClass(cls) -> None:
super(TestSitemap, cls).tearDownClass()
app.config["STORE_IMPL"] = cls.store_impl
def setUp(self):
super(TestSitemap, self).setUp()
self.container_id = app.config.get("STORE_CACHE_CONTAINER")
self.mainStore = StoreFactory.get("cache")
def tearDown(self):
super(TestSitemap, self).tearDown()
self.mainStore.delete_container(self.container_id)
def test_01_sitemap(self):
user = app.config.get("SYSTEM_USERNAME")
job = sitemap.SitemapBackgroundTask.prepare(user)
task = sitemap.SitemapBackgroundTask(job)
BackgroundApi.execute(task)
time.sleep(1.5)
assert len(self.mainStore.list(self.container_id)) == 1
|
from doajtest.helpers import DoajTestCase
from portality.core import app
from portality.tasks import sitemap
from portality.background import BackgroundApi
import time
from portality.store import StoreFactory
class TestSitemap(DoajTestCase):
store_impl = None
@classmethod
def setUpClass(cls) -> None:
super(TestSitemap, cls).setUpClass()
cls.store_impl = app.config["STORE_IMPL"]
app.config["STORE_IMPL"] = "portality.store.StoreLocal"
@classmethod
def tearDownClass(cls) -> None:
super(TestSitemap, cls).tearDownClass()
app.config["STORE_IMPL"] = cls.store_impl
def setUp(self):
super(TestSitemap, self).setUp()
self.container_id = app.config.get("STORE_CACHE_CONTAINER")
self.mainStore = StoreFactory.get("cache")
def tearDown(self):
super(TestSitemap, self).tearDown()
self.mainStore.delete_container(self.container_id)
def test_01_sitemap(self):
user = app.config.get("SYSTEM_USERNAME")
job = sitemap.SitemapBackgroundTask.prepare(user)
task = sitemap.SitemapBackgroundTask(job)
BackgroundApi.execute(task)
time.sleep(2)
assert len(self.mainStore.list(self.container_id)) == 1
|
Increase timeout for slow test
|
Increase timeout for slow test
|
Python
|
apache-2.0
|
DOAJ/doaj,DOAJ/doaj,DOAJ/doaj,DOAJ/doaj
|
from doajtest.helpers import DoajTestCase
from portality.core import app
from portality.tasks import sitemap
from portality.background import BackgroundApi
+ import time
- import os, shutil, time
- from portality.lib import paths
from portality.store import StoreFactory
class TestSitemap(DoajTestCase):
store_impl = None
@classmethod
def setUpClass(cls) -> None:
super(TestSitemap, cls).setUpClass()
cls.store_impl = app.config["STORE_IMPL"]
app.config["STORE_IMPL"] = "portality.store.StoreLocal"
@classmethod
def tearDownClass(cls) -> None:
super(TestSitemap, cls).tearDownClass()
app.config["STORE_IMPL"] = cls.store_impl
def setUp(self):
super(TestSitemap, self).setUp()
self.container_id = app.config.get("STORE_CACHE_CONTAINER")
self.mainStore = StoreFactory.get("cache")
def tearDown(self):
super(TestSitemap, self).tearDown()
self.mainStore.delete_container(self.container_id)
def test_01_sitemap(self):
user = app.config.get("SYSTEM_USERNAME")
job = sitemap.SitemapBackgroundTask.prepare(user)
task = sitemap.SitemapBackgroundTask(job)
BackgroundApi.execute(task)
- time.sleep(1.5)
+ time.sleep(2)
assert len(self.mainStore.list(self.container_id)) == 1
+
|
Increase timeout for slow test
|
## Code Before:
from doajtest.helpers import DoajTestCase
from portality.core import app
from portality.tasks import sitemap
from portality.background import BackgroundApi
import os, shutil, time
from portality.lib import paths
from portality.store import StoreFactory
class TestSitemap(DoajTestCase):
store_impl = None
@classmethod
def setUpClass(cls) -> None:
super(TestSitemap, cls).setUpClass()
cls.store_impl = app.config["STORE_IMPL"]
app.config["STORE_IMPL"] = "portality.store.StoreLocal"
@classmethod
def tearDownClass(cls) -> None:
super(TestSitemap, cls).tearDownClass()
app.config["STORE_IMPL"] = cls.store_impl
def setUp(self):
super(TestSitemap, self).setUp()
self.container_id = app.config.get("STORE_CACHE_CONTAINER")
self.mainStore = StoreFactory.get("cache")
def tearDown(self):
super(TestSitemap, self).tearDown()
self.mainStore.delete_container(self.container_id)
def test_01_sitemap(self):
user = app.config.get("SYSTEM_USERNAME")
job = sitemap.SitemapBackgroundTask.prepare(user)
task = sitemap.SitemapBackgroundTask(job)
BackgroundApi.execute(task)
time.sleep(1.5)
assert len(self.mainStore.list(self.container_id)) == 1
## Instruction:
Increase timeout for slow test
## Code After:
from doajtest.helpers import DoajTestCase
from portality.core import app
from portality.tasks import sitemap
from portality.background import BackgroundApi
import time
from portality.store import StoreFactory
class TestSitemap(DoajTestCase):
store_impl = None
@classmethod
def setUpClass(cls) -> None:
super(TestSitemap, cls).setUpClass()
cls.store_impl = app.config["STORE_IMPL"]
app.config["STORE_IMPL"] = "portality.store.StoreLocal"
@classmethod
def tearDownClass(cls) -> None:
super(TestSitemap, cls).tearDownClass()
app.config["STORE_IMPL"] = cls.store_impl
def setUp(self):
super(TestSitemap, self).setUp()
self.container_id = app.config.get("STORE_CACHE_CONTAINER")
self.mainStore = StoreFactory.get("cache")
def tearDown(self):
super(TestSitemap, self).tearDown()
self.mainStore.delete_container(self.container_id)
def test_01_sitemap(self):
user = app.config.get("SYSTEM_USERNAME")
job = sitemap.SitemapBackgroundTask.prepare(user)
task = sitemap.SitemapBackgroundTask(job)
BackgroundApi.execute(task)
time.sleep(2)
assert len(self.mainStore.list(self.container_id)) == 1
|
// ... existing code ...
from portality.background import BackgroundApi
import time
from portality.store import StoreFactory
// ... modified code ...
BackgroundApi.execute(task)
time.sleep(2)
assert len(self.mainStore.list(self.container_id)) == 1
// ... rest of the code ...
|
7072389221f7e287328cecc695b93a77d04c69ba
|
tests/basecli_test.py
|
tests/basecli_test.py
|
from unittest import TestCase
from ass2m.cli import CLI
from tempfile import mkdtemp
import shutil
class BaseCLITest(TestCase):
def setUp(self):
self.root = mkdtemp(prefix='ass2m_test_root')
self.app = CLI(self.root)
def tearDown(self):
if self.root:
shutil.rmtree(self.root)
def test_init(self):
assert self.app.main(['ass2m_test', 'init']) in (0, None)
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
|
from unittest import TestCase
from ass2m.cli import CLI
from tempfile import mkdtemp
import shutil
import sys
import re
from StringIO import StringIO
class BaseCLITest(TestCase):
def setUp(self):
self.root = mkdtemp(prefix='ass2m_test_root')
self.app = CLI(self.root)
def tearDown(self):
if self.root:
shutil.rmtree(self.root)
def beginCapture(self):
self.stdout = sys.stdout
# begin capture
sys.stdout = StringIO()
def endCapture(self):
captured = sys.stdout
# end capture
sys.stdout = self.stdout
self.stdout = None
return captured.getvalue()
def test_init(self):
self.beginCapture()
assert self.app.main(['ass2m_test', 'init']) in (0, None)
output = self.endCapture()
assert output.strip() == "Ass2m working directory created."
self.beginCapture()
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
output = self.endCapture()
assert re.match(re.escape(r'/')+r'\s+'+re.escape(r'all(rl-)'), output, re.S)
assert re.match(".+"+re.escape(r'/.ass2m/')+r'\s+'+re.escape(r'all(---)'), output, re.S)
|
Test and capture the CLI output
|
Test and capture the CLI output
|
Python
|
agpl-3.0
|
laurentb/assnet,laurentb/assnet
|
from unittest import TestCase
from ass2m.cli import CLI
from tempfile import mkdtemp
import shutil
+ import sys
+ import re
+ from StringIO import StringIO
class BaseCLITest(TestCase):
def setUp(self):
self.root = mkdtemp(prefix='ass2m_test_root')
self.app = CLI(self.root)
def tearDown(self):
if self.root:
shutil.rmtree(self.root)
+ def beginCapture(self):
+ self.stdout = sys.stdout
+ # begin capture
+ sys.stdout = StringIO()
+
+ def endCapture(self):
+ captured = sys.stdout
+ # end capture
+ sys.stdout = self.stdout
+ self.stdout = None
+ return captured.getvalue()
+
def test_init(self):
+ self.beginCapture()
assert self.app.main(['ass2m_test', 'init']) in (0, None)
+ output = self.endCapture()
+ assert output.strip() == "Ass2m working directory created."
+ self.beginCapture()
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
+ output = self.endCapture()
+ assert re.match(re.escape(r'/')+r'\s+'+re.escape(r'all(rl-)'), output, re.S)
+ assert re.match(".+"+re.escape(r'/.ass2m/')+r'\s+'+re.escape(r'all(---)'), output, re.S)
|
Test and capture the CLI output
|
## Code Before:
from unittest import TestCase
from ass2m.cli import CLI
from tempfile import mkdtemp
import shutil
class BaseCLITest(TestCase):
def setUp(self):
self.root = mkdtemp(prefix='ass2m_test_root')
self.app = CLI(self.root)
def tearDown(self):
if self.root:
shutil.rmtree(self.root)
def test_init(self):
assert self.app.main(['ass2m_test', 'init']) in (0, None)
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
## Instruction:
Test and capture the CLI output
## Code After:
from unittest import TestCase
from ass2m.cli import CLI
from tempfile import mkdtemp
import shutil
import sys
import re
from StringIO import StringIO
class BaseCLITest(TestCase):
def setUp(self):
self.root = mkdtemp(prefix='ass2m_test_root')
self.app = CLI(self.root)
def tearDown(self):
if self.root:
shutil.rmtree(self.root)
def beginCapture(self):
self.stdout = sys.stdout
# begin capture
sys.stdout = StringIO()
def endCapture(self):
captured = sys.stdout
# end capture
sys.stdout = self.stdout
self.stdout = None
return captured.getvalue()
def test_init(self):
self.beginCapture()
assert self.app.main(['ass2m_test', 'init']) in (0, None)
output = self.endCapture()
assert output.strip() == "Ass2m working directory created."
self.beginCapture()
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
output = self.endCapture()
assert re.match(re.escape(r'/')+r'\s+'+re.escape(r'all(rl-)'), output, re.S)
assert re.match(".+"+re.escape(r'/.ass2m/')+r'\s+'+re.escape(r'all(---)'), output, re.S)
|
// ... existing code ...
import shutil
import sys
import re
from StringIO import StringIO
// ... modified code ...
def beginCapture(self):
self.stdout = sys.stdout
# begin capture
sys.stdout = StringIO()
def endCapture(self):
captured = sys.stdout
# end capture
sys.stdout = self.stdout
self.stdout = None
return captured.getvalue()
def test_init(self):
self.beginCapture()
assert self.app.main(['ass2m_test', 'init']) in (0, None)
output = self.endCapture()
assert output.strip() == "Ass2m working directory created."
self.beginCapture()
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
output = self.endCapture()
assert re.match(re.escape(r'/')+r'\s+'+re.escape(r'all(rl-)'), output, re.S)
assert re.match(".+"+re.escape(r'/.ass2m/')+r'\s+'+re.escape(r'all(---)'), output, re.S)
// ... rest of the code ...
|
ace1997f5d1cab297ab68886501b45602b2d8e2d
|
cards/models.py
|
cards/models.py
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from time import time
def card_image_filename(instance, filename):
timestamp = int(time())
return 'cards/%s%d.jpg' % (instance, timestamp)
@python_2_unicode_compatible
class Card(models.Model):
title = models.CharField(max_length=140, unique=True)
is_title_visible = models.BooleanField(default=True)
text = models.TextField()
secondary_text = models.TextField(null=True, blank=True)
author = models.CharField(max_length=100, null=True, blank=True)
image = models.ImageField(upload_to=card_image_filename, null=True, blank=True)
creation_datetime = models.DateTimeField(auto_now_add=True)
update_datetime = models.DateTimeField(auto_now=True)
is_active = models.BooleanField(default=True)
created_by = models.ForeignKey(User, null=True, blank=True, related_name='%(class)s_created_by')
updated_by = models.ForeignKey(User, null=True, blank=True, related_name='%(class)s_updated_by')
def __str__(self):
return self.title
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from time import time
def card_image_filename(instance):
timestamp = int(time())
return 'cards/%s%d.jpg' % (instance, timestamp)
@python_2_unicode_compatible
class Card(models.Model):
title = models.CharField(max_length=140, unique=True)
is_title_visible = models.BooleanField(default=True)
text = models.TextField()
secondary_text = models.TextField(null=True, blank=True)
author = models.CharField(max_length=100, null=True, blank=True)
image = models.ImageField(upload_to=card_image_filename, null=True, blank=True)
creation_datetime = models.DateTimeField(auto_now_add=True)
update_datetime = models.DateTimeField(auto_now=True)
is_active = models.BooleanField(default=True)
created_by = models.ForeignKey(User, null=True, blank=True, related_name='%(class)s_created_by')
updated_by = models.ForeignKey(User, null=True, blank=True, related_name='%(class)s_updated_by')
def __str__(self):
return self.title
|
Remove unused filename parameter from card image filename function
|
Remove unused filename parameter from card image filename function
|
Python
|
mit
|
neosergio/WisdomBox
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from time import time
- def card_image_filename(instance, filename):
+ def card_image_filename(instance):
timestamp = int(time())
return 'cards/%s%d.jpg' % (instance, timestamp)
@python_2_unicode_compatible
class Card(models.Model):
title = models.CharField(max_length=140, unique=True)
is_title_visible = models.BooleanField(default=True)
text = models.TextField()
secondary_text = models.TextField(null=True, blank=True)
author = models.CharField(max_length=100, null=True, blank=True)
image = models.ImageField(upload_to=card_image_filename, null=True, blank=True)
creation_datetime = models.DateTimeField(auto_now_add=True)
update_datetime = models.DateTimeField(auto_now=True)
is_active = models.BooleanField(default=True)
created_by = models.ForeignKey(User, null=True, blank=True, related_name='%(class)s_created_by')
updated_by = models.ForeignKey(User, null=True, blank=True, related_name='%(class)s_updated_by')
def __str__(self):
return self.title
|
Remove unused filename parameter from card image filename function
|
## Code Before:
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from time import time
def card_image_filename(instance, filename):
timestamp = int(time())
return 'cards/%s%d.jpg' % (instance, timestamp)
@python_2_unicode_compatible
class Card(models.Model):
title = models.CharField(max_length=140, unique=True)
is_title_visible = models.BooleanField(default=True)
text = models.TextField()
secondary_text = models.TextField(null=True, blank=True)
author = models.CharField(max_length=100, null=True, blank=True)
image = models.ImageField(upload_to=card_image_filename, null=True, blank=True)
creation_datetime = models.DateTimeField(auto_now_add=True)
update_datetime = models.DateTimeField(auto_now=True)
is_active = models.BooleanField(default=True)
created_by = models.ForeignKey(User, null=True, blank=True, related_name='%(class)s_created_by')
updated_by = models.ForeignKey(User, null=True, blank=True, related_name='%(class)s_updated_by')
def __str__(self):
return self.title
## Instruction:
Remove unused filename parameter from card image filename function
## Code After:
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from time import time
def card_image_filename(instance):
timestamp = int(time())
return 'cards/%s%d.jpg' % (instance, timestamp)
@python_2_unicode_compatible
class Card(models.Model):
title = models.CharField(max_length=140, unique=True)
is_title_visible = models.BooleanField(default=True)
text = models.TextField()
secondary_text = models.TextField(null=True, blank=True)
author = models.CharField(max_length=100, null=True, blank=True)
image = models.ImageField(upload_to=card_image_filename, null=True, blank=True)
creation_datetime = models.DateTimeField(auto_now_add=True)
update_datetime = models.DateTimeField(auto_now=True)
is_active = models.BooleanField(default=True)
created_by = models.ForeignKey(User, null=True, blank=True, related_name='%(class)s_created_by')
updated_by = models.ForeignKey(User, null=True, blank=True, related_name='%(class)s_updated_by')
def __str__(self):
return self.title
|
// ... existing code ...
def card_image_filename(instance):
timestamp = int(time())
// ... rest of the code ...
|
c14f9c661e485243660970d3a76014b8e6b7f1af
|
src-python/setup.py
|
src-python/setup.py
|
from distutils.core import setup
import py2exe
setup(console=['process.py'])
|
from distutils.core import setup
import py2exe, sys
sys.argv.append('py2exe')
setup(
options = {'py2exe': {'bundle_files': 1, 'compressed': True}},
console = [{'script': "process.py"}],
zipfile = None,
)
|
Add options to generate single executable file
|
Add options to generate single executable file
|
Python
|
mit
|
yaa110/Adobe-Air-Registry-Modifier
|
from distutils.core import setup
- import py2exe
+ import py2exe, sys
- setup(console=['process.py'])
+ sys.argv.append('py2exe')
+ setup(
+ options = {'py2exe': {'bundle_files': 1, 'compressed': True}},
+ console = [{'script': "process.py"}],
+ zipfile = None,
+ )
+
|
Add options to generate single executable file
|
## Code Before:
from distutils.core import setup
import py2exe
setup(console=['process.py'])
## Instruction:
Add options to generate single executable file
## Code After:
from distutils.core import setup
import py2exe, sys
sys.argv.append('py2exe')
setup(
options = {'py2exe': {'bundle_files': 1, 'compressed': True}},
console = [{'script': "process.py"}],
zipfile = None,
)
|
# ... existing code ...
from distutils.core import setup
import py2exe, sys
sys.argv.append('py2exe')
setup(
options = {'py2exe': {'bundle_files': 1, 'compressed': True}},
console = [{'script': "process.py"}],
zipfile = None,
)
# ... rest of the code ...
|
e5e61e4d2575a39d585b3c51c082b2b53bade7bb
|
django_sphinx_db/backend/sphinx/base.py
|
django_sphinx_db/backend/sphinx/base.py
|
from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.base import DatabaseOperations as MySQLDatabaseOperations
from django.db.backends.mysql.creation import DatabaseCreation as MySQLDatabaseCreation
class SphinxOperations(MySQLDatabaseOperations):
compiler_module = "django_sphinx_db.backend.sphinx.compiler"
def fulltext_search_sql(self, field_name):
return 'MATCH (%s)'
class SphinxCreation(MySQLDatabaseCreation):
def create_test_db(self, verbosity=1, autoclobber=False):
# NOOP, test using regular sphinx database.
if self.connection.settings_dict['TEST_NAME']:
test_name = self.connection.settings_dict['TEST_NAME']
self.connection.close()
self.connection.settings_dict['NAME'] = test_name
cursor = self.connection.cursor()
return test_name
return self.connection.settings_dict['NAME']
def destroy_test_db(self, old_database_name, verbosity=1):
# NOOP, we created nothing, nothing to destroy.
return
class DatabaseWrapper(MySQLDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
|
from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.base import DatabaseOperations as MySQLDatabaseOperations
from django.db.backends.mysql.creation import DatabaseCreation as MySQLDatabaseCreation
class SphinxOperations(MySQLDatabaseOperations):
compiler_module = "django_sphinx_db.backend.sphinx.compiler"
def fulltext_search_sql(self, field_name):
return 'MATCH (%s)'
class SphinxCreation(MySQLDatabaseCreation):
def create_test_db(self, verbosity=1, autoclobber=False):
# NOOP, test using regular sphinx database.
if self.connection.settings_dict['TEST_NAME']:
test_name = self.connection.settings_dict['TEST_NAME']
self.connection.close()
self.connection.settings_dict['NAME'] = test_name
cursor = self.connection.cursor()
return test_name
return self.connection.settings_dict['NAME']
def destroy_test_db(self, old_database_name, verbosity=1):
# NOOP, we created nothing, nothing to destroy.
return
class DatabaseWrapper(MySQLDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
# The following can be useful for unit testing, with multiple databases
# configured in Django, if one of them does not support transactions,
# Django will fall back to using clear/create (instead of begin...rollback)
# between each test. The method Django uses to detect transactions uses
# CREATE TABLE and DROP TABLE, which ARE NOT supported by Sphinx, even though
# transactions ARE. Therefore, we can just set this to True, and Django will
# use transactions for clearing data between tests when all OTHER backends
# support it.
self.features.supports_transactions = True
|
Speed up unit tests when Sphinx DB is configured
|
Speed up unit tests when Sphinx DB is configured
|
Python
|
bsd-3-clause
|
smartfile/django-sphinx-db,rutube/django-sphinx-db,anatoliy-larin/django-sphinx-db,jnormore/django-sphinx-db,petekalo/django-sphinx-db
|
from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.base import DatabaseOperations as MySQLDatabaseOperations
from django.db.backends.mysql.creation import DatabaseCreation as MySQLDatabaseCreation
class SphinxOperations(MySQLDatabaseOperations):
compiler_module = "django_sphinx_db.backend.sphinx.compiler"
def fulltext_search_sql(self, field_name):
return 'MATCH (%s)'
class SphinxCreation(MySQLDatabaseCreation):
def create_test_db(self, verbosity=1, autoclobber=False):
# NOOP, test using regular sphinx database.
if self.connection.settings_dict['TEST_NAME']:
test_name = self.connection.settings_dict['TEST_NAME']
self.connection.close()
self.connection.settings_dict['NAME'] = test_name
cursor = self.connection.cursor()
return test_name
return self.connection.settings_dict['NAME']
def destroy_test_db(self, old_database_name, verbosity=1):
# NOOP, we created nothing, nothing to destroy.
return
class DatabaseWrapper(MySQLDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
+ # The following can be useful for unit testing, with multiple databases
+ # configured in Django, if one of them does not support transactions,
+ # Django will fall back to using clear/create (instead of begin...rollback)
+ # between each test. The method Django uses to detect transactions uses
+ # CREATE TABLE and DROP TABLE, which ARE NOT supported by Sphinx, even though
+ # transactions ARE. Therefore, we can just set this to True, and Django will
+ # use transactions for clearing data between tests when all OTHER backends
+ # support it.
+ self.features.supports_transactions = True
|
Speed up unit tests when Sphinx DB is configured
|
## Code Before:
from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.base import DatabaseOperations as MySQLDatabaseOperations
from django.db.backends.mysql.creation import DatabaseCreation as MySQLDatabaseCreation
class SphinxOperations(MySQLDatabaseOperations):
compiler_module = "django_sphinx_db.backend.sphinx.compiler"
def fulltext_search_sql(self, field_name):
return 'MATCH (%s)'
class SphinxCreation(MySQLDatabaseCreation):
def create_test_db(self, verbosity=1, autoclobber=False):
# NOOP, test using regular sphinx database.
if self.connection.settings_dict['TEST_NAME']:
test_name = self.connection.settings_dict['TEST_NAME']
self.connection.close()
self.connection.settings_dict['NAME'] = test_name
cursor = self.connection.cursor()
return test_name
return self.connection.settings_dict['NAME']
def destroy_test_db(self, old_database_name, verbosity=1):
# NOOP, we created nothing, nothing to destroy.
return
class DatabaseWrapper(MySQLDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
## Instruction:
Speed up unit tests when Sphinx DB is configured
## Code After:
from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.base import DatabaseOperations as MySQLDatabaseOperations
from django.db.backends.mysql.creation import DatabaseCreation as MySQLDatabaseCreation
class SphinxOperations(MySQLDatabaseOperations):
compiler_module = "django_sphinx_db.backend.sphinx.compiler"
def fulltext_search_sql(self, field_name):
return 'MATCH (%s)'
class SphinxCreation(MySQLDatabaseCreation):
def create_test_db(self, verbosity=1, autoclobber=False):
# NOOP, test using regular sphinx database.
if self.connection.settings_dict['TEST_NAME']:
test_name = self.connection.settings_dict['TEST_NAME']
self.connection.close()
self.connection.settings_dict['NAME'] = test_name
cursor = self.connection.cursor()
return test_name
return self.connection.settings_dict['NAME']
def destroy_test_db(self, old_database_name, verbosity=1):
# NOOP, we created nothing, nothing to destroy.
return
class DatabaseWrapper(MySQLDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
# The following can be useful for unit testing, with multiple databases
# configured in Django, if one of them does not support transactions,
# Django will fall back to using clear/create (instead of begin...rollback)
# between each test. The method Django uses to detect transactions uses
# CREATE TABLE and DROP TABLE, which ARE NOT supported by Sphinx, even though
# transactions ARE. Therefore, we can just set this to True, and Django will
# use transactions for clearing data between tests when all OTHER backends
# support it.
self.features.supports_transactions = True
|
// ... existing code ...
self.creation = SphinxCreation(self)
# The following can be useful for unit testing, with multiple databases
# configured in Django, if one of them does not support transactions,
# Django will fall back to using clear/create (instead of begin...rollback)
# between each test. The method Django uses to detect transactions uses
# CREATE TABLE and DROP TABLE, which ARE NOT supported by Sphinx, even though
# transactions ARE. Therefore, we can just set this to True, and Django will
# use transactions for clearing data between tests when all OTHER backends
# support it.
self.features.supports_transactions = True
// ... rest of the code ...
|
b1b8e06b2b0ae6c79b94bd8e7b0b49721b7bdc13
|
web/attempts/tests.py
|
web/attempts/tests.py
|
from django.test import TestCase
# Create your tests here.
|
from django.test import TestCase
from rest_framework.test import APIClient
from users.models import User
# Create your tests here.
class TokenLoginTestCase(TestCase):
fixtures = ['users.json']
def testAttemptSubmit(self):
user = User.objects.get(username='matija')
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + user.auth_token.key)
response = client.post('/api/attempts/submit/',
[
{
"solution": "\ndef linearna(a, b):\\n return -b / a\\n",
"valid": True,
"feedback": ["prvi", "drugi feedbk"],
"secret": [], "part": 1
},
{
"solution": "\\nimport math\\n\\ndef ploscina(n, a):\\n"
"return 0.25 a**2 n / math.tan(math.pi / n)",
"valid": True,
"feedback": [],
"secret": [],
"part": 2
}
],
format='json'
)
self.assertEqual(response.status_code, 200)
|
Add simple Attempt submit test
|
Add simple Attempt submit test
|
Python
|
agpl-3.0
|
matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo
|
from django.test import TestCase
+ from rest_framework.test import APIClient
+ from users.models import User
+
# Create your tests here.
+ class TokenLoginTestCase(TestCase):
+ fixtures = ['users.json']
+ def testAttemptSubmit(self):
+ user = User.objects.get(username='matija')
+ client = APIClient()
+ client.credentials(HTTP_AUTHORIZATION='Token ' + user.auth_token.key)
+ response = client.post('/api/attempts/submit/',
+ [
+ {
+ "solution": "\ndef linearna(a, b):\\n return -b / a\\n",
+ "valid": True,
+ "feedback": ["prvi", "drugi feedbk"],
+ "secret": [], "part": 1
+ },
+ {
+ "solution": "\\nimport math\\n\\ndef ploscina(n, a):\\n"
+ "return 0.25 a**2 n / math.tan(math.pi / n)",
+ "valid": True,
+ "feedback": [],
+ "secret": [],
+ "part": 2
+ }
+ ],
+ format='json'
+ )
+ self.assertEqual(response.status_code, 200)
+
|
Add simple Attempt submit test
|
## Code Before:
from django.test import TestCase
# Create your tests here.
## Instruction:
Add simple Attempt submit test
## Code After:
from django.test import TestCase
from rest_framework.test import APIClient
from users.models import User
# Create your tests here.
class TokenLoginTestCase(TestCase):
fixtures = ['users.json']
def testAttemptSubmit(self):
user = User.objects.get(username='matija')
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + user.auth_token.key)
response = client.post('/api/attempts/submit/',
[
{
"solution": "\ndef linearna(a, b):\\n return -b / a\\n",
"valid": True,
"feedback": ["prvi", "drugi feedbk"],
"secret": [], "part": 1
},
{
"solution": "\\nimport math\\n\\ndef ploscina(n, a):\\n"
"return 0.25 a**2 n / math.tan(math.pi / n)",
"valid": True,
"feedback": [],
"secret": [],
"part": 2
}
],
format='json'
)
self.assertEqual(response.status_code, 200)
|
# ... existing code ...
from django.test import TestCase
from rest_framework.test import APIClient
from users.models import User
# ... modified code ...
# Create your tests here.
class TokenLoginTestCase(TestCase):
fixtures = ['users.json']
def testAttemptSubmit(self):
user = User.objects.get(username='matija')
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + user.auth_token.key)
response = client.post('/api/attempts/submit/',
[
{
"solution": "\ndef linearna(a, b):\\n return -b / a\\n",
"valid": True,
"feedback": ["prvi", "drugi feedbk"],
"secret": [], "part": 1
},
{
"solution": "\\nimport math\\n\\ndef ploscina(n, a):\\n"
"return 0.25 a**2 n / math.tan(math.pi / n)",
"valid": True,
"feedback": [],
"secret": [],
"part": 2
}
],
format='json'
)
self.assertEqual(response.status_code, 200)
# ... rest of the code ...
|
c45fc698da9783b561cca69363ec4998622e9ac0
|
mint/rest/db/capsulemgr.py
|
mint/rest/db/capsulemgr.py
|
from conary.lib import util
from mint.rest.db import manager
import rpath_capsule_indexer
class CapsuleManager(manager.Manager):
def getIndexerConfig(self):
capsuleDataDir = util.joinPaths(self.cfg.dataPath, 'capsules')
cfg = rpath_capsule_indexer.IndexerConfig()
cfg.configLine("store sqlite:///%s/database.sqlite" %
capsuleDataDir)
cfg.configLine("indexDir %s/packages" % capsuleDataDir)
cfg.configLine("systemsPath %s/systems" % capsuleDataDir)
dataSources = self.db.platformMgr.listPlatformSources().platformSource
# XXX we only deal with RHN for now
if dataSources:
cfg.configLine("user RHN %s %s" % (dataSources[0].username,
dataSources[0].password))
# XXX channels are hardcoded for now
cfg.configLine("channels rhel-i386-as-4")
cfg.configLine("channels rhel-x86_64-as-4")
cfg.configLine("channels rhel-i386-server-5")
cfg.configLine("channels rhel-x86_64-server-5")
util.mkdirChain(capsuleDataDir)
return cfg
def getIndexer(self):
cfg = self.getIndexerConfig()
return rpath_capsule_indexer.Indexer(cfg)
|
from conary.lib import util
from mint.rest.db import manager
import rpath_capsule_indexer
class CapsuleManager(manager.Manager):
def getIndexerConfig(self):
capsuleDataDir = util.joinPaths(self.cfg.dataPath, 'capsules')
cfg = rpath_capsule_indexer.IndexerConfig()
dbDriver = self.db.db.driver
dbConnectString = self.db.db.db.database
cfg.configLine("store %s:///%s" % (dbDriver, dbConnectString))
cfg.configLine("indexDir %s/packages" % capsuleDataDir)
cfg.configLine("systemsPath %s/systems" % capsuleDataDir)
dataSources = self.db.platformMgr.listPlatformSources().platformSource
# XXX we only deal with RHN for now
if dataSources:
cfg.configLine("user RHN %s %s" % (dataSources[0].username,
dataSources[0].password))
# XXX channels are hardcoded for now
cfg.configLine("channels rhel-i386-as-4")
cfg.configLine("channels rhel-x86_64-as-4")
cfg.configLine("channels rhel-i386-server-5")
cfg.configLine("channels rhel-x86_64-server-5")
util.mkdirChain(capsuleDataDir)
return cfg
def getIndexer(self):
cfg = self.getIndexerConfig()
return rpath_capsule_indexer.Indexer(cfg)
|
Use the mint database for capsule data
|
Use the mint database for capsule data
|
Python
|
apache-2.0
|
sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint
|
from conary.lib import util
from mint.rest.db import manager
import rpath_capsule_indexer
class CapsuleManager(manager.Manager):
def getIndexerConfig(self):
capsuleDataDir = util.joinPaths(self.cfg.dataPath, 'capsules')
cfg = rpath_capsule_indexer.IndexerConfig()
- cfg.configLine("store sqlite:///%s/database.sqlite" %
- capsuleDataDir)
+ dbDriver = self.db.db.driver
+ dbConnectString = self.db.db.db.database
+ cfg.configLine("store %s:///%s" % (dbDriver, dbConnectString))
cfg.configLine("indexDir %s/packages" % capsuleDataDir)
cfg.configLine("systemsPath %s/systems" % capsuleDataDir)
dataSources = self.db.platformMgr.listPlatformSources().platformSource
# XXX we only deal with RHN for now
if dataSources:
cfg.configLine("user RHN %s %s" % (dataSources[0].username,
dataSources[0].password))
# XXX channels are hardcoded for now
cfg.configLine("channels rhel-i386-as-4")
cfg.configLine("channels rhel-x86_64-as-4")
cfg.configLine("channels rhel-i386-server-5")
cfg.configLine("channels rhel-x86_64-server-5")
util.mkdirChain(capsuleDataDir)
return cfg
def getIndexer(self):
cfg = self.getIndexerConfig()
return rpath_capsule_indexer.Indexer(cfg)
|
Use the mint database for capsule data
|
## Code Before:
from conary.lib import util
from mint.rest.db import manager
import rpath_capsule_indexer
class CapsuleManager(manager.Manager):
def getIndexerConfig(self):
capsuleDataDir = util.joinPaths(self.cfg.dataPath, 'capsules')
cfg = rpath_capsule_indexer.IndexerConfig()
cfg.configLine("store sqlite:///%s/database.sqlite" %
capsuleDataDir)
cfg.configLine("indexDir %s/packages" % capsuleDataDir)
cfg.configLine("systemsPath %s/systems" % capsuleDataDir)
dataSources = self.db.platformMgr.listPlatformSources().platformSource
# XXX we only deal with RHN for now
if dataSources:
cfg.configLine("user RHN %s %s" % (dataSources[0].username,
dataSources[0].password))
# XXX channels are hardcoded for now
cfg.configLine("channels rhel-i386-as-4")
cfg.configLine("channels rhel-x86_64-as-4")
cfg.configLine("channels rhel-i386-server-5")
cfg.configLine("channels rhel-x86_64-server-5")
util.mkdirChain(capsuleDataDir)
return cfg
def getIndexer(self):
cfg = self.getIndexerConfig()
return rpath_capsule_indexer.Indexer(cfg)
## Instruction:
Use the mint database for capsule data
## Code After:
from conary.lib import util
from mint.rest.db import manager
import rpath_capsule_indexer
class CapsuleManager(manager.Manager):
def getIndexerConfig(self):
capsuleDataDir = util.joinPaths(self.cfg.dataPath, 'capsules')
cfg = rpath_capsule_indexer.IndexerConfig()
dbDriver = self.db.db.driver
dbConnectString = self.db.db.db.database
cfg.configLine("store %s:///%s" % (dbDriver, dbConnectString))
cfg.configLine("indexDir %s/packages" % capsuleDataDir)
cfg.configLine("systemsPath %s/systems" % capsuleDataDir)
dataSources = self.db.platformMgr.listPlatformSources().platformSource
# XXX we only deal with RHN for now
if dataSources:
cfg.configLine("user RHN %s %s" % (dataSources[0].username,
dataSources[0].password))
# XXX channels are hardcoded for now
cfg.configLine("channels rhel-i386-as-4")
cfg.configLine("channels rhel-x86_64-as-4")
cfg.configLine("channels rhel-i386-server-5")
cfg.configLine("channels rhel-x86_64-server-5")
util.mkdirChain(capsuleDataDir)
return cfg
def getIndexer(self):
cfg = self.getIndexerConfig()
return rpath_capsule_indexer.Indexer(cfg)
|
// ... existing code ...
cfg = rpath_capsule_indexer.IndexerConfig()
dbDriver = self.db.db.driver
dbConnectString = self.db.db.db.database
cfg.configLine("store %s:///%s" % (dbDriver, dbConnectString))
cfg.configLine("indexDir %s/packages" % capsuleDataDir)
// ... rest of the code ...
|
589e2df8c9af8ce8102904c9cfebbf87ee2df744
|
ckanext/orgdashboards/tests/helpers.py
|
ckanext/orgdashboards/tests/helpers.py
|
from ckan.tests import factories
def create_mock_data(**kwargs):
mock_data = {}
mock_data['organization'] = factories.Organization()
mock_data['organization_name'] = mock_data['organization']['name']
mock_data['organization_id'] = mock_data['organization']['id']
mock_data['dataset'] = factories.Dataset(owner_org=mock_data['organization_id'])
mock_data['dataset_name'] = mock_data['dataset']['name']
mock_data['package_id'] = mock_data['dataset']['id']
mock_data['resource'] = factories.Resource(package_id=mock_data['package_id'])
mock_data['resource_name'] = mock_data['resource']['name']
mock_data['resource_id'] = mock_data['resource']['id']
mock_data['resource_view'] = factories.ResourceView(
resource_id=mock_data['resource_id'])
mock_data['resource_view_title'] = mock_data['resource_view']['title']
mock_data['context'] = {
'user': factories._get_action_user_name(kwargs)
}
return mock_data
|
''' Helper methods for tests '''
import string
import random
from ckan.tests import factories
def create_mock_data(**kwargs):
mock_data = {}
mock_data['organization'] = factories.Organization()
mock_data['organization_name'] = mock_data['organization']['name']
mock_data['organization_id'] = mock_data['organization']['id']
mock_data['dataset'] = factories.Dataset(owner_org=mock_data['organization_id'])
mock_data['dataset_name'] = mock_data['dataset']['name']
mock_data['package_id'] = mock_data['dataset']['id']
mock_data['resource'] = factories.Resource(package_id=mock_data['package_id'])
mock_data['resource_name'] = mock_data['resource']['name']
mock_data['resource_id'] = mock_data['resource']['id']
mock_data['resource_view'] = factories.ResourceView(
resource_id=mock_data['resource_id'])
mock_data['resource_view_title'] = mock_data['resource_view']['title']
mock_data['context'] = {
'user': factories._get_action_user_name(kwargs)
}
return mock_data
def id_generator(size=6, chars=string.ascii_lowercase + string.digits):
''' Create random id which is a combination of letters and numbers '''
return ''.join(random.choice(chars) for _ in range(size))
|
Add function for generating random id
|
Add function for generating random id
|
Python
|
agpl-3.0
|
ViderumGlobal/ckanext-orgdashboards,ViderumGlobal/ckanext-orgdashboards,ViderumGlobal/ckanext-orgdashboards,ViderumGlobal/ckanext-orgdashboards
|
+ ''' Helper methods for tests '''
+
+ import string
+ import random
+
from ckan.tests import factories
def create_mock_data(**kwargs):
mock_data = {}
mock_data['organization'] = factories.Organization()
mock_data['organization_name'] = mock_data['organization']['name']
mock_data['organization_id'] = mock_data['organization']['id']
mock_data['dataset'] = factories.Dataset(owner_org=mock_data['organization_id'])
mock_data['dataset_name'] = mock_data['dataset']['name']
mock_data['package_id'] = mock_data['dataset']['id']
mock_data['resource'] = factories.Resource(package_id=mock_data['package_id'])
mock_data['resource_name'] = mock_data['resource']['name']
mock_data['resource_id'] = mock_data['resource']['id']
mock_data['resource_view'] = factories.ResourceView(
resource_id=mock_data['resource_id'])
mock_data['resource_view_title'] = mock_data['resource_view']['title']
mock_data['context'] = {
'user': factories._get_action_user_name(kwargs)
}
return mock_data
+
+ def id_generator(size=6, chars=string.ascii_lowercase + string.digits):
+ ''' Create random id which is a combination of letters and numbers '''
+
+ return ''.join(random.choice(chars) for _ in range(size))
|
Add function for generating random id
|
## Code Before:
from ckan.tests import factories
def create_mock_data(**kwargs):
mock_data = {}
mock_data['organization'] = factories.Organization()
mock_data['organization_name'] = mock_data['organization']['name']
mock_data['organization_id'] = mock_data['organization']['id']
mock_data['dataset'] = factories.Dataset(owner_org=mock_data['organization_id'])
mock_data['dataset_name'] = mock_data['dataset']['name']
mock_data['package_id'] = mock_data['dataset']['id']
mock_data['resource'] = factories.Resource(package_id=mock_data['package_id'])
mock_data['resource_name'] = mock_data['resource']['name']
mock_data['resource_id'] = mock_data['resource']['id']
mock_data['resource_view'] = factories.ResourceView(
resource_id=mock_data['resource_id'])
mock_data['resource_view_title'] = mock_data['resource_view']['title']
mock_data['context'] = {
'user': factories._get_action_user_name(kwargs)
}
return mock_data
## Instruction:
Add function for generating random id
## Code After:
''' Helper methods for tests '''
import string
import random
from ckan.tests import factories
def create_mock_data(**kwargs):
mock_data = {}
mock_data['organization'] = factories.Organization()
mock_data['organization_name'] = mock_data['organization']['name']
mock_data['organization_id'] = mock_data['organization']['id']
mock_data['dataset'] = factories.Dataset(owner_org=mock_data['organization_id'])
mock_data['dataset_name'] = mock_data['dataset']['name']
mock_data['package_id'] = mock_data['dataset']['id']
mock_data['resource'] = factories.Resource(package_id=mock_data['package_id'])
mock_data['resource_name'] = mock_data['resource']['name']
mock_data['resource_id'] = mock_data['resource']['id']
mock_data['resource_view'] = factories.ResourceView(
resource_id=mock_data['resource_id'])
mock_data['resource_view_title'] = mock_data['resource_view']['title']
mock_data['context'] = {
'user': factories._get_action_user_name(kwargs)
}
return mock_data
def id_generator(size=6, chars=string.ascii_lowercase + string.digits):
''' Create random id which is a combination of letters and numbers '''
return ''.join(random.choice(chars) for _ in range(size))
|
// ... existing code ...
''' Helper methods for tests '''
import string
import random
from ckan.tests import factories
// ... modified code ...
return mock_data
def id_generator(size=6, chars=string.ascii_lowercase + string.digits):
''' Create random id which is a combination of letters and numbers '''
return ''.join(random.choice(chars) for _ in range(size))
// ... rest of the code ...
|
ac3c0e93adf35015d7f6cfc8c6cf2e6ec45cdeae
|
server/canonicalization/relationship_mapper.py
|
server/canonicalization/relationship_mapper.py
|
"""Contains functions to canonicalize relationships."""
from __future__ import absolute_import
from __future__ import print_function
from nltk.corpus import wordnet
from .utils import wordnet_helper
from .utils import common
def canonicalize_relationship(text):
words = common.clean_text(text).split()
freq = []
for word in words:
for pos in [wordnet.VERB, wordnet.ADV]:
freq.extend(wordnet_helper.lemma_counter(word,
pos=pos).most_common())
if freq:
return max(freq, key=lambda x: x[1])[0]
else:
return None
|
"""Contains functions to canonicalize relationships."""
from __future__ import absolute_import
from __future__ import print_function
import repoze.lru
from nltk.corpus import wordnet
from .utils import wordnet_helper
from .utils import common
@repoze.lru.lru_cache(4096)
def canonicalize_relationship(text):
words = common.clean_text(text).split()
freq = []
for word in words:
for pos in [wordnet.VERB, wordnet.ADV]:
freq.extend(wordnet_helper.lemma_counter(word,
pos=pos).most_common())
if freq:
return max(freq, key=lambda x: x[1])[0]
else:
return None
|
Add LRU for relationship mapper.
|
[master] Add LRU for relationship mapper.
|
Python
|
mit
|
hotpxl/canonicalization-server,hotpxl/canonicalization-server
|
"""Contains functions to canonicalize relationships."""
from __future__ import absolute_import
from __future__ import print_function
+ import repoze.lru
from nltk.corpus import wordnet
from .utils import wordnet_helper
from .utils import common
+ @repoze.lru.lru_cache(4096)
def canonicalize_relationship(text):
words = common.clean_text(text).split()
freq = []
for word in words:
for pos in [wordnet.VERB, wordnet.ADV]:
freq.extend(wordnet_helper.lemma_counter(word,
pos=pos).most_common())
if freq:
return max(freq, key=lambda x: x[1])[0]
else:
return None
|
Add LRU for relationship mapper.
|
## Code Before:
"""Contains functions to canonicalize relationships."""
from __future__ import absolute_import
from __future__ import print_function
from nltk.corpus import wordnet
from .utils import wordnet_helper
from .utils import common
def canonicalize_relationship(text):
words = common.clean_text(text).split()
freq = []
for word in words:
for pos in [wordnet.VERB, wordnet.ADV]:
freq.extend(wordnet_helper.lemma_counter(word,
pos=pos).most_common())
if freq:
return max(freq, key=lambda x: x[1])[0]
else:
return None
## Instruction:
Add LRU for relationship mapper.
## Code After:
"""Contains functions to canonicalize relationships."""
from __future__ import absolute_import
from __future__ import print_function
import repoze.lru
from nltk.corpus import wordnet
from .utils import wordnet_helper
from .utils import common
@repoze.lru.lru_cache(4096)
def canonicalize_relationship(text):
words = common.clean_text(text).split()
freq = []
for word in words:
for pos in [wordnet.VERB, wordnet.ADV]:
freq.extend(wordnet_helper.lemma_counter(word,
pos=pos).most_common())
if freq:
return max(freq, key=lambda x: x[1])[0]
else:
return None
|
// ... existing code ...
import repoze.lru
from nltk.corpus import wordnet
// ... modified code ...
@repoze.lru.lru_cache(4096)
def canonicalize_relationship(text):
// ... rest of the code ...
|
c5671ab2e5115ce9c022a97a088300dc408e2aa4
|
opendc/util/path_parser.py
|
opendc/util/path_parser.py
|
import json
import sys
import re
def parse(version, endpoint_path):
"""Map an HTTP call to an API path"""
with open('opendc/api/{}/paths.json'.format(version)) as paths_file:
paths = json.load(paths_file)
endpoint_path_parts = endpoint_path.split('/')
paths_parts = [x.split('/') for x in paths if len(x.split('/')) == len(endpoint_path_parts)]
for path_parts in paths_parts:
found = True
for (endpoint_part, part) in zip(endpoint_path_parts, path_parts):
print endpoint_part, part
if not part.startswith('{') and endpoint_part != part:
found = False
break
if found:
sys.stdout.flush()
return '{}/{}'.format(version, '/'.join(path_parts))
return None
|
import json
import sys
import re
def parse(version, endpoint_path):
"""Map an HTTP endpoint path to an API path"""
with open('opendc/api/{}/paths.json'.format(version)) as paths_file:
paths = json.load(paths_file)
endpoint_path_parts = endpoint_path.strip('/').split('/')
paths_parts = [x.split('/') for x in paths if len(x.split('/')) == len(endpoint_path_parts)]
for path_parts in paths_parts:
found = True
for (endpoint_part, part) in zip(endpoint_path_parts, path_parts):
if not part.startswith('{') and endpoint_part != part:
found = False
break
if found:
sys.stdout.flush()
return '{}/{}'.format(version, '/'.join(path_parts))
return None
|
Make path parser robust to trailing /
|
Make path parser robust to trailing /
|
Python
|
mit
|
atlarge-research/opendc-web-server,atlarge-research/opendc-web-server
|
import json
import sys
import re
def parse(version, endpoint_path):
- """Map an HTTP call to an API path"""
+ """Map an HTTP endpoint path to an API path"""
with open('opendc/api/{}/paths.json'.format(version)) as paths_file:
paths = json.load(paths_file)
- endpoint_path_parts = endpoint_path.split('/')
+ endpoint_path_parts = endpoint_path.strip('/').split('/')
paths_parts = [x.split('/') for x in paths if len(x.split('/')) == len(endpoint_path_parts)]
for path_parts in paths_parts:
found = True
for (endpoint_part, part) in zip(endpoint_path_parts, path_parts):
- print endpoint_part, part
if not part.startswith('{') and endpoint_part != part:
found = False
break
if found:
sys.stdout.flush()
return '{}/{}'.format(version, '/'.join(path_parts))
return None
|
Make path parser robust to trailing /
|
## Code Before:
import json
import sys
import re
def parse(version, endpoint_path):
"""Map an HTTP call to an API path"""
with open('opendc/api/{}/paths.json'.format(version)) as paths_file:
paths = json.load(paths_file)
endpoint_path_parts = endpoint_path.split('/')
paths_parts = [x.split('/') for x in paths if len(x.split('/')) == len(endpoint_path_parts)]
for path_parts in paths_parts:
found = True
for (endpoint_part, part) in zip(endpoint_path_parts, path_parts):
print endpoint_part, part
if not part.startswith('{') and endpoint_part != part:
found = False
break
if found:
sys.stdout.flush()
return '{}/{}'.format(version, '/'.join(path_parts))
return None
## Instruction:
Make path parser robust to trailing /
## Code After:
import json
import sys
import re
def parse(version, endpoint_path):
"""Map an HTTP endpoint path to an API path"""
with open('opendc/api/{}/paths.json'.format(version)) as paths_file:
paths = json.load(paths_file)
endpoint_path_parts = endpoint_path.strip('/').split('/')
paths_parts = [x.split('/') for x in paths if len(x.split('/')) == len(endpoint_path_parts)]
for path_parts in paths_parts:
found = True
for (endpoint_part, part) in zip(endpoint_path_parts, path_parts):
if not part.startswith('{') and endpoint_part != part:
found = False
break
if found:
sys.stdout.flush()
return '{}/{}'.format(version, '/'.join(path_parts))
return None
|
// ... existing code ...
def parse(version, endpoint_path):
"""Map an HTTP endpoint path to an API path"""
// ... modified code ...
endpoint_path_parts = endpoint_path.strip('/').split('/')
paths_parts = [x.split('/') for x in paths if len(x.split('/')) == len(endpoint_path_parts)]
...
for (endpoint_part, part) in zip(endpoint_path_parts, path_parts):
if not part.startswith('{') and endpoint_part != part:
// ... rest of the code ...
|
326e7ba1378b691ad6323c2559686f0c4d97b45f
|
flowgen/core.py
|
flowgen/core.py
|
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
from pypeg2 import parse
from pypeg2.xmlast import thing2xml
class FlowGen(object):
def __init__(self, args):
self.args = parser.parse_args(args)
def any_output(self):
return any([self.args.dump_source, self.args.dump_xml])
def safe_print(self, *args, **kwargs):
if not self.any_output():
print(*args, **kwargs)
def run(self):
data_input = self.args.infile.read()
tree = parse(data_input, Code)
if self.args.dump_xml:
print(thing2xml(tree, pretty=True).decode())
graph = Graph(tree)
if self.args.dump_source:
print(graph.get_source())
if self.args.preview:
graph.dot.view()
if self.args.outfile:
graph.save(self.args.outfile.name)
self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
|
from __future__ import print_function
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
from pypeg2 import parse
from pypeg2.xmlast import thing2xml
class FlowGen(object):
def __init__(self, args):
self.args = parser.parse_args(args)
def any_output(self):
return any([self.args.dump_source, self.args.dump_xml])
def safe_print(self, *args, **kwargs):
if not self.any_output():
print(*args, **kwargs)
def run(self):
data_input = self.args.infile.read()
tree = parse(data_input, Code)
if self.args.dump_xml:
print(thing2xml(tree, pretty=True).decode())
graph = Graph(tree)
if self.args.dump_source:
print(graph.get_source())
if self.args.preview:
graph.dot.view()
if self.args.outfile:
graph.save(self.args.outfile.name)
self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
|
Update py27 compatibility in print function
|
Update py27 compatibility in print function
|
Python
|
mit
|
ad-m/flowgen
|
+ from __future__ import print_function
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
from pypeg2 import parse
from pypeg2.xmlast import thing2xml
class FlowGen(object):
def __init__(self, args):
self.args = parser.parse_args(args)
def any_output(self):
return any([self.args.dump_source, self.args.dump_xml])
def safe_print(self, *args, **kwargs):
if not self.any_output():
print(*args, **kwargs)
def run(self):
data_input = self.args.infile.read()
tree = parse(data_input, Code)
if self.args.dump_xml:
print(thing2xml(tree, pretty=True).decode())
graph = Graph(tree)
if self.args.dump_source:
print(graph.get_source())
if self.args.preview:
graph.dot.view()
if self.args.outfile:
graph.save(self.args.outfile.name)
self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
|
Update py27 compatibility in print function
|
## Code Before:
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
from pypeg2 import parse
from pypeg2.xmlast import thing2xml
class FlowGen(object):
def __init__(self, args):
self.args = parser.parse_args(args)
def any_output(self):
return any([self.args.dump_source, self.args.dump_xml])
def safe_print(self, *args, **kwargs):
if not self.any_output():
print(*args, **kwargs)
def run(self):
data_input = self.args.infile.read()
tree = parse(data_input, Code)
if self.args.dump_xml:
print(thing2xml(tree, pretty=True).decode())
graph = Graph(tree)
if self.args.dump_source:
print(graph.get_source())
if self.args.preview:
graph.dot.view()
if self.args.outfile:
graph.save(self.args.outfile.name)
self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
## Instruction:
Update py27 compatibility in print function
## Code After:
from __future__ import print_function
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
from pypeg2 import parse
from pypeg2.xmlast import thing2xml
class FlowGen(object):
def __init__(self, args):
self.args = parser.parse_args(args)
def any_output(self):
return any([self.args.dump_source, self.args.dump_xml])
def safe_print(self, *args, **kwargs):
if not self.any_output():
print(*args, **kwargs)
def run(self):
data_input = self.args.infile.read()
tree = parse(data_input, Code)
if self.args.dump_xml:
print(thing2xml(tree, pretty=True).decode())
graph = Graph(tree)
if self.args.dump_source:
print(graph.get_source())
if self.args.preview:
graph.dot.view()
if self.args.outfile:
graph.save(self.args.outfile.name)
self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
|
# ... existing code ...
from __future__ import print_function
from flowgen.graph import Graph
# ... rest of the code ...
|
ad00d75cac0afe585853092d458a0d99c1373fc8
|
dlstats/fetchers/__init__.py
|
dlstats/fetchers/__init__.py
|
from . import eurostat, insee, world_bank, IMF, BEA
|
from .eurostat import Eurostat
from .insee import Insee
from .world_bank import WorldBank
from .IMF import IMF
from .BEA import BEA
__all__ = ['Eurostat', 'Insee', 'WorldBank', 'IMF', 'BEA']
|
Clean up the fetchers namespace
|
Clean up the fetchers namespace
|
Python
|
agpl-3.0
|
MichelJuillard/dlstats,mmalter/dlstats,MichelJuillard/dlstats,Widukind/dlstats,mmalter/dlstats,MichelJuillard/dlstats,Widukind/dlstats,mmalter/dlstats
|
- from . import eurostat, insee, world_bank, IMF, BEA
+ from .eurostat import Eurostat
+ from .insee import Insee
+ from .world_bank import WorldBank
+ from .IMF import IMF
+ from .BEA import BEA
+ __all__ = ['Eurostat', 'Insee', 'WorldBank', 'IMF', 'BEA']
+
|
Clean up the fetchers namespace
|
## Code Before:
from . import eurostat, insee, world_bank, IMF, BEA
## Instruction:
Clean up the fetchers namespace
## Code After:
from .eurostat import Eurostat
from .insee import Insee
from .world_bank import WorldBank
from .IMF import IMF
from .BEA import BEA
__all__ = ['Eurostat', 'Insee', 'WorldBank', 'IMF', 'BEA']
|
# ... existing code ...
from .eurostat import Eurostat
from .insee import Insee
from .world_bank import WorldBank
from .IMF import IMF
from .BEA import BEA
__all__ = ['Eurostat', 'Insee', 'WorldBank', 'IMF', 'BEA']
# ... rest of the code ...
|
09851ff2903db29703616da0fbc9ec003955712a
|
zerver/lib/markdown/preprocessor_priorities.py
|
zerver/lib/markdown/preprocessor_priorities.py
|
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
"help_relative_links": 475,
"setting": 450,
"fenced_code_block": 25,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
|
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
# "include_wrapper": 500,
"help_relative_links": 475,
"setting": 450,
# "normalize_whitespace": 30,
"fenced_code_block": 25,
# "html_block": 20,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
|
Document built-in preprocessor priorities for convenience.
|
markdown: Document built-in preprocessor priorities for convenience.
Fixes #19810
|
Python
|
apache-2.0
|
eeshangarg/zulip,rht/zulip,rht/zulip,kou/zulip,eeshangarg/zulip,rht/zulip,eeshangarg/zulip,zulip/zulip,rht/zulip,andersk/zulip,kou/zulip,eeshangarg/zulip,kou/zulip,andersk/zulip,andersk/zulip,andersk/zulip,kou/zulip,andersk/zulip,rht/zulip,rht/zulip,zulip/zulip,kou/zulip,zulip/zulip,zulip/zulip,eeshangarg/zulip,andersk/zulip,kou/zulip,kou/zulip,andersk/zulip,eeshangarg/zulip,rht/zulip,zulip/zulip,zulip/zulip,eeshangarg/zulip,zulip/zulip
|
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
+ # "include_wrapper": 500,
"help_relative_links": 475,
"setting": 450,
+ # "normalize_whitespace": 30,
"fenced_code_block": 25,
+ # "html_block": 20,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
|
Document built-in preprocessor priorities for convenience.
|
## Code Before:
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
"help_relative_links": 475,
"setting": 450,
"fenced_code_block": 25,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
## Instruction:
Document built-in preprocessor priorities for convenience.
## Code After:
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
# "include_wrapper": 500,
"help_relative_links": 475,
"setting": 450,
# "normalize_whitespace": 30,
"fenced_code_block": 25,
# "html_block": 20,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
|
// ... existing code ...
"include": 500,
# "include_wrapper": 500,
"help_relative_links": 475,
// ... modified code ...
"setting": 450,
# "normalize_whitespace": 30,
"fenced_code_block": 25,
# "html_block": 20,
"tabbed_sections": -500,
// ... rest of the code ...
|
bcc79588e5e49c928210d6830fbe1a7386fcf5bb
|
apps/search/tasks.py
|
apps/search/tasks.py
|
import logging
from django.conf import settings
from django.db.models.signals import pre_delete
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
|
import logging
import warnings
from django.conf import settings
from django.db.models.signals import pre_delete
# ignore a deprecation warning from elasticutils until the fix is released
# refs https://github.com/mozilla/elasticutils/pull/160
warnings.filterwarnings("ignore",
category=DeprecationWarning,
module='celery.decorators')
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
|
Stop a deprecation warning that is thrown in elasticutils.
|
Stop a deprecation warning that is thrown in elasticutils.
This is not going to be needed once https://github.com/mozilla/elasticutils/pull/160
has been released.
|
Python
|
mpl-2.0
|
jezdez/kuma,whip112/Whip112,FrankBian/kuma,YOTOV-LIMITED/kuma,SphinxKnight/kuma,jgmize/kuma,RanadeepPolavarapu/kuma,ollie314/kuma,nhenezi/kuma,FrankBian/kuma,surajssd/kuma,YOTOV-LIMITED/kuma,yfdyh000/kuma,cindyyu/kuma,SphinxKnight/kuma,openjck/kuma,MenZil/kuma,RanadeepPolavarapu/kuma,yfdyh000/kuma,whip112/Whip112,carnell69/kuma,YOTOV-LIMITED/kuma,ollie314/kuma,RanadeepPolavarapu/kuma,SphinxKnight/kuma,surajssd/kuma,safwanrahman/kuma,utkbansal/kuma,groovecoder/kuma,chirilo/kuma,openjck/kuma,cindyyu/kuma,ronakkhunt/kuma,safwanrahman/kuma,robhudson/kuma,Elchi3/kuma,Elchi3/kuma,biswajitsahu/kuma,robhudson/kuma,yfdyh000/kuma,robhudson/kuma,biswajitsahu/kuma,hoosteeno/kuma,chirilo/kuma,groovecoder/kuma,scrollback/kuma,ronakkhunt/kuma,Elchi3/kuma,davehunt/kuma,ollie314/kuma,jwhitlock/kuma,tximikel/kuma,Elchi3/kuma,carnell69/kuma,hoosteeno/kuma,utkbansal/kuma,davehunt/kuma,anaran/kuma,mastizada/kuma,carnell69/kuma,bluemini/kuma,jwhitlock/kuma,SphinxKnight/kuma,scrollback/kuma,jgmize/kuma,chirilo/kuma,cindyyu/kuma,biswajitsahu/kuma,mozilla/kuma,a2sheppy/kuma,a2sheppy/kuma,nhenezi/kuma,MenZil/kuma,ollie314/kuma,tximikel/kuma,davidyezsetz/kuma,a2sheppy/kuma,surajssd/kuma,davehunt/kuma,yfdyh000/kuma,biswajitsahu/kuma,darkwing/kuma,RanadeepPolavarapu/kuma,tximikel/kuma,jezdez/kuma,bluemini/kuma,whip112/Whip112,surajssd/kuma,nhenezi/kuma,mozilla/kuma,openjck/kuma,nhenezi/kuma,davidyezsetz/kuma,darkwing/kuma,carnell69/kuma,scrollback/kuma,MenZil/kuma,MenZil/kuma,jgmize/kuma,varunkamra/kuma,darkwing/kuma,hoosteeno/kuma,cindyyu/kuma,groovecoder/kuma,YOTOV-LIMITED/kuma,darkwing/kuma,openjck/kuma,groovecoder/kuma,robhudson/kuma,openjck/kuma,ollie314/kuma,utkbansal/kuma,davehunt/kuma,escattone/kuma,groovecoder/kuma,bluemini/kuma,ronakkhunt/kuma,ollie314/kuma,jgmize/kuma,surajssd/kuma,a2sheppy/kuma,hoosteeno/kuma,jezdez/kuma,YOTOV-LIMITED/kuma,jwhitlock/kuma,utkbansal/kuma,a2sheppy/kuma,cindyyu/kuma,varunkamra/kuma,jwhitlock/kuma,jezdez/kuma,varunkamra/kuma,carnell69/kuma,carnell69/kuma
,mozilla/kuma,biswajitsahu/kuma,anaran/kuma,yfdyh000/kuma,YOTOV-LIMITED/kuma,escattone/kuma,scrollback/kuma,varunkamra/kuma,utkbansal/kuma,RanadeepPolavarapu/kuma,MenZil/kuma,SphinxKnight/kuma,nhenezi/kuma,davehunt/kuma,whip112/Whip112,hoosteeno/kuma,chirilo/kuma,biswajitsahu/kuma,mastizada/kuma,safwanrahman/kuma,davidyezsetz/kuma,anaran/kuma,Elchi3/kuma,bluemini/kuma,whip112/Whip112,FrankBian/kuma,utkbansal/kuma,varunkamra/kuma,safwanrahman/kuma,ronakkhunt/kuma,tximikel/kuma,ronakkhunt/kuma,davehunt/kuma,tximikel/kuma,anaran/kuma,chirilo/kuma,darkwing/kuma,openjck/kuma,FrankBian/kuma,mastizada/kuma,anaran/kuma,varunkamra/kuma,groovecoder/kuma,davidyezsetz/kuma,SphinxKnight/kuma,bluemini/kuma,anaran/kuma,ronakkhunt/kuma,robhudson/kuma,MenZil/kuma,jezdez/kuma,bluemini/kuma,mozilla/kuma,chirilo/kuma,yfdyh000/kuma,scrollback/kuma,cindyyu/kuma,jgmize/kuma,safwanrahman/kuma,safwanrahman/kuma,whip112/Whip112,darkwing/kuma,jwhitlock/kuma,FrankBian/kuma,jgmize/kuma,davidyezsetz/kuma,RanadeepPolavarapu/kuma,hoosteeno/kuma,mastizada/kuma,surajssd/kuma,mozilla/kuma,escattone/kuma,robhudson/kuma,tximikel/kuma,jezdez/kuma
|
import logging
+ import warnings
from django.conf import settings
from django.db.models.signals import pre_delete
+
+ # ignore a deprecation warning from elasticutils until the fix is released
+ # refs https://github.com/mozilla/elasticutils/pull/160
+ warnings.filterwarnings("ignore",
+ category=DeprecationWarning,
+ module='celery.decorators')
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
|
Stop a deprecation warning that is thrown in elasticutils.
|
## Code Before:
import logging
from django.conf import settings
from django.db.models.signals import pre_delete
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
## Instruction:
Stop a deprecation warning that is thrown in elasticutils.
## Code After:
import logging
import warnings
from django.conf import settings
from django.db.models.signals import pre_delete
# ignore a deprecation warning from elasticutils until the fix is released
# refs https://github.com/mozilla/elasticutils/pull/160
warnings.filterwarnings("ignore",
category=DeprecationWarning,
module='celery.decorators')
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
|
...
import logging
import warnings
...
from django.db.models.signals import pre_delete
# ignore a deprecation warning from elasticutils until the fix is released
# refs https://github.com/mozilla/elasticutils/pull/160
warnings.filterwarnings("ignore",
category=DeprecationWarning,
module='celery.decorators')
...
|
bcaee4414402017985f8a25134a5cecc99a1c8bb
|
docker/build_scripts/ssl-check.py
|
docker/build_scripts/ssl-check.py
|
GOOD_SSL = "https://google.com"
BAD_SSL = "https://self-signed.badssl.com"
import sys
print("Testing SSL certificate checking for Python:", sys.version)
if (sys.version_info[:2] < (2, 7)
or sys.version_info[:2] < (3, 4)):
print("This version never checks SSL certs; skipping tests")
sys.exit(0)
if sys.version_info[0] >= 3:
from urllib.request import urlopen
EXC = OSError
else:
from urllib import urlopen
EXC = IOError
print("Connecting to %s should work" % (GOOD_SSL,))
urlopen(GOOD_SSL)
print("...it did, yay.")
print("Connecting to %s should fail" % (BAD_SSL,))
try:
urlopen(BAD_SSL)
# If we get here then we failed:
print("...it DIDN'T!!!!!11!!1one!")
sys.exit(1)
except EXC:
print("...it did, yay.")
|
GOOD_SSL = "https://google.com"
BAD_SSL = "https://self-signed.badssl.com"
import sys
print("Testing SSL certificate checking for Python:", sys.version)
if (sys.version_info[:2] < (3, 4)):
print("This version never checks SSL certs; skipping tests")
sys.exit(0)
if sys.version_info[0] >= 3:
from urllib.request import urlopen
EXC = OSError
else:
from urllib import urlopen
EXC = IOError
print("Connecting to %s should work" % (GOOD_SSL,))
urlopen(GOOD_SSL)
print("...it did, yay.")
print("Connecting to %s should fail" % (BAD_SSL,))
try:
urlopen(BAD_SSL)
# If we get here then we failed:
print("...it DIDN'T!!!!!11!!1one!")
sys.exit(1)
except EXC:
print("...it did, yay.")
|
Remove leftover relic from supporting CPython 2.6.
|
Remove leftover relic from supporting CPython 2.6.
|
Python
|
mit
|
pypa/manylinux,manylinux/manylinux,pypa/manylinux,pypa/manylinux,manylinux/manylinux,Parsely/manylinux,Parsely/manylinux
|
GOOD_SSL = "https://google.com"
BAD_SSL = "https://self-signed.badssl.com"
import sys
print("Testing SSL certificate checking for Python:", sys.version)
- if (sys.version_info[:2] < (2, 7)
+ if (sys.version_info[:2] < (3, 4)):
- or sys.version_info[:2] < (3, 4)):
print("This version never checks SSL certs; skipping tests")
sys.exit(0)
if sys.version_info[0] >= 3:
from urllib.request import urlopen
EXC = OSError
else:
from urllib import urlopen
EXC = IOError
print("Connecting to %s should work" % (GOOD_SSL,))
urlopen(GOOD_SSL)
print("...it did, yay.")
print("Connecting to %s should fail" % (BAD_SSL,))
try:
urlopen(BAD_SSL)
# If we get here then we failed:
print("...it DIDN'T!!!!!11!!1one!")
sys.exit(1)
except EXC:
print("...it did, yay.")
+
|
Remove leftover relic from supporting CPython 2.6.
|
## Code Before:
GOOD_SSL = "https://google.com"
BAD_SSL = "https://self-signed.badssl.com"
import sys
print("Testing SSL certificate checking for Python:", sys.version)
if (sys.version_info[:2] < (2, 7)
or sys.version_info[:2] < (3, 4)):
print("This version never checks SSL certs; skipping tests")
sys.exit(0)
if sys.version_info[0] >= 3:
from urllib.request import urlopen
EXC = OSError
else:
from urllib import urlopen
EXC = IOError
print("Connecting to %s should work" % (GOOD_SSL,))
urlopen(GOOD_SSL)
print("...it did, yay.")
print("Connecting to %s should fail" % (BAD_SSL,))
try:
urlopen(BAD_SSL)
# If we get here then we failed:
print("...it DIDN'T!!!!!11!!1one!")
sys.exit(1)
except EXC:
print("...it did, yay.")
## Instruction:
Remove leftover relic from supporting CPython 2.6.
## Code After:
GOOD_SSL = "https://google.com"
BAD_SSL = "https://self-signed.badssl.com"
import sys
print("Testing SSL certificate checking for Python:", sys.version)
if (sys.version_info[:2] < (3, 4)):
print("This version never checks SSL certs; skipping tests")
sys.exit(0)
if sys.version_info[0] >= 3:
from urllib.request import urlopen
EXC = OSError
else:
from urllib import urlopen
EXC = IOError
print("Connecting to %s should work" % (GOOD_SSL,))
urlopen(GOOD_SSL)
print("...it did, yay.")
print("Connecting to %s should fail" % (BAD_SSL,))
try:
urlopen(BAD_SSL)
# If we get here then we failed:
print("...it DIDN'T!!!!!11!!1one!")
sys.exit(1)
except EXC:
print("...it did, yay.")
|
// ... existing code ...
if (sys.version_info[:2] < (3, 4)):
print("This version never checks SSL certs; skipping tests")
// ... rest of the code ...
|
a3a5d2d6b76a4e903fea232b746b2df8b208ec9e
|
km3pipe/tests/test_plot.py
|
km3pipe/tests/test_plot.py
|
import numpy as np
from km3pipe.testing import TestCase
from km3pipe.plot import bincenters
__author__ = "Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Moritz Lotze"
__email__ = "[email protected]"
__status__ = "Development"
class TestBins(TestCase):
def test_binlims(self):
bins = np.linspace(0, 20, 21)
assert bincenters(bins).shape[0] == bins.shape[0] - 1
|
import numpy as np
from km3pipe.testing import TestCase, patch
from km3pipe.plot import bincenters, meshgrid, automeshgrid, diag
__author__ = "Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Moritz Lotze"
__email__ = "[email protected]"
__status__ = "Development"
class TestBins(TestCase):
def test_binlims(self):
bins = np.linspace(0, 20, 21)
assert bincenters(bins).shape[0] == bins.shape[0] - 1
class TestMeshStuff(TestCase):
def test_meshgrid(self):
xx, yy = meshgrid(-1, 1, 0.8)
assert np.allclose([[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6]], xx)
assert np.allclose([[-1.0, -1.0, -1.0],
[-0.2, -0.2, -0.2],
[0.6, 0.6, 0.6]], yy)
def test_meshgrid_with_y_specs(self):
xx, yy = meshgrid(-1, 1, 0.8, -10, 10, 8)
assert np.allclose([[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6]], xx)
assert np.allclose([[-10, -10, -10],
[-2, -2, -2],
[6, 6, 6]], yy)
class TestDiag(TestCase):
def test_call(self):
diag()
|
Add tests for plot functions
|
Add tests for plot functions
|
Python
|
mit
|
tamasgal/km3pipe,tamasgal/km3pipe
|
import numpy as np
- from km3pipe.testing import TestCase
+ from km3pipe.testing import TestCase, patch
- from km3pipe.plot import bincenters
+ from km3pipe.plot import bincenters, meshgrid, automeshgrid, diag
__author__ = "Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Moritz Lotze"
__email__ = "[email protected]"
__status__ = "Development"
class TestBins(TestCase):
def test_binlims(self):
bins = np.linspace(0, 20, 21)
assert bincenters(bins).shape[0] == bins.shape[0] - 1
+
+ class TestMeshStuff(TestCase):
+ def test_meshgrid(self):
+ xx, yy = meshgrid(-1, 1, 0.8)
+ assert np.allclose([[-1.0, -0.2, 0.6],
+ [-1.0, -0.2, 0.6],
+ [-1.0, -0.2, 0.6]], xx)
+ assert np.allclose([[-1.0, -1.0, -1.0],
+ [-0.2, -0.2, -0.2],
+ [0.6, 0.6, 0.6]], yy)
+
+ def test_meshgrid_with_y_specs(self):
+ xx, yy = meshgrid(-1, 1, 0.8, -10, 10, 8)
+ assert np.allclose([[-1.0, -0.2, 0.6],
+ [-1.0, -0.2, 0.6],
+ [-1.0, -0.2, 0.6]], xx)
+ assert np.allclose([[-10, -10, -10],
+ [-2, -2, -2],
+ [6, 6, 6]], yy)
+
+
+ class TestDiag(TestCase):
+ def test_call(self):
+ diag()
+
|
Add tests for plot functions
|
## Code Before:
import numpy as np
from km3pipe.testing import TestCase
from km3pipe.plot import bincenters
__author__ = "Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Moritz Lotze"
__email__ = "[email protected]"
__status__ = "Development"
class TestBins(TestCase):
def test_binlims(self):
bins = np.linspace(0, 20, 21)
assert bincenters(bins).shape[0] == bins.shape[0] - 1
## Instruction:
Add tests for plot functions
## Code After:
import numpy as np
from km3pipe.testing import TestCase, patch
from km3pipe.plot import bincenters, meshgrid, automeshgrid, diag
__author__ = "Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Moritz Lotze"
__email__ = "[email protected]"
__status__ = "Development"
class TestBins(TestCase):
def test_binlims(self):
bins = np.linspace(0, 20, 21)
assert bincenters(bins).shape[0] == bins.shape[0] - 1
class TestMeshStuff(TestCase):
def test_meshgrid(self):
xx, yy = meshgrid(-1, 1, 0.8)
assert np.allclose([[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6]], xx)
assert np.allclose([[-1.0, -1.0, -1.0],
[-0.2, -0.2, -0.2],
[0.6, 0.6, 0.6]], yy)
def test_meshgrid_with_y_specs(self):
xx, yy = meshgrid(-1, 1, 0.8, -10, 10, 8)
assert np.allclose([[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6]], xx)
assert np.allclose([[-10, -10, -10],
[-2, -2, -2],
[6, 6, 6]], yy)
class TestDiag(TestCase):
def test_call(self):
diag()
|
// ... existing code ...
from km3pipe.testing import TestCase, patch
from km3pipe.plot import bincenters, meshgrid, automeshgrid, diag
// ... modified code ...
assert bincenters(bins).shape[0] == bins.shape[0] - 1
class TestMeshStuff(TestCase):
def test_meshgrid(self):
xx, yy = meshgrid(-1, 1, 0.8)
assert np.allclose([[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6]], xx)
assert np.allclose([[-1.0, -1.0, -1.0],
[-0.2, -0.2, -0.2],
[0.6, 0.6, 0.6]], yy)
def test_meshgrid_with_y_specs(self):
xx, yy = meshgrid(-1, 1, 0.8, -10, 10, 8)
assert np.allclose([[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6]], xx)
assert np.allclose([[-10, -10, -10],
[-2, -2, -2],
[6, 6, 6]], yy)
class TestDiag(TestCase):
def test_call(self):
diag()
// ... rest of the code ...
|
98f7c1080765e00954d0c38a98ab1bb3e207c059
|
podcoder.py
|
podcoder.py
|
from podpublish import configuration
from podpublish import encoder
from podpublish import uploader
def main():
config = configuration.Configuration('podcoder.ini')
encoder.audio_encode(config, 'mp3')
encoder.mp3_tag(config)
encoder.mp3_coverart(config)
encoder.audio_encode(config, 'ogg')
encoder.ogg_tag(config)
encoder.ogg_coverart(config)
encoder.png_header(config)
encoder.png_poster(config)
encoder.mkv_encode(config)
uploader.youtube_upload(config)
if __name__ == '__main__':
main()
|
from podpublish import configuration
from podpublish import encoder
def main():
config = configuration.Configuration('podcoder.ini')
if not config.mp3['skip']:
encoder.audio_encode(config, 'mp3')
encoder.mp3_tag(config)
encoder.mp3_coverart(config)
if not config.ogg['skip']:
encoder.audio_encode(config, 'ogg')
encoder.ogg_tag(config)
encoder.ogg_coverart(config)
if not config.youtube['skip']:
encoder.png_header(config)
encoder.png_poster(config)
encoder.mkv_encode(config)
if __name__ == '__main__':
main()
|
Determine what to encode based in skip options.
|
Determine what to encode based in skip options.
|
Python
|
lgpl-2.1
|
rikai/podpublish
|
from podpublish import configuration
from podpublish import encoder
- from podpublish import uploader
def main():
config = configuration.Configuration('podcoder.ini')
+ if not config.mp3['skip']:
- encoder.audio_encode(config, 'mp3')
+ encoder.audio_encode(config, 'mp3')
- encoder.mp3_tag(config)
+ encoder.mp3_tag(config)
- encoder.mp3_coverart(config)
+ encoder.mp3_coverart(config)
+
+ if not config.ogg['skip']:
- encoder.audio_encode(config, 'ogg')
+ encoder.audio_encode(config, 'ogg')
- encoder.ogg_tag(config)
+ encoder.ogg_tag(config)
- encoder.ogg_coverart(config)
+ encoder.ogg_coverart(config)
+
+ if not config.youtube['skip']:
- encoder.png_header(config)
+ encoder.png_header(config)
- encoder.png_poster(config)
+ encoder.png_poster(config)
- encoder.mkv_encode(config)
+ encoder.mkv_encode(config)
- uploader.youtube_upload(config)
if __name__ == '__main__':
main()
|
Determine what to encode based in skip options.
|
## Code Before:
from podpublish import configuration
from podpublish import encoder
from podpublish import uploader
def main():
config = configuration.Configuration('podcoder.ini')
encoder.audio_encode(config, 'mp3')
encoder.mp3_tag(config)
encoder.mp3_coverart(config)
encoder.audio_encode(config, 'ogg')
encoder.ogg_tag(config)
encoder.ogg_coverart(config)
encoder.png_header(config)
encoder.png_poster(config)
encoder.mkv_encode(config)
uploader.youtube_upload(config)
if __name__ == '__main__':
main()
## Instruction:
Determine what to encode based in skip options.
## Code After:
from podpublish import configuration
from podpublish import encoder
def main():
config = configuration.Configuration('podcoder.ini')
if not config.mp3['skip']:
encoder.audio_encode(config, 'mp3')
encoder.mp3_tag(config)
encoder.mp3_coverart(config)
if not config.ogg['skip']:
encoder.audio_encode(config, 'ogg')
encoder.ogg_tag(config)
encoder.ogg_coverart(config)
if not config.youtube['skip']:
encoder.png_header(config)
encoder.png_poster(config)
encoder.mkv_encode(config)
if __name__ == '__main__':
main()
|
...
from podpublish import encoder
...
config = configuration.Configuration('podcoder.ini')
if not config.mp3['skip']:
encoder.audio_encode(config, 'mp3')
encoder.mp3_tag(config)
encoder.mp3_coverart(config)
if not config.ogg['skip']:
encoder.audio_encode(config, 'ogg')
encoder.ogg_tag(config)
encoder.ogg_coverart(config)
if not config.youtube['skip']:
encoder.png_header(config)
encoder.png_poster(config)
encoder.mkv_encode(config)
...
|
565c95ce9a8ff96d177196c6dbf8d8f88cdfa029
|
poyo/exceptions.py
|
poyo/exceptions.py
|
class PoyoException(Exception):
"""Super class for all of Poyo's exceptions."""
class NoMatchException(PoyoException):
"""Raised when the parser cannot find a pattern that matches the given
string.
"""
class NoParentException(PoyoException):
"""Raised when there is no parser object at the given level.
"""
class NoTypeException(PoyoException):
"""Raised when the parser is unable to determine the actual type for a
given string.
"""
|
class PoyoException(Exception):
"""Super class for all of Poyo's exceptions."""
class NoMatchException(PoyoException):
"""Raised when the parser cannot find a pattern that matches the given
string.
"""
class NoParentException(PoyoException):
"""Raised when there is no parser object at the given level.
"""
class NoTypeException(PoyoException):
"""Raised when the parser is unable to determine the actual type for a
given string.
"""
class IgnoredMatchException(PoyoException):
"""Raised when a match does result in a Python representation such as a
comment or a blank line.
"""
|
Add an error class for string data that is ignored by the parser
|
Add an error class for string data that is ignored by the parser
|
Python
|
mit
|
hackebrot/poyo
|
class PoyoException(Exception):
"""Super class for all of Poyo's exceptions."""
class NoMatchException(PoyoException):
"""Raised when the parser cannot find a pattern that matches the given
string.
"""
class NoParentException(PoyoException):
"""Raised when there is no parser object at the given level.
"""
class NoTypeException(PoyoException):
"""Raised when the parser is unable to determine the actual type for a
given string.
"""
+
+ class IgnoredMatchException(PoyoException):
+ """Raised when a match does result in a Python representation such as a
+ comment or a blank line.
+ """
+
|
Add an error class for string data that is ignored by the parser
|
## Code Before:
class PoyoException(Exception):
"""Super class for all of Poyo's exceptions."""
class NoMatchException(PoyoException):
"""Raised when the parser cannot find a pattern that matches the given
string.
"""
class NoParentException(PoyoException):
"""Raised when there is no parser object at the given level.
"""
class NoTypeException(PoyoException):
"""Raised when the parser is unable to determine the actual type for a
given string.
"""
## Instruction:
Add an error class for string data that is ignored by the parser
## Code After:
class PoyoException(Exception):
"""Super class for all of Poyo's exceptions."""
class NoMatchException(PoyoException):
"""Raised when the parser cannot find a pattern that matches the given
string.
"""
class NoParentException(PoyoException):
"""Raised when there is no parser object at the given level.
"""
class NoTypeException(PoyoException):
"""Raised when the parser is unable to determine the actual type for a
given string.
"""
class IgnoredMatchException(PoyoException):
"""Raised when a match does result in a Python representation such as a
comment or a blank line.
"""
|
...
"""
class IgnoredMatchException(PoyoException):
"""Raised when a match does result in a Python representation such as a
comment or a blank line.
"""
...
|
d5f782fc7a8c7835af0d4d2810a923d218dea938
|
mplwidget.py
|
mplwidget.py
|
from PyQt4 import QtGui
import matplotlib as mpl
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
import matplotlib.mlab as mlab
import matplotlib.gridspec as gridspec
class MplCanvas(FigureCanvas):
def __init__(self):
self.fig = Figure()
FigureCanvas.__init__(self, self.fig)
FigureCanvas.setSizePolicy(self,
QtGui.QSizePolicy.Expanding,
QtGui.QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
def resizeEvent(self, event):
FigureCanvas.resizeEvent(self, event)
class MplWidget(QtGui.QWidget):
def __init__(self, parent = None):
QtGui.QWidget.__init__(self, parent)
self.canvas = MplCanvas()
self.canvas.setParent(self)
self.mpl_toolbar = NavigationToolbar(self.canvas, self)
|
from PyQt4 import QtGui,QtCore
import matplotlib as mpl
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
import matplotlib.mlab as mlab
import matplotlib.gridspec as gridspec
class MplCanvas(FigureCanvas):
def __init__(self):
self.fig = Figure()
FigureCanvas.__init__(self, self.fig)
FigureCanvas.setSizePolicy(self,
QtGui.QSizePolicy.Expanding,
QtGui.QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
def sizeHint(self):
w, h = self.get_width_height()
return QtCore.QSize(w,h)
class MplWidget(QtGui.QWidget):
def __init__(self, parent = None):
QtGui.QWidget.__init__(self, parent)
self.canvas = MplCanvas()
self.mpl_toolbar = NavigationToolbar(self.canvas, self)
layout = QtGui.QVBoxLayout()
self.setLayout(layout)
layout.addWidget(self.canvas)
|
Expand figure when window is resized
|
Expand figure when window is resized
|
Python
|
apache-2.0
|
scholi/pyOmicron
|
- from PyQt4 import QtGui
+ from PyQt4 import QtGui,QtCore
import matplotlib as mpl
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
import matplotlib.mlab as mlab
import matplotlib.gridspec as gridspec
class MplCanvas(FigureCanvas):
def __init__(self):
self.fig = Figure()
FigureCanvas.__init__(self, self.fig)
FigureCanvas.setSizePolicy(self,
QtGui.QSizePolicy.Expanding,
QtGui.QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
- def resizeEvent(self, event):
- FigureCanvas.resizeEvent(self, event)
+ def sizeHint(self):
+ w, h = self.get_width_height()
+ return QtCore.QSize(w,h)
class MplWidget(QtGui.QWidget):
def __init__(self, parent = None):
QtGui.QWidget.__init__(self, parent)
self.canvas = MplCanvas()
- self.canvas.setParent(self)
self.mpl_toolbar = NavigationToolbar(self.canvas, self)
+ layout = QtGui.QVBoxLayout()
+ self.setLayout(layout)
+ layout.addWidget(self.canvas)
|
Expand figure when window is resized
|
## Code Before:
from PyQt4 import QtGui
import matplotlib as mpl
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
import matplotlib.mlab as mlab
import matplotlib.gridspec as gridspec
class MplCanvas(FigureCanvas):
def __init__(self):
self.fig = Figure()
FigureCanvas.__init__(self, self.fig)
FigureCanvas.setSizePolicy(self,
QtGui.QSizePolicy.Expanding,
QtGui.QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
def resizeEvent(self, event):
FigureCanvas.resizeEvent(self, event)
class MplWidget(QtGui.QWidget):
def __init__(self, parent = None):
QtGui.QWidget.__init__(self, parent)
self.canvas = MplCanvas()
self.canvas.setParent(self)
self.mpl_toolbar = NavigationToolbar(self.canvas, self)
## Instruction:
Expand figure when window is resized
## Code After:
from PyQt4 import QtGui,QtCore
import matplotlib as mpl
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
import matplotlib.mlab as mlab
import matplotlib.gridspec as gridspec
class MplCanvas(FigureCanvas):
def __init__(self):
self.fig = Figure()
FigureCanvas.__init__(self, self.fig)
FigureCanvas.setSizePolicy(self,
QtGui.QSizePolicy.Expanding,
QtGui.QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
def sizeHint(self):
w, h = self.get_width_height()
return QtCore.QSize(w,h)
class MplWidget(QtGui.QWidget):
def __init__(self, parent = None):
QtGui.QWidget.__init__(self, parent)
self.canvas = MplCanvas()
self.mpl_toolbar = NavigationToolbar(self.canvas, self)
layout = QtGui.QVBoxLayout()
self.setLayout(layout)
layout.addWidget(self.canvas)
|
// ... existing code ...
from PyQt4 import QtGui,QtCore
import matplotlib as mpl
// ... modified code ...
FigureCanvas.updateGeometry(self)
def sizeHint(self):
w, h = self.get_width_height()
return QtCore.QSize(w,h)
...
self.canvas = MplCanvas()
self.mpl_toolbar = NavigationToolbar(self.canvas, self)
layout = QtGui.QVBoxLayout()
self.setLayout(layout)
layout.addWidget(self.canvas)
// ... rest of the code ...
|
c15dab903d3759578449279cc034d766d362d41f
|
rest_framework/authtoken/serializers.py
|
rest_framework/authtoken/serializers.py
|
from django.contrib.auth import authenticate
from rest_framework import serializers
class AuthTokenSerializer(serializers.Serializer):
username = serializers.CharField()
password = serializers.CharField()
def validate(self, attrs):
username = attrs.get('username')
password = attrs.get('password')
if username and password:
user = authenticate(username=username, password=password)
if user:
if not user.is_active:
raise serializers.ValidationError('User account is disabled.')
attrs['user'] = user
return attrs
else:
raise serializers.ValidationError('Unable to login with provided credentials.')
else:
raise serializers.ValidationError('Must include "username" and "password"')
|
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class AuthTokenSerializer(serializers.Serializer):
username = serializers.CharField()
password = serializers.CharField()
def validate(self, attrs):
username = attrs.get('username')
password = attrs.get('password')
if username and password:
user = authenticate(username=username, password=password)
if user:
if not user.is_active:
msg = _('User account is disabled.')
raise serializers.ValidationError()
attrs['user'] = user
return attrs
else:
msg = _('Unable to login with provided credentials.')
raise serializers.ValidationError(msg)
else:
msg = _('Must include "username" and "password"')
raise serializers.ValidationError(msg)
|
Mark strings in AuthTokenSerializer as translatable
|
Mark strings in AuthTokenSerializer as translatable
|
Python
|
bsd-2-clause
|
linovia/django-rest-framework,nhorelik/django-rest-framework,rafaelang/django-rest-framework,iheitlager/django-rest-framework,fishky/django-rest-framework,bluedazzle/django-rest-framework,damycra/django-rest-framework,HireAnEsquire/django-rest-framework,jerryhebert/django-rest-framework,gregmuellegger/django-rest-framework,ashishfinoit/django-rest-framework,kennydude/django-rest-framework,rafaelcaricio/django-rest-framework,YBJAY00000/django-rest-framework,potpath/django-rest-framework,edx/django-rest-framework,elim/django-rest-framework,rhblind/django-rest-framework,krinart/django-rest-framework,James1345/django-rest-framework,kezabelle/django-rest-framework,elim/django-rest-framework,jpadilla/django-rest-framework,d0ugal/django-rest-framework,davesque/django-rest-framework,hnakamur/django-rest-framework,jpulec/django-rest-framework,edx/django-rest-framework,canassa/django-rest-framework,jpadilla/django-rest-framework,thedrow/django-rest-framework-1,arpheno/django-rest-framework,kennydude/django-rest-framework,sehmaschine/django-rest-framework,vstoykov/django-rest-framework,akalipetis/django-rest-framework,maryokhin/django-rest-framework,qsorix/django-rest-framework,iheitlager/django-rest-framework,jness/django-rest-framework,jtiai/django-rest-framework,aericson/django-rest-framework,simudream/django-rest-framework,hunter007/django-rest-framework,AlexandreProenca/django-rest-framework,justanr/django-rest-framework,ossanna16/django-rest-framework,MJafarMashhadi/django-rest-framework,andriy-s/django-rest-framework,dmwyatt/django-rest-framework,fishky/django-rest-framework,jerryhebert/django-rest-framework,canassa/django-rest-framework,jpadilla/django-rest-framework,davesque/django-rest-framework,hnakamur/django-rest-framework,wangpanjun/django-rest-framework,simudream/django-rest-framework,maryokhin/django-rest-framework,tomchristie/django-rest-framework,pombredanne/django-rest-framework,lubomir/django-rest-framework,jtiai/django-rest-framework,hnakamur/django-rest-f
ramework,delinhabit/django-rest-framework,AlexandreProenca/django-rest-framework,paolopaolopaolo/django-rest-framework,atombrella/django-rest-framework,fishky/django-rest-framework,mgaitan/django-rest-framework,delinhabit/django-rest-framework,vstoykov/django-rest-framework,tigeraniya/django-rest-framework,ambivalentno/django-rest-framework,uploadcare/django-rest-framework,davesque/django-rest-framework,wedaly/django-rest-framework,adambain-vokal/django-rest-framework,sheppard/django-rest-framework,xiaotangyuan/django-rest-framework,ashishfinoit/django-rest-framework,thedrow/django-rest-framework-1,kylefox/django-rest-framework,paolopaolopaolo/django-rest-framework,buptlsl/django-rest-framework,zeldalink0515/django-rest-framework,maryokhin/django-rest-framework,sbellem/django-rest-framework,kgeorgy/django-rest-framework,MJafarMashhadi/django-rest-framework,atombrella/django-rest-framework,alacritythief/django-rest-framework,tomchristie/django-rest-framework,wwj718/django-rest-framework,qsorix/django-rest-framework,brandoncazander/django-rest-framework,hunter007/django-rest-framework,pombredanne/django-rest-framework,callorico/django-rest-framework,sbellem/django-rest-framework,nryoung/django-rest-framework,justanr/django-rest-framework,qsorix/django-rest-framework,andriy-s/django-rest-framework,ebsaral/django-rest-framework,linovia/django-rest-framework,raphaelmerx/django-rest-framework,dmwyatt/django-rest-framework,ambivalentno/django-rest-framework,kezabelle/django-rest-framework,James1345/django-rest-framework,waytai/django-rest-framework,bluedazzle/django-rest-framework,jness/django-rest-framework,ossanna16/django-rest-framework,tcroiset/django-rest-framework,delinhabit/django-rest-framework,douwevandermeij/django-rest-framework,zeldalink0515/django-rest-framework,wwj718/django-rest-framework,d0ugal/django-rest-framework,wwj718/django-rest-framework,leeahoward/django-rest-framework,werthen/django-rest-framework,justanr/django-rest-framework,nryoung/django-rest-f
ramework,tomchristie/django-rest-framework,sbellem/django-rest-framework,sehmaschine/django-rest-framework,buptlsl/django-rest-framework,damycra/django-rest-framework,kylefox/django-rest-framework,andriy-s/django-rest-framework,VishvajitP/django-rest-framework,callorico/django-rest-framework,jtiai/django-rest-framework,uruz/django-rest-framework,callorico/django-rest-framework,kgeorgy/django-rest-framework,hnarayanan/django-rest-framework,akalipetis/django-rest-framework,cyberj/django-rest-framework,jerryhebert/django-rest-framework,uploadcare/django-rest-framework,sheppard/django-rest-framework,MJafarMashhadi/django-rest-framework,antonyc/django-rest-framework,HireAnEsquire/django-rest-framework,cheif/django-rest-framework,werthen/django-rest-framework,ezheidtmann/django-rest-framework,rubendura/django-rest-framework,cyberj/django-rest-framework,nhorelik/django-rest-framework,ticosax/django-rest-framework,johnraz/django-rest-framework,James1345/django-rest-framework,canassa/django-rest-framework,antonyc/django-rest-framework,wzbozon/django-rest-framework,hnarayanan/django-rest-framework,cheif/django-rest-framework,potpath/django-rest-framework,YBJAY00000/django-rest-framework,leeahoward/django-rest-framework,krinart/django-rest-framework,gregmuellegger/django-rest-framework,kezabelle/django-rest-framework,arpheno/django-rest-framework,abdulhaq-e/django-rest-framework,werthen/django-rest-framework,ezheidtmann/django-rest-framework,raphaelmerx/django-rest-framework,mgaitan/django-rest-framework,cyberj/django-rest-framework,aericson/django-rest-framework,elim/django-rest-framework,hnarayanan/django-rest-framework,VishvajitP/django-rest-framework,ambivalentno/django-rest-framework,rafaelcaricio/django-rest-framework,ezheidtmann/django-rest-framework,cheif/django-rest-framework,d0ugal/django-rest-framework,akalipetis/django-rest-framework,alacritythief/django-rest-framework,kylefox/django-rest-framework,agconti/django-rest-framework,lubomir/django-rest-framework,brandon
cazander/django-rest-framework,atombrella/django-rest-framework,douwevandermeij/django-rest-framework,jpulec/django-rest-framework,VishvajitP/django-rest-framework,xiaotangyuan/django-rest-framework,ajaali/django-rest-framework,adambain-vokal/django-rest-framework,rafaelang/django-rest-framework,wangpanjun/django-rest-framework,mgaitan/django-rest-framework,thedrow/django-rest-framework-1,pombredanne/django-rest-framework,nryoung/django-rest-framework,wedaly/django-rest-framework,wangpanjun/django-rest-framework,yiyocx/django-rest-framework,damycra/django-rest-framework,uruz/django-rest-framework,rhblind/django-rest-framework,sheppard/django-rest-framework,yiyocx/django-rest-framework,linovia/django-rest-framework,zeldalink0515/django-rest-framework,iheitlager/django-rest-framework,sehmaschine/django-rest-framework,lubomir/django-rest-framework,abdulhaq-e/django-rest-framework,HireAnEsquire/django-rest-framework,johnraz/django-rest-framework,YBJAY00000/django-rest-framework,rubendura/django-rest-framework,jness/django-rest-framework,potpath/django-rest-framework,rafaelang/django-rest-framework,jpulec/django-rest-framework,ticosax/django-rest-framework,leeahoward/django-rest-framework,arpheno/django-rest-framework,ebsaral/django-rest-framework,xiaotangyuan/django-rest-framework,yiyocx/django-rest-framework,ajaali/django-rest-framework,abdulhaq-e/django-rest-framework,douwevandermeij/django-rest-framework,krinart/django-rest-framework,buptlsl/django-rest-framework,brandoncazander/django-rest-framework,kgeorgy/django-rest-framework,ebsaral/django-rest-framework,tigeraniya/django-rest-framework,agconti/django-rest-framework,rubendura/django-rest-framework,uploadcare/django-rest-framework,adambain-vokal/django-rest-framework,alacritythief/django-rest-framework,bluedazzle/django-rest-framework,agconti/django-rest-framework,gregmuellegger/django-rest-framework,aericson/django-rest-framework,rhblind/django-rest-framework,ossanna16/django-rest-framework,johnraz/django-rest-f
ramework,wedaly/django-rest-framework,simudream/django-rest-framework,edx/django-rest-framework,rafaelcaricio/django-rest-framework,paolopaolopaolo/django-rest-framework,antonyc/django-rest-framework,tcroiset/django-rest-framework,nhorelik/django-rest-framework,waytai/django-rest-framework,ashishfinoit/django-rest-framework,vstoykov/django-rest-framework,wzbozon/django-rest-framework,dmwyatt/django-rest-framework,tcroiset/django-rest-framework,uruz/django-rest-framework,ticosax/django-rest-framework,hunter007/django-rest-framework,AlexandreProenca/django-rest-framework,tigeraniya/django-rest-framework,ajaali/django-rest-framework,waytai/django-rest-framework,raphaelmerx/django-rest-framework,kennydude/django-rest-framework,wzbozon/django-rest-framework
|
from django.contrib.auth import authenticate
+ from django.utils.translation import ugettext_lazy as _
+
from rest_framework import serializers
class AuthTokenSerializer(serializers.Serializer):
username = serializers.CharField()
password = serializers.CharField()
def validate(self, attrs):
username = attrs.get('username')
password = attrs.get('password')
if username and password:
user = authenticate(username=username, password=password)
if user:
if not user.is_active:
+ msg = _('User account is disabled.')
- raise serializers.ValidationError('User account is disabled.')
+ raise serializers.ValidationError()
attrs['user'] = user
return attrs
else:
- raise serializers.ValidationError('Unable to login with provided credentials.')
+ msg = _('Unable to login with provided credentials.')
+ raise serializers.ValidationError(msg)
else:
- raise serializers.ValidationError('Must include "username" and "password"')
+ msg = _('Must include "username" and "password"')
+ raise serializers.ValidationError(msg)
|
Mark strings in AuthTokenSerializer as translatable
|
## Code Before:
from django.contrib.auth import authenticate
from rest_framework import serializers
class AuthTokenSerializer(serializers.Serializer):
username = serializers.CharField()
password = serializers.CharField()
def validate(self, attrs):
username = attrs.get('username')
password = attrs.get('password')
if username and password:
user = authenticate(username=username, password=password)
if user:
if not user.is_active:
raise serializers.ValidationError('User account is disabled.')
attrs['user'] = user
return attrs
else:
raise serializers.ValidationError('Unable to login with provided credentials.')
else:
raise serializers.ValidationError('Must include "username" and "password"')
## Instruction:
Mark strings in AuthTokenSerializer as translatable
## Code After:
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class AuthTokenSerializer(serializers.Serializer):
username = serializers.CharField()
password = serializers.CharField()
def validate(self, attrs):
username = attrs.get('username')
password = attrs.get('password')
if username and password:
user = authenticate(username=username, password=password)
if user:
if not user.is_active:
msg = _('User account is disabled.')
raise serializers.ValidationError()
attrs['user'] = user
return attrs
else:
msg = _('Unable to login with provided credentials.')
raise serializers.ValidationError(msg)
else:
msg = _('Must include "username" and "password"')
raise serializers.ValidationError(msg)
|
# ... existing code ...
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
# ... modified code ...
if not user.is_active:
msg = _('User account is disabled.')
raise serializers.ValidationError()
attrs['user'] = user
...
else:
msg = _('Unable to login with provided credentials.')
raise serializers.ValidationError(msg)
else:
msg = _('Must include "username" and "password"')
raise serializers.ValidationError(msg)
# ... rest of the code ...
|
a9755fc4b30629ea2c9db51aa6d4218f99fcabc3
|
frigg/deployments/migrations/0004_auto_20150725_1456.py
|
frigg/deployments/migrations/0004_auto_20150725_1456.py
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0003_prdeployment_start_time'),
]
operations = [
migrations.AlterField(
model_name='prdeployment',
name='image',
field=models.CharField(default='frigg/frigg-test-base', max_length=255),
),
]
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0003_prdeployment_start_time'),
]
operations = [
migrations.AlterField(
model_name='prdeployment',
name='image',
field=models.CharField(default=settings.FRIGG_PREVIEW_IMAGE, max_length=255),
),
]
|
Set FRIGG_PREVIEW_IMAGE in db migrations
|
Set FRIGG_PREVIEW_IMAGE in db migrations
|
Python
|
mit
|
frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq
|
from __future__ import unicode_literals
+ from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0003_prdeployment_start_time'),
]
operations = [
migrations.AlterField(
model_name='prdeployment',
name='image',
- field=models.CharField(default='frigg/frigg-test-base', max_length=255),
+ field=models.CharField(default=settings.FRIGG_PREVIEW_IMAGE, max_length=255),
),
]
|
Set FRIGG_PREVIEW_IMAGE in db migrations
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0003_prdeployment_start_time'),
]
operations = [
migrations.AlterField(
model_name='prdeployment',
name='image',
field=models.CharField(default='frigg/frigg-test-base', max_length=255),
),
]
## Instruction:
Set FRIGG_PREVIEW_IMAGE in db migrations
## Code After:
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0003_prdeployment_start_time'),
]
operations = [
migrations.AlterField(
model_name='prdeployment',
name='image',
field=models.CharField(default=settings.FRIGG_PREVIEW_IMAGE, max_length=255),
),
]
|
...
from django.conf import settings
from django.db import migrations, models
...
name='image',
field=models.CharField(default=settings.FRIGG_PREVIEW_IMAGE, max_length=255),
),
...
|
3ec325afca110e866a5b60e4e92a38738aee4906
|
graphene_django_extras/directives/__init__.py
|
graphene_django_extras/directives/__init__.py
|
from graphql.type.directives import specified_directives as default_directives
from .date import *
from .list import *
from .numbers import *
from .string import *
all_directives = (
# date
DateGraphQLDirective,
# list
ShuffleGraphQLDirective,
SampleGraphQLDirective,
# numbers
FloorGraphQLDirective,
CeilGraphQLDirective,
# string
DefaultGraphQLDirective,
Base64GraphQLDirective,
NumberGraphQLDirective,
CurrencyGraphQLDirective,
LowercaseGraphQLDirective,
UppercaseGraphQLDirective,
CapitalizeGraphQLDirective,
CamelCaseGraphQLDirective,
SnakeCaseGraphQLDirective,
KebabCaseGraphQLDirective,
SwapCaseGraphQLDirective,
StripGraphQLDirective,
TitleCaseGraphQLDirective,
CenterGraphQLDirective,
ReplaceGraphQLDirective,
)
all_directives = [d() for d in all_directives] + default_directives
|
from graphql.type.directives import specified_directives as default_directives
from .date import DateGraphQLDirective
from .list import ShuffleGraphQLDirective, SampleGraphQLDirective
from .numbers import FloorGraphQLDirective, CeilGraphQLDirective
from .string import (
DefaultGraphQLDirective,
Base64GraphQLDirective,
NumberGraphQLDirective,
CurrencyGraphQLDirective,
LowercaseGraphQLDirective,
UppercaseGraphQLDirective,
CapitalizeGraphQLDirective,
CamelCaseGraphQLDirective,
SnakeCaseGraphQLDirective,
KebabCaseGraphQLDirective,
SwapCaseGraphQLDirective,
StripGraphQLDirective,
TitleCaseGraphQLDirective,
CenterGraphQLDirective,
ReplaceGraphQLDirective,
)
all_directives = (
# date
DateGraphQLDirective,
# list
ShuffleGraphQLDirective,
SampleGraphQLDirective,
# numbers
FloorGraphQLDirective,
CeilGraphQLDirective,
# string
DefaultGraphQLDirective,
Base64GraphQLDirective,
NumberGraphQLDirective,
CurrencyGraphQLDirective,
LowercaseGraphQLDirective,
UppercaseGraphQLDirective,
CapitalizeGraphQLDirective,
CamelCaseGraphQLDirective,
SnakeCaseGraphQLDirective,
KebabCaseGraphQLDirective,
SwapCaseGraphQLDirective,
StripGraphQLDirective,
TitleCaseGraphQLDirective,
CenterGraphQLDirective,
ReplaceGraphQLDirective,
)
all_directives = [d() for d in all_directives] + default_directives
|
Make minor improvements for CI.
|
Make minor improvements for CI.
|
Python
|
mit
|
eamigo86/graphene-django-extras
|
from graphql.type.directives import specified_directives as default_directives
- from .date import *
- from .list import *
- from .numbers import *
+ from .date import DateGraphQLDirective
+ from .list import ShuffleGraphQLDirective, SampleGraphQLDirective
+ from .numbers import FloorGraphQLDirective, CeilGraphQLDirective
- from .string import *
+ from .string import (
+ DefaultGraphQLDirective,
+ Base64GraphQLDirective,
+ NumberGraphQLDirective,
+ CurrencyGraphQLDirective,
+ LowercaseGraphQLDirective,
+ UppercaseGraphQLDirective,
+ CapitalizeGraphQLDirective,
+ CamelCaseGraphQLDirective,
+ SnakeCaseGraphQLDirective,
+ KebabCaseGraphQLDirective,
+ SwapCaseGraphQLDirective,
+ StripGraphQLDirective,
+ TitleCaseGraphQLDirective,
+ CenterGraphQLDirective,
+ ReplaceGraphQLDirective,
+ )
all_directives = (
# date
DateGraphQLDirective,
# list
ShuffleGraphQLDirective,
SampleGraphQLDirective,
# numbers
FloorGraphQLDirective,
CeilGraphQLDirective,
# string
DefaultGraphQLDirective,
Base64GraphQLDirective,
NumberGraphQLDirective,
CurrencyGraphQLDirective,
LowercaseGraphQLDirective,
UppercaseGraphQLDirective,
CapitalizeGraphQLDirective,
CamelCaseGraphQLDirective,
SnakeCaseGraphQLDirective,
KebabCaseGraphQLDirective,
SwapCaseGraphQLDirective,
StripGraphQLDirective,
TitleCaseGraphQLDirective,
CenterGraphQLDirective,
ReplaceGraphQLDirective,
)
all_directives = [d() for d in all_directives] + default_directives
|
Make minor improvements for CI.
|
## Code Before:
from graphql.type.directives import specified_directives as default_directives
from .date import *
from .list import *
from .numbers import *
from .string import *
all_directives = (
# date
DateGraphQLDirective,
# list
ShuffleGraphQLDirective,
SampleGraphQLDirective,
# numbers
FloorGraphQLDirective,
CeilGraphQLDirective,
# string
DefaultGraphQLDirective,
Base64GraphQLDirective,
NumberGraphQLDirective,
CurrencyGraphQLDirective,
LowercaseGraphQLDirective,
UppercaseGraphQLDirective,
CapitalizeGraphQLDirective,
CamelCaseGraphQLDirective,
SnakeCaseGraphQLDirective,
KebabCaseGraphQLDirective,
SwapCaseGraphQLDirective,
StripGraphQLDirective,
TitleCaseGraphQLDirective,
CenterGraphQLDirective,
ReplaceGraphQLDirective,
)
all_directives = [d() for d in all_directives] + default_directives
## Instruction:
Make minor improvements for CI.
## Code After:
from graphql.type.directives import specified_directives as default_directives
from .date import DateGraphQLDirective
from .list import ShuffleGraphQLDirective, SampleGraphQLDirective
from .numbers import FloorGraphQLDirective, CeilGraphQLDirective
from .string import (
DefaultGraphQLDirective,
Base64GraphQLDirective,
NumberGraphQLDirective,
CurrencyGraphQLDirective,
LowercaseGraphQLDirective,
UppercaseGraphQLDirective,
CapitalizeGraphQLDirective,
CamelCaseGraphQLDirective,
SnakeCaseGraphQLDirective,
KebabCaseGraphQLDirective,
SwapCaseGraphQLDirective,
StripGraphQLDirective,
TitleCaseGraphQLDirective,
CenterGraphQLDirective,
ReplaceGraphQLDirective,
)
all_directives = (
# date
DateGraphQLDirective,
# list
ShuffleGraphQLDirective,
SampleGraphQLDirective,
# numbers
FloorGraphQLDirective,
CeilGraphQLDirective,
# string
DefaultGraphQLDirective,
Base64GraphQLDirective,
NumberGraphQLDirective,
CurrencyGraphQLDirective,
LowercaseGraphQLDirective,
UppercaseGraphQLDirective,
CapitalizeGraphQLDirective,
CamelCaseGraphQLDirective,
SnakeCaseGraphQLDirective,
KebabCaseGraphQLDirective,
SwapCaseGraphQLDirective,
StripGraphQLDirective,
TitleCaseGraphQLDirective,
CenterGraphQLDirective,
ReplaceGraphQLDirective,
)
all_directives = [d() for d in all_directives] + default_directives
|
# ... existing code ...
from .date import DateGraphQLDirective
from .list import ShuffleGraphQLDirective, SampleGraphQLDirective
from .numbers import FloorGraphQLDirective, CeilGraphQLDirective
from .string import (
DefaultGraphQLDirective,
Base64GraphQLDirective,
NumberGraphQLDirective,
CurrencyGraphQLDirective,
LowercaseGraphQLDirective,
UppercaseGraphQLDirective,
CapitalizeGraphQLDirective,
CamelCaseGraphQLDirective,
SnakeCaseGraphQLDirective,
KebabCaseGraphQLDirective,
SwapCaseGraphQLDirective,
StripGraphQLDirective,
TitleCaseGraphQLDirective,
CenterGraphQLDirective,
ReplaceGraphQLDirective,
)
# ... rest of the code ...
|
e7865a22eb2e7433f3c36cd571aae3ac65436423
|
signage/models.py
|
signage/models.py
|
from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeFramedModel
from taggit.managers import TaggableManager
@python_2_unicode_compatible
class Slide(TimeFramedModel):
"""
"""
name = models.CharField(
max_length=255,
)
description = models.TextField(
blank=True,
)
image = models.ImageField(
upload_to='slides/',
)
duration = models.PositiveIntegerField(
default=7,
)
weight = models.SmallIntegerField(
default=0,
)
tags = TaggableManager()
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('signage:slide_update', args=[self.pk])
def get_displays(self):
return Display.objects.filter(tags__name__in=self.tags.names()).distinct()
@python_2_unicode_compatible
class Display(models.Model):
"""
"""
name = models.CharField(
max_length=255,
)
description = models.TextField(
blank=True,
)
tags = TaggableManager()
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('signage:display_update', args=[self.pk])
def get_slides(self):
return Slide.objects.filter(tags__name__in=self.tags.names()).distinct()
|
from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeFramedModel
from taggit.managers import TaggableManager
@python_2_unicode_compatible
class Slide(TimeFramedModel):
"""
"""
name = models.CharField(
max_length=255,
)
description = models.TextField(
blank=True,
)
image = models.ImageField(
upload_to='slides/',
)
duration = models.PositiveIntegerField(
default=7,
)
weight = models.SmallIntegerField(
default=0,
)
tags = TaggableManager()
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('signage:slide_update', args=[self.pk])
def get_displays(self):
return Display.objects.filter(tags__name__in=self.tags.names()).distinct()
@python_2_unicode_compatible
class Display(models.Model):
"""
"""
name = models.CharField(
max_length=255,
)
description = models.TextField(
blank=True,
)
tags = TaggableManager()
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('signage:display_update', args=[self.pk])
def get_slides(self):
return Slide.objects.filter(tags__name__in=self.tags.names()).order_by('weight').distinct()
|
Order displayed slides by weight
|
Order displayed slides by weight
|
Python
|
bsd-3-clause
|
jbittel/django-signage,jbittel/django-signage,jbittel/django-signage
|
from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeFramedModel
from taggit.managers import TaggableManager
@python_2_unicode_compatible
class Slide(TimeFramedModel):
"""
"""
name = models.CharField(
max_length=255,
)
description = models.TextField(
blank=True,
)
image = models.ImageField(
upload_to='slides/',
)
duration = models.PositiveIntegerField(
default=7,
)
weight = models.SmallIntegerField(
default=0,
)
tags = TaggableManager()
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('signage:slide_update', args=[self.pk])
def get_displays(self):
return Display.objects.filter(tags__name__in=self.tags.names()).distinct()
@python_2_unicode_compatible
class Display(models.Model):
"""
"""
name = models.CharField(
max_length=255,
)
description = models.TextField(
blank=True,
)
tags = TaggableManager()
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('signage:display_update', args=[self.pk])
def get_slides(self):
- return Slide.objects.filter(tags__name__in=self.tags.names()).distinct()
+ return Slide.objects.filter(tags__name__in=self.tags.names()).order_by('weight').distinct()
|
Order displayed slides by weight
|
## Code Before:
from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeFramedModel
from taggit.managers import TaggableManager
@python_2_unicode_compatible
class Slide(TimeFramedModel):
"""
"""
name = models.CharField(
max_length=255,
)
description = models.TextField(
blank=True,
)
image = models.ImageField(
upload_to='slides/',
)
duration = models.PositiveIntegerField(
default=7,
)
weight = models.SmallIntegerField(
default=0,
)
tags = TaggableManager()
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('signage:slide_update', args=[self.pk])
def get_displays(self):
return Display.objects.filter(tags__name__in=self.tags.names()).distinct()
@python_2_unicode_compatible
class Display(models.Model):
"""
"""
name = models.CharField(
max_length=255,
)
description = models.TextField(
blank=True,
)
tags = TaggableManager()
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('signage:display_update', args=[self.pk])
def get_slides(self):
return Slide.objects.filter(tags__name__in=self.tags.names()).distinct()
## Instruction:
Order displayed slides by weight
## Code After:
from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeFramedModel
from taggit.managers import TaggableManager
@python_2_unicode_compatible
class Slide(TimeFramedModel):
"""
"""
name = models.CharField(
max_length=255,
)
description = models.TextField(
blank=True,
)
image = models.ImageField(
upload_to='slides/',
)
duration = models.PositiveIntegerField(
default=7,
)
weight = models.SmallIntegerField(
default=0,
)
tags = TaggableManager()
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('signage:slide_update', args=[self.pk])
def get_displays(self):
return Display.objects.filter(tags__name__in=self.tags.names()).distinct()
@python_2_unicode_compatible
class Display(models.Model):
"""
"""
name = models.CharField(
max_length=255,
)
description = models.TextField(
blank=True,
)
tags = TaggableManager()
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('signage:display_update', args=[self.pk])
def get_slides(self):
return Slide.objects.filter(tags__name__in=self.tags.names()).order_by('weight').distinct()
|
...
def get_slides(self):
return Slide.objects.filter(tags__name__in=self.tags.names()).order_by('weight').distinct()
...
|
0281aaa0868d0bfa6ecb7368cff89b4af6b57129
|
tests/functions_tests/test_dropout.py
|
tests/functions_tests/test_dropout.py
|
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import testing
if cuda.available:
cuda.init()
class TestDropout(unittest.TestCase):
    """Smoke test for functions.dropout: the forward pass must accept a
    float32 input without raising, on both CPU and GPU arrays."""

    def setUp(self):
        # Small float32 sample in [-1, 1); shape (2, 3) keeps the test fast.
        self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)

    def check_type_forward(self, x_data):
        """Run dropout's forward pass and fail the test on any exception."""
        x = chainer.Variable(x_data)
        try:
            functions.dropout(x)
        except Exception:
            # Any exception means valid input was rejected by type checking.
            self.fail()

    def test_type_forward_cpu(self):
        self.check_type_forward(self.x)

    def test_type_forward_gpu(self):
        # NOTE(review): exercises the GPU path and presumably requires CUDA;
        # the revised version in this record guards it with @attr.gpu.
        self.check_type_forward(cuda.to_gpu(self.x))
testing.run_module(__name__, __file__)
|
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import testing
from chainer.testing import attr
if cuda.available:
cuda.init()
class TestDropout(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
def check_type_forward(self, x_data):
x = chainer.Variable(x_data)
try:
functions.dropout(x)
except Exception:
self.fail()
def test_type_forward_cpu(self):
self.check_type_forward(self.x)
@attr.gpu
def test_type_forward_gpu(self):
self.check_type_forward(cuda.to_gpu(self.x))
testing.run_module(__name__, __file__)
|
Add attr.gpu decorator to gpu test of dropout
|
Add attr.gpu decorator to gpu test of dropout
|
Python
|
mit
|
yanweifu/chainer,hvy/chainer,cupy/cupy,ysekky/chainer,woodshop/complex-chainer,niboshi/chainer,tkerola/chainer,kashif/chainer,kikusu/chainer,jnishi/chainer,okuta/chainer,niboshi/chainer,benob/chainer,chainer/chainer,AlpacaDB/chainer,sou81821/chainer,umitanuki/chainer,tscohen/chainer,cupy/cupy,laysakura/chainer,masia02/chainer,jfsantos/chainer,anaruse/chainer,keisuke-umezawa/chainer,truongdq/chainer,chainer/chainer,wkentaro/chainer,ktnyt/chainer,aonotas/chainer,ikasumi/chainer,kikusu/chainer,AlpacaDB/chainer,sinhrks/chainer,sinhrks/chainer,rezoo/chainer,okuta/chainer,jnishi/chainer,1986ks/chainer,muupan/chainer,ytoyama/yans_chainer_hackathon,minhpqn/chainer,wavelets/chainer,muupan/chainer,ktnyt/chainer,cemoody/chainer,ktnyt/chainer,cupy/cupy,kuwa32/chainer,ktnyt/chainer,hvy/chainer,jnishi/chainer,niboshi/chainer,Kaisuke5/chainer,tigerneil/chainer,ronekko/chainer,cupy/cupy,truongdq/chainer,pfnet/chainer,t-abe/chainer,hvy/chainer,niboshi/chainer,chainer/chainer,okuta/chainer,t-abe/chainer,keisuke-umezawa/chainer,woodshop/chainer,jnishi/chainer,wkentaro/chainer,hidenori-t/chainer,elviswf/chainer,chainer/chainer,keisuke-umezawa/chainer,wkentaro/chainer,hvy/chainer,benob/chainer,delta2323/chainer,kiyukuta/chainer,okuta/chainer,wkentaro/chainer,keisuke-umezawa/chainer
|
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import testing
+ from chainer.testing import attr
if cuda.available:
cuda.init()
class TestDropout(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
def check_type_forward(self, x_data):
x = chainer.Variable(x_data)
try:
functions.dropout(x)
except Exception:
self.fail()
def test_type_forward_cpu(self):
self.check_type_forward(self.x)
+ @attr.gpu
def test_type_forward_gpu(self):
self.check_type_forward(cuda.to_gpu(self.x))
testing.run_module(__name__, __file__)
|
Add attr.gpu decorator to gpu test of dropout
|
## Code Before:
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import testing
if cuda.available:
cuda.init()
class TestDropout(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
def check_type_forward(self, x_data):
x = chainer.Variable(x_data)
try:
functions.dropout(x)
except Exception:
self.fail()
def test_type_forward_cpu(self):
self.check_type_forward(self.x)
def test_type_forward_gpu(self):
self.check_type_forward(cuda.to_gpu(self.x))
testing.run_module(__name__, __file__)
## Instruction:
Add attr.gpu decorator to gpu test of dropout
## Code After:
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import testing
from chainer.testing import attr
if cuda.available:
cuda.init()
class TestDropout(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
def check_type_forward(self, x_data):
x = chainer.Variable(x_data)
try:
functions.dropout(x)
except Exception:
self.fail()
def test_type_forward_cpu(self):
self.check_type_forward(self.x)
@attr.gpu
def test_type_forward_gpu(self):
self.check_type_forward(cuda.to_gpu(self.x))
testing.run_module(__name__, __file__)
|
// ... existing code ...
from chainer import testing
from chainer.testing import attr
// ... modified code ...
@attr.gpu
def test_type_forward_gpu(self):
// ... rest of the code ...
|
ba2618e5ca63cad92e17ea154c9d8332064f7771
|
pdvega/_axes.py
|
pdvega/_axes.py
|
from vega3 import VegaLite
class VegaLiteAxes(object):
    """Class representing a pdvega plot axes"""

    def __init__(self, spec=None, data=None):
        # All chart state lives in the wrapped VegaLite object.
        self.vlspec = VegaLite(spec, data)

    @property
    def spec(self):
        """The underlying Vega-Lite specification."""
        return self.vlspec.spec

    @property
    def data(self):
        """The dataset bound to the chart."""
        return self.vlspec.data

    def _ipython_display_(self):
        """Rich-display hook used by IPython/Jupyter."""
        return self.vlspec._ipython_display_()

    def display(self):
        """Render the chart explicitly."""
        return self.vlspec.display()
|
from vega3 import VegaLite
class VegaLiteAxes(object):
"""Class representing a pdvega plot axes"""
def __init__(self, spec=None, data=None):
self.vlspec = VegaLite(spec, data)
@property
def spec(self):
return self.vlspec.spec
@property
def spec_no_data(self):
return {key: val for key, val in self.spec.items() if key != 'data'}
@property
def data(self):
return self.vlspec.data
def _ipython_display_(self):
return self.vlspec._ipython_display_()
def display(self):
return self.vlspec.display()
|
Add spec_no_data attribute to axes
|
Add spec_no_data attribute to axes
|
Python
|
mit
|
jakevdp/pdvega
|
from vega3 import VegaLite
class VegaLiteAxes(object):
"""Class representing a pdvega plot axes"""
def __init__(self, spec=None, data=None):
self.vlspec = VegaLite(spec, data)
@property
def spec(self):
return self.vlspec.spec
@property
+ def spec_no_data(self):
+ return {key: val for key, val in self.spec.items() if key != 'data'}
+
+ @property
def data(self):
return self.vlspec.data
def _ipython_display_(self):
return self.vlspec._ipython_display_()
def display(self):
return self.vlspec.display()
|
Add spec_no_data attribute to axes
|
## Code Before:
from vega3 import VegaLite
class VegaLiteAxes(object):
"""Class representing a pdvega plot axes"""
def __init__(self, spec=None, data=None):
self.vlspec = VegaLite(spec, data)
@property
def spec(self):
return self.vlspec.spec
@property
def data(self):
return self.vlspec.data
def _ipython_display_(self):
return self.vlspec._ipython_display_()
def display(self):
return self.vlspec.display()
## Instruction:
Add spec_no_data attribute to axes
## Code After:
from vega3 import VegaLite
class VegaLiteAxes(object):
    """Class representing a pdvega plot axes"""

    def __init__(self, spec=None, data=None):
        # All chart state lives in the wrapped VegaLite object.
        self.vlspec = VegaLite(spec, data)

    @property
    def spec(self):
        """Full Vega-Lite specification, including any inline data."""
        return self.vlspec.spec

    @property
    def spec_no_data(self):
        """Copy of the specification with the 'data' entry omitted."""
        stripped = dict(self.spec)
        stripped.pop('data', None)
        return stripped

    @property
    def data(self):
        """The dataset bound to the chart."""
        return self.vlspec.data

    def _ipython_display_(self):
        """Rich-display hook used by IPython/Jupyter."""
        return self.vlspec._ipython_display_()

    def display(self):
        """Render the chart explicitly."""
        return self.vlspec.display()
|
// ... existing code ...
@property
def spec_no_data(self):
return {key: val for key, val in self.spec.items() if key != 'data'}
@property
def data(self):
// ... rest of the code ...
|
f20eb91dcf04bc8e33fbb48ebfbef1b56acbf02d
|
web.py
|
web.py
|
""" Heroku/Python Quickstart: https://blog.heroku.com/archives/2011/9/28/python_and_django"""
import os
from flask import Flask
app = Flask(__name__)
@app.route('/')
def home_page():
return 'Hello from the SPARK learn-a-thon!'
if __name__ == '__main__':
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
|
""" Heroku/Python Quickstart: https://blog.heroku.com/archives/2011/9/28/python_and_django"""
import os
import random
import requests
from flask import Flask
import tweepy
import settings
app = Flask(__name__)
@app.route('/')
def home_page():
    """Root endpoint: return a static greeting string."""
    return 'Hello from the SPARK learn-a-thon!'
def get_instagram_image():
    """Return a list of image URLs for the #spark tag on Instagram.

    Fetches recent media for the tag and picks ``choose_number_of_images()``
    entries at random; picks are independent, so duplicates are possible.
    """
    instagram_api_url = 'https://api.instagram.com/v1/tags/spark/media/recent?client_id={}'.format(settings.CLIENT_ID)

    data = requests.get(instagram_api_url).json()['data']
    number_of_images = choose_number_of_images()

    images_returned = []
    # BUG FIX: number_of_images is an int; the original `for image in
    # number_of_images` raised TypeError. Iterate range(count) instead.
    for _ in range(number_of_images):
        images_returned.append(random.choice(data)['images']['low_resolution']['url'])

    return images_returned
def get_tweets():
    """Return the text of recent #spark tweets.

    Authenticates with the credentials from settings and pulls
    ``choose_number_of_tweets()`` results from the search API.
    """
    auth = tweepy.OAuthHandler(settings.CONSUMER_KEY, settings.CONSUMER_SECRET)
    auth.set_access_token(settings.ACCESS_KEY, settings.ACCESS_SECRET)
    api = tweepy.API(auth)

    limit = choose_number_of_tweets()
    cursor = tweepy.Cursor(api.search, q='#spark')

    # Collect just the tweet text; pagination is handled by the cursor.
    return [tweet.text for tweet in cursor.items(limit=limit)]
def choose_number_of_images():
    """Number of Instagram images to pull per request (currently fixed)."""
    return 3
def choose_number_of_tweets():
    """Number of tweets to pull per request (currently fixed)."""
    return 3
if __name__ == '__main__':
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
|
Make functions that pull a number of tweets and pics
|
Make functions that pull a number of tweets and pics
|
Python
|
apache-2.0
|
samanehsan/spark_github,samanehsan/spark_github,samanehsan/learn-git,samanehsan/learn-git
|
""" Heroku/Python Quickstart: https://blog.heroku.com/archives/2011/9/28/python_and_django"""
import os
+ import random
+ import requests
from flask import Flask
+ import tweepy
+
+ import settings
+
+
app = Flask(__name__)
+
@app.route('/')
def home_page():
return 'Hello from the SPARK learn-a-thon!'
+
+ def get_instagram_image():
+ instagram_api_url = 'https://api.instagram.com/v1/tags/spark/media/recent?client_id={}'.format(settings.CLIENT_ID)
+
+ data = requests.get(instagram_api_url).json()['data']
+ number_of_images = choose_number_of_images()
+
+ images_returned = []
+ for image in number_of_images:
+ images_returned.append(random.choice(data)['images']['low_resolution']['url'])
+
+ return images_returned
+
+
+ def get_tweets():
+ auth = tweepy.OAuthHandler(settings.CONSUMER_KEY, settings.CONSUMER_SECRET)
+ auth.set_access_token(settings.ACCESS_KEY, settings.ACCESS_SECRET)
+ api = tweepy.API(auth)
+
+ number_of_tweets = choose_number_of_tweets()
+
+ tweets_text = []
+ tweets = tweepy.Cursor(api.search, q='#spark')
+ for tweet in tweets.items(limit=number_of_tweets):
+ tweets_text.append(tweet.text)
+
+ return tweets_text
+
+
+ def choose_number_of_images():
+ number = 3
+ return number
+
+
+ def choose_number_of_tweets():
+ number = 3
+ return number
+
+
if __name__ == '__main__':
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
|
Make functions that pull a number of tweets and pics
|
## Code Before:
""" Heroku/Python Quickstart: https://blog.heroku.com/archives/2011/9/28/python_and_django"""
import os
from flask import Flask
app = Flask(__name__)
@app.route('/')
def home_page():
return 'Hello from the SPARK learn-a-thon!'
if __name__ == '__main__':
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
## Instruction:
Make functions that pull a number of tweets and pics
## Code After:
""" Heroku/Python Quickstart: https://blog.heroku.com/archives/2011/9/28/python_and_django"""
import os
import random
import requests
from flask import Flask
import tweepy
import settings
app = Flask(__name__)
@app.route('/')
def home_page():
return 'Hello from the SPARK learn-a-thon!'
def get_instagram_image():
instagram_api_url = 'https://api.instagram.com/v1/tags/spark/media/recent?client_id={}'.format(settings.CLIENT_ID)
data = requests.get(instagram_api_url).json()['data']
number_of_images = choose_number_of_images()
images_returned = []
for image in number_of_images:
images_returned.append(random.choice(data)['images']['low_resolution']['url'])
return images_returned
def get_tweets():
auth = tweepy.OAuthHandler(settings.CONSUMER_KEY, settings.CONSUMER_SECRET)
auth.set_access_token(settings.ACCESS_KEY, settings.ACCESS_SECRET)
api = tweepy.API(auth)
number_of_tweets = choose_number_of_tweets()
tweets_text = []
tweets = tweepy.Cursor(api.search, q='#spark')
for tweet in tweets.items(limit=number_of_tweets):
tweets_text.append(tweet.text)
return tweets_text
def choose_number_of_images():
number = 3
return number
def choose_number_of_tweets():
number = 3
return number
if __name__ == '__main__':
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
|
...
import os
import random
import requests
from flask import Flask
...
import tweepy
import settings
app = Flask(__name__)
...
def get_instagram_image():
instagram_api_url = 'https://api.instagram.com/v1/tags/spark/media/recent?client_id={}'.format(settings.CLIENT_ID)
data = requests.get(instagram_api_url).json()['data']
number_of_images = choose_number_of_images()
images_returned = []
for image in number_of_images:
images_returned.append(random.choice(data)['images']['low_resolution']['url'])
return images_returned
def get_tweets():
auth = tweepy.OAuthHandler(settings.CONSUMER_KEY, settings.CONSUMER_SECRET)
auth.set_access_token(settings.ACCESS_KEY, settings.ACCESS_SECRET)
api = tweepy.API(auth)
number_of_tweets = choose_number_of_tweets()
tweets_text = []
tweets = tweepy.Cursor(api.search, q='#spark')
for tweet in tweets.items(limit=number_of_tweets):
tweets_text.append(tweet.text)
return tweets_text
def choose_number_of_images():
number = 3
return number
def choose_number_of_tweets():
number = 3
return number
if __name__ == '__main__':
...
|
1ce0d9898fc31f08bbf5765b3a687eaa8067a465
|
flaskext/flask_scss.py
|
flaskext/flask_scss.py
|
from .scss import Scss
|
from .scss import Scss
from warnings import warn
warn(DeprecationWarning('Deprecated import method. '
'Please use:\n '
'from flask.ext.scss import Scss'), stacklevel=2)
|
Raise a DeprecationWarning when using pre-Flask-0.8 import scheme
|
Raise a DeprecationWarning when using pre-Flask-0.8 import scheme
|
Python
|
mit
|
bcarlin/flask-scss
|
from .scss import Scss
+ from warnings import warn
+ warn(DeprecationWarning('Deprecated import method. '
+ 'Please use:\n '
+ 'from flask.ext.scss import Scss'), stacklevel=2)
+
|
Raise a DeprecationWarning when using pre-Flask-0.8 import scheme
|
## Code Before:
from .scss import Scss
## Instruction:
Raise a DeprecationWarning when using pre-Flask-0.8 import scheme
## Code After:
from .scss import Scss
from warnings import warn
# Emit at import time: importing via the pre-Flask-0.8 "flaskext" package
# path is deprecated. stacklevel=2 attributes the warning to the importing
# module's line rather than to this shim.
warn(DeprecationWarning('Deprecated import method. '
                        'Please use:\n '
                        'from flask.ext.scss import Scss'), stacklevel=2)
|
...
from .scss import Scss
from warnings import warn
warn(DeprecationWarning('Deprecated import method. '
'Please use:\n '
'from flask.ext.scss import Scss'), stacklevel=2)
...
|
fe1b9ad1f65ac27c5bc3d02acaf473f001609e73
|
relayer/flask/__init__.py
|
relayer/flask/__init__.py
|
from typing import Any
from flask import Flask
from relayer import Relayer
class FlaskRelayer(object):
def __init__(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
if app:
self.init_app(
app,
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def init_app(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
kafka_hosts = kafka_hosts or app.config.get('KAFKA_HOSTS')
self.event_relayer = Relayer(
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def emit(self, *args: str, **kwargs: str) -> None:
self.event_relayer.emit(*args, **kwargs)
def emit_raw(self, *args: Any, **kwargs: Any) -> None:
self.event_relayer.emit_raw(*args, **kwargs)
def log(self, *args: str, **kwargs: str) -> None:
self.event_relayer.log(*args, **kwargs)
def flush(self, *args: str, **kwargs: str) -> None:
self.event_relayer.flush()
|
from typing import Any
from flask import Flask
from relayer import Relayer
class FlaskRelayer(object):
    """Flask extension wrapper around Relayer.

    Supports both immediate construction (``FlaskRelayer(app, topic)``) and
    the deferred pattern (``FlaskRelayer()`` followed by ``init_app(app)``).
    """

    def __init__(self, app: Flask = None, logging_topic: str = None, kafka_hosts: str = None, **kwargs: str) -> None:
        # All arguments default to None so the extension can be instantiated
        # without an app and wired up later via init_app().
        if app:
            self.init_app(
                app,
                logging_topic,
                kafka_hosts=kafka_hosts,
                **kwargs,
            )

    def init_app(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
        """Create the underlying Relayer; falls back to the app's
        KAFKA_HOSTS config when no explicit kafka_hosts is given."""
        kafka_hosts = kafka_hosts or app.config.get('KAFKA_HOSTS')
        self.event_relayer = Relayer(
            logging_topic,
            kafka_hosts=kafka_hosts,
            **kwargs,
        )

    def emit(self, *args: str, **kwargs: str) -> None:
        # Thin delegation to the wrapped Relayer.
        self.event_relayer.emit(*args, **kwargs)

    def emit_raw(self, *args: Any, **kwargs: Any) -> None:
        self.event_relayer.emit_raw(*args, **kwargs)

    def log(self, *args: str, **kwargs: str) -> None:
        self.event_relayer.log(*args, **kwargs)

    def flush(self, *args: str, **kwargs: str) -> None:
        # NOTE(review): args/kwargs are accepted but not forwarded to
        # flush(); presumably kept for call-site symmetry -- confirm before
        # relying on them having any effect.
        self.event_relayer.flush()
|
Use default arguments removed by mypy
|
Use default arguments removed by mypy
|
Python
|
mit
|
wizeline/relayer
|
from typing import Any
from flask import Flask
from relayer import Relayer
class FlaskRelayer(object):
- def __init__(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
+ def __init__(self, app: Flask = None, logging_topic: str = None, kafka_hosts: str = None, **kwargs: str) -> None:
if app:
self.init_app(
app,
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def init_app(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
kafka_hosts = kafka_hosts or app.config.get('KAFKA_HOSTS')
self.event_relayer = Relayer(
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def emit(self, *args: str, **kwargs: str) -> None:
self.event_relayer.emit(*args, **kwargs)
def emit_raw(self, *args: Any, **kwargs: Any) -> None:
self.event_relayer.emit_raw(*args, **kwargs)
def log(self, *args: str, **kwargs: str) -> None:
self.event_relayer.log(*args, **kwargs)
def flush(self, *args: str, **kwargs: str) -> None:
self.event_relayer.flush()
|
Use default arguments removed by mypy
|
## Code Before:
from typing import Any
from flask import Flask
from relayer import Relayer
class FlaskRelayer(object):
def __init__(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
if app:
self.init_app(
app,
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def init_app(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
kafka_hosts = kafka_hosts or app.config.get('KAFKA_HOSTS')
self.event_relayer = Relayer(
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def emit(self, *args: str, **kwargs: str) -> None:
self.event_relayer.emit(*args, **kwargs)
def emit_raw(self, *args: Any, **kwargs: Any) -> None:
self.event_relayer.emit_raw(*args, **kwargs)
def log(self, *args: str, **kwargs: str) -> None:
self.event_relayer.log(*args, **kwargs)
def flush(self, *args: str, **kwargs: str) -> None:
self.event_relayer.flush()
## Instruction:
Use default arguments removed by mypy
## Code After:
from typing import Any
from flask import Flask
from relayer import Relayer
class FlaskRelayer(object):
def __init__(self, app: Flask = None, logging_topic: str = None, kafka_hosts: str = None, **kwargs: str) -> None:
if app:
self.init_app(
app,
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def init_app(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
kafka_hosts = kafka_hosts or app.config.get('KAFKA_HOSTS')
self.event_relayer = Relayer(
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def emit(self, *args: str, **kwargs: str) -> None:
self.event_relayer.emit(*args, **kwargs)
def emit_raw(self, *args: Any, **kwargs: Any) -> None:
self.event_relayer.emit_raw(*args, **kwargs)
def log(self, *args: str, **kwargs: str) -> None:
self.event_relayer.log(*args, **kwargs)
def flush(self, *args: str, **kwargs: str) -> None:
self.event_relayer.flush()
|
...
def __init__(self, app: Flask = None, logging_topic: str = None, kafka_hosts: str = None, **kwargs: str) -> None:
if app:
...
|
9ff005d1c3ffc82e8469f1ecf7dda2d9ebf8bb46
|
Museau/urls.py
|
Museau/urls.py
|
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Museau
url(r'^$', 'music.views.home', name='home'),
# Ajax requests
url(r'^ajax/(?P<action>.*)\.(?P<filetype>.*)$', 'music.ajax.do', name='ajax'),
# django-registration
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/profile/$', 'music.views.back_to_home', name='redir'),
# Django Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Museau
url(r'^$', 'music.views.home', name='home'),
# django-registration
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/profile/$', 'music.views.back_to_home', name='redir'),
# Django Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
Remove ajax urlconf since we don't use it anymore
|
Remove ajax urlconf since we don't use it anymore
|
Python
|
mit
|
folz/Museau,folz/Museau
|
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Museau
url(r'^$', 'music.views.home', name='home'),
- # Ajax requests
-
- url(r'^ajax/(?P<action>.*)\.(?P<filetype>.*)$', 'music.ajax.do', name='ajax'),
-
# django-registration
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/profile/$', 'music.views.back_to_home', name='redir'),
# Django Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
Remove ajax urlconf since we don't use it anymore
|
## Code Before:
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Museau
url(r'^$', 'music.views.home', name='home'),
# Ajax requests
url(r'^ajax/(?P<action>.*)\.(?P<filetype>.*)$', 'music.ajax.do', name='ajax'),
# django-registration
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/profile/$', 'music.views.back_to_home', name='redir'),
# Django Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
## Instruction:
Remove ajax urlconf since we don't use it anymore
## Code After:
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Museau
url(r'^$', 'music.views.home', name='home'),
# django-registration
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/profile/$', 'music.views.back_to_home', name='redir'),
# Django Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
# ... existing code ...
# django-registration
# ... rest of the code ...
|
d2076f6fd3a0bb687224048de904207c885aba5c
|
utils.py
|
utils.py
|
from functools import wraps
def cached_property(f):
name = f.__name__
@property
@wraps(f)
def inner(self):
if not hasattr(self, "_property_cache"):
self._property_cache = {}
if name not in self._property_cache:
self._property_cache[name] = f(self)
return self._property_cache[name]
return inner
class Constant():
def __init__(self, x):
self.x = x
def __repr__(self):
return self.x
|
from functools import wraps
def cached_property(f):
name = f.__name__
@property
@wraps(f)
def inner(self):
if not hasattr(self, "_property_cache"):
self._property_cache = {}
if name not in self._property_cache:
self._property_cache[name] = f(self)
return self._property_cache[name]
return inner
class Constant():
def __init__(self, x):
self.x = x
def __repr__(self):
return self.x
def constants(namespace, names):
for name in names:
namespace[name] = Constant(name)
|
Make it easier to define constants
|
Make it easier to define constants
|
Python
|
unlicense
|
drkitty/python3-base,drkitty/python3-base
|
from functools import wraps
def cached_property(f):
name = f.__name__
@property
@wraps(f)
def inner(self):
if not hasattr(self, "_property_cache"):
self._property_cache = {}
if name not in self._property_cache:
self._property_cache[name] = f(self)
return self._property_cache[name]
return inner
class Constant():
def __init__(self, x):
self.x = x
def __repr__(self):
return self.x
+
+ def constants(namespace, names):
+ for name in names:
+ namespace[name] = Constant(name)
+
|
Make it easier to define constants
|
## Code Before:
from functools import wraps
def cached_property(f):
name = f.__name__
@property
@wraps(f)
def inner(self):
if not hasattr(self, "_property_cache"):
self._property_cache = {}
if name not in self._property_cache:
self._property_cache[name] = f(self)
return self._property_cache[name]
return inner
class Constant():
def __init__(self, x):
self.x = x
def __repr__(self):
return self.x
## Instruction:
Make it easier to define constants
## Code After:
from functools import wraps
def cached_property(f):
name = f.__name__
@property
@wraps(f)
def inner(self):
if not hasattr(self, "_property_cache"):
self._property_cache = {}
if name not in self._property_cache:
self._property_cache[name] = f(self)
return self._property_cache[name]
return inner
class Constant():
def __init__(self, x):
self.x = x
def __repr__(self):
return self.x
def constants(namespace, names):
for name in names:
namespace[name] = Constant(name)
|
...
return self.x
def constants(namespace, names):
for name in names:
namespace[name] = Constant(name)
...
|
2df886059a9edd8d75fdb255fc185c2f96a02c29
|
user/signals.py
|
user/signals.py
|
import re
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from user import tokens
from user.models import User
REGEX_PATTERN = getattr(settings, 'REGEX_HACKATHON_ORGANIZER_EMAIL', None)
# MAke user organizer if fits regex
@receiver(post_save, sender=User)
def user_organizer(sender, instance, created, *args, **kwargs):
if not REGEX_PATTERN or not created:
return None
if re.match(REGEX_PATTERN, instance.email):
instance.is_organizer = True
instance.save()
# Send user verification
@receiver(post_save, sender=User)
def user_verify_email(sender, instance, created, *args, **kwargs):
if created and not instance.email_verified:
msg = tokens.generate_verify_email(instance)
msg.send()
|
import re
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from user import tokens
from user.models import User
REGEX_PATTERN = getattr(settings, 'REGEX_HACKATHON_ORGANIZER_EMAIL', None)
DEV_EMAILS = getattr(settings, 'HACKATHON_DEV_EMAILS', None)
# Make user organizer or admin if fits regex
@receiver(post_save, sender=User)
def user_organizer(sender, instance, created, *args, **kwargs):
if not created:
return None
if REGEX_PATTERN and re.match(REGEX_PATTERN, instance.email):
instance.is_organizer = True
instance.save()
if DEV_EMAILS and instance.email in DEV_EMAILS:
instance.is_admin = True
instance.save()
# Send user verification
@receiver(post_save, sender=User)
def user_verify_email(sender, instance, created, *args, **kwargs):
if created and not instance.email_verified:
msg = tokens.generate_verify_email(instance)
msg.send()
|
Make developers an admin on registration
|
Make developers an admin on registration
|
Python
|
mit
|
hackupc/backend,hackupc/backend,hackupc/backend,hackupc/backend
|
import re
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from user import tokens
from user.models import User
REGEX_PATTERN = getattr(settings, 'REGEX_HACKATHON_ORGANIZER_EMAIL', None)
+ DEV_EMAILS = getattr(settings, 'HACKATHON_DEV_EMAILS', None)
- # MAke user organizer if fits regex
+ # Make user organizer or admin if fits regex
@receiver(post_save, sender=User)
def user_organizer(sender, instance, created, *args, **kwargs):
- if not REGEX_PATTERN or not created:
+ if not created:
return None
+
- if re.match(REGEX_PATTERN, instance.email):
+ if REGEX_PATTERN and re.match(REGEX_PATTERN, instance.email):
instance.is_organizer = True
+ instance.save()
+
+ if DEV_EMAILS and instance.email in DEV_EMAILS:
+ instance.is_admin = True
instance.save()
# Send user verification
@receiver(post_save, sender=User)
def user_verify_email(sender, instance, created, *args, **kwargs):
if created and not instance.email_verified:
msg = tokens.generate_verify_email(instance)
msg.send()
|
Make developers an admin on registration
|
## Code Before:
import re
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from user import tokens
from user.models import User
REGEX_PATTERN = getattr(settings, 'REGEX_HACKATHON_ORGANIZER_EMAIL', None)
# MAke user organizer if fits regex
@receiver(post_save, sender=User)
def user_organizer(sender, instance, created, *args, **kwargs):
if not REGEX_PATTERN or not created:
return None
if re.match(REGEX_PATTERN, instance.email):
instance.is_organizer = True
instance.save()
# Send user verification
@receiver(post_save, sender=User)
def user_verify_email(sender, instance, created, *args, **kwargs):
if created and not instance.email_verified:
msg = tokens.generate_verify_email(instance)
msg.send()
## Instruction:
Make developers an admin on registration
## Code After:
import re
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from user import tokens
from user.models import User
REGEX_PATTERN = getattr(settings, 'REGEX_HACKATHON_ORGANIZER_EMAIL', None)
DEV_EMAILS = getattr(settings, 'HACKATHON_DEV_EMAILS', None)
# Make user organizer or admin if fits regex
@receiver(post_save, sender=User)
def user_organizer(sender, instance, created, *args, **kwargs):
if not created:
return None
if REGEX_PATTERN and re.match(REGEX_PATTERN, instance.email):
instance.is_organizer = True
instance.save()
if DEV_EMAILS and instance.email in DEV_EMAILS:
instance.is_admin = True
instance.save()
# Send user verification
@receiver(post_save, sender=User)
def user_verify_email(sender, instance, created, *args, **kwargs):
if created and not instance.email_verified:
msg = tokens.generate_verify_email(instance)
msg.send()
|
// ... existing code ...
REGEX_PATTERN = getattr(settings, 'REGEX_HACKATHON_ORGANIZER_EMAIL', None)
DEV_EMAILS = getattr(settings, 'HACKATHON_DEV_EMAILS', None)
// ... modified code ...
# Make user organizer or admin if fits regex
@receiver(post_save, sender=User)
...
def user_organizer(sender, instance, created, *args, **kwargs):
if not created:
return None
if REGEX_PATTERN and re.match(REGEX_PATTERN, instance.email):
instance.is_organizer = True
instance.save()
if DEV_EMAILS and instance.email in DEV_EMAILS:
instance.is_admin = True
instance.save()
// ... rest of the code ...
|
bc4486063325fc18bee00ba3ee8ba4e5e2323bee
|
doc/tools/make_cookbook.py
|
doc/tools/make_cookbook.py
|
import sys
import os
body = r"""
.. raw:: html
[<a href="{code}">source code</a>]
.. literalinclude:: {code}
:language: python
:linenos:
"""
def recipe_to_rst(recipe):
"""
Convert a .py recipe to a .rst entry for sphinx
"""
sys.stderr.write("Converting {} to rst ...".format(recipe))
recipe_file = os.path.split(recipe)[-1]
recipe_name = os.path.splitext(recipe_file)[0]
output = recipe_name + '.rst'
# Get the title from the first lines of the recipe docstring
title = ''
with open(recipe) as f:
for line in f.readlines()[1:]:
line = line.strip()
if line == '"""' or not line:
break
title = ' '.join([title, line])
with open(output, 'w') as f:
f.write('.. _cookbook_{}:\n\n'.format(recipe_name))
f.write(title.strip() + '\n')
f.write('='*len(title) + '\n')
f.write(body.format(
code='../_static/cookbook/{}'.format(recipe_file)))
sys.stderr.write(" done\n")
if __name__ == '__main__':
for recipe in sys.argv[1:]:
recipe_to_rst(recipe)
|
import sys
import os
body = r"""
**Download** source code: :download:`{recipe}<{code}>`
.. literalinclude:: {code}
:language: python
"""
def recipe_to_rst(recipe):
"""
Convert a .py recipe to a .rst entry for sphinx
"""
sys.stderr.write("Converting {} to rst ...".format(recipe))
recipe_file = os.path.split(recipe)[-1]
recipe_name = os.path.splitext(recipe_file)[0]
output = recipe_name + '.rst'
# Get the title from the first lines of the recipe docstring
title = ''
with open(recipe) as f:
for line in f.readlines()[1:]:
line = line.strip()
if line == '"""' or not line:
break
title = ' '.join([title, line])
with open(output, 'w') as f:
f.write('.. _cookbook_{}:\n\n'.format(recipe_name))
f.write(title.strip() + '\n')
f.write('='*len(title) + '\n')
f.write(body.format(
recipe=recipe_file,
code='../_static/cookbook/{}'.format(recipe_file)))
sys.stderr.write(" done\n")
if __name__ == '__main__':
for recipe in sys.argv[1:]:
recipe_to_rst(recipe)
|
Remove line numbers from recipe code
|
Remove line numbers from recipe code
The larger font made the numbers not match the code.
Added better link text to download the recipe.
|
Python
|
bsd-3-clause
|
santis19/fatiando,rafaelmds/fatiando,drandykass/fatiando,eusoubrasileiro/fatiando,eusoubrasileiro/fatiando,eusoubrasileiro/fatiando_seismic,fatiando/fatiando,eusoubrasileiro/fatiando_seismic,victortxa/fatiando,fatiando/fatiando,rafaelmds/fatiando,victortxa/fatiando,cmeessen/fatiando,santis19/fatiando,mtb-za/fatiando,eusoubrasileiro/fatiando,cmeessen/fatiando,mtb-za/fatiando,drandykass/fatiando
|
import sys
import os
body = r"""
- .. raw:: html
- [<a href="{code}">source code</a>]
+ **Download** source code: :download:`{recipe}<{code}>`
.. literalinclude:: {code}
:language: python
- :linenos:
"""
def recipe_to_rst(recipe):
"""
Convert a .py recipe to a .rst entry for sphinx
"""
sys.stderr.write("Converting {} to rst ...".format(recipe))
recipe_file = os.path.split(recipe)[-1]
recipe_name = os.path.splitext(recipe_file)[0]
output = recipe_name + '.rst'
# Get the title from the first lines of the recipe docstring
title = ''
with open(recipe) as f:
for line in f.readlines()[1:]:
line = line.strip()
if line == '"""' or not line:
break
title = ' '.join([title, line])
with open(output, 'w') as f:
f.write('.. _cookbook_{}:\n\n'.format(recipe_name))
f.write(title.strip() + '\n')
f.write('='*len(title) + '\n')
f.write(body.format(
+ recipe=recipe_file,
code='../_static/cookbook/{}'.format(recipe_file)))
sys.stderr.write(" done\n")
if __name__ == '__main__':
for recipe in sys.argv[1:]:
recipe_to_rst(recipe)
|
Remove line numbers from recipe code
|
## Code Before:
import sys
import os
body = r"""
.. raw:: html
[<a href="{code}">source code</a>]
.. literalinclude:: {code}
:language: python
:linenos:
"""
def recipe_to_rst(recipe):
"""
Convert a .py recipe to a .rst entry for sphinx
"""
sys.stderr.write("Converting {} to rst ...".format(recipe))
recipe_file = os.path.split(recipe)[-1]
recipe_name = os.path.splitext(recipe_file)[0]
output = recipe_name + '.rst'
# Get the title from the first lines of the recipe docstring
title = ''
with open(recipe) as f:
for line in f.readlines()[1:]:
line = line.strip()
if line == '"""' or not line:
break
title = ' '.join([title, line])
with open(output, 'w') as f:
f.write('.. _cookbook_{}:\n\n'.format(recipe_name))
f.write(title.strip() + '\n')
f.write('='*len(title) + '\n')
f.write(body.format(
code='../_static/cookbook/{}'.format(recipe_file)))
sys.stderr.write(" done\n")
if __name__ == '__main__':
for recipe in sys.argv[1:]:
recipe_to_rst(recipe)
## Instruction:
Remove line numbers from recipe code
## Code After:
import sys
import os
body = r"""
**Download** source code: :download:`{recipe}<{code}>`
.. literalinclude:: {code}
:language: python
"""
def recipe_to_rst(recipe):
"""
Convert a .py recipe to a .rst entry for sphinx
"""
sys.stderr.write("Converting {} to rst ...".format(recipe))
recipe_file = os.path.split(recipe)[-1]
recipe_name = os.path.splitext(recipe_file)[0]
output = recipe_name + '.rst'
# Get the title from the first lines of the recipe docstring
title = ''
with open(recipe) as f:
for line in f.readlines()[1:]:
line = line.strip()
if line == '"""' or not line:
break
title = ' '.join([title, line])
with open(output, 'w') as f:
f.write('.. _cookbook_{}:\n\n'.format(recipe_name))
f.write(title.strip() + '\n')
f.write('='*len(title) + '\n')
f.write(body.format(
recipe=recipe_file,
code='../_static/cookbook/{}'.format(recipe_file)))
sys.stderr.write(" done\n")
if __name__ == '__main__':
for recipe in sys.argv[1:]:
recipe_to_rst(recipe)
|
# ... existing code ...
body = r"""
**Download** source code: :download:`{recipe}<{code}>`
# ... modified code ...
:language: python
"""
...
f.write(body.format(
recipe=recipe_file,
code='../_static/cookbook/{}'.format(recipe_file)))
# ... rest of the code ...
|
c498bb6ac7a80ac2668fef22fa6600de6fc9af89
|
dakota/plugins/base.py
|
dakota/plugins/base.py
|
"""An abstract base class for all Dakota component plugins."""
from abc import ABCMeta, abstractmethod
class PluginBase(object):
"""Describe features common to all Dakota plugins."""
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self, **kwargs):
"""Define default attributes."""
pass
@abstractmethod
def setup(self):
"""Configure component inputs."""
pass
@abstractmethod
def call(self):
"""Call the component through the shell."""
pass
@abstractmethod
def load(self):
"""Read data from a component output file."""
pass
@abstractmethod
def calculate(self):
"""Calculate Dakota response functions."""
pass
@abstractmethod
def write(self):
"""Write a Dakota results file."""
pass
|
"""An abstract base class for all Dakota component plugins."""
from abc import ABCMeta, abstractmethod
class PluginBase(object):
"""Describe features common to all Dakota plugins."""
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self, **kwargs):
"""Define default attributes."""
pass
@abstractmethod
def setup(self, config):
"""Configure component inputs.
Sets attributes using information from the run configuration
file. The Dakota parsing utility ``dprepro`` reads parameters
from Dakota to create a new input file from a template.
Parameters
----------
config : dict
Stores configuration settings for a Dakota experiment.
"""
pass
@abstractmethod
def call(self):
"""Call the component through the shell."""
pass
@abstractmethod
def load(self, output_file):
"""Read data from a component output file.
Parameters
----------
output_file : str
The path to a component output file.
Returns
-------
array_like
A numpy array, or None on an error.
"""
pass
@abstractmethod
def calculate(self):
"""Calculate Dakota response functions."""
pass
@abstractmethod
def write(self, params_file, results_file):
"""Write a Dakota results file.
Parameters
----------
params_file : str
A Dakota parameters file.
results_file : str
A Dakota results file.
"""
pass
|
Update argument lists for abstract methods
|
Update argument lists for abstract methods
|
Python
|
mit
|
csdms/dakota,csdms/dakota
|
"""An abstract base class for all Dakota component plugins."""
from abc import ABCMeta, abstractmethod
class PluginBase(object):
"""Describe features common to all Dakota plugins."""
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self, **kwargs):
"""Define default attributes."""
pass
@abstractmethod
- def setup(self):
+ def setup(self, config):
- """Configure component inputs."""
+ """Configure component inputs.
+
+ Sets attributes using information from the run configuration
+ file. The Dakota parsing utility ``dprepro`` reads parameters
+ from Dakota to create a new input file from a template.
+
+ Parameters
+ ----------
+ config : dict
+ Stores configuration settings for a Dakota experiment.
+
+ """
pass
@abstractmethod
def call(self):
"""Call the component through the shell."""
pass
@abstractmethod
- def load(self):
+ def load(self, output_file):
- """Read data from a component output file."""
+ """Read data from a component output file.
+
+ Parameters
+ ----------
+ output_file : str
+ The path to a component output file.
+
+ Returns
+ -------
+ array_like
+ A numpy array, or None on an error.
+
+ """
pass
@abstractmethod
def calculate(self):
"""Calculate Dakota response functions."""
pass
@abstractmethod
- def write(self):
+ def write(self, params_file, results_file):
- """Write a Dakota results file."""
+ """Write a Dakota results file.
+
+ Parameters
+ ----------
+ params_file : str
+ A Dakota parameters file.
+ results_file : str
+ A Dakota results file.
+
+ """
pass
|
Update argument lists for abstract methods
|
## Code Before:
"""An abstract base class for all Dakota component plugins."""
from abc import ABCMeta, abstractmethod
class PluginBase(object):
"""Describe features common to all Dakota plugins."""
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self, **kwargs):
"""Define default attributes."""
pass
@abstractmethod
def setup(self):
"""Configure component inputs."""
pass
@abstractmethod
def call(self):
"""Call the component through the shell."""
pass
@abstractmethod
def load(self):
"""Read data from a component output file."""
pass
@abstractmethod
def calculate(self):
"""Calculate Dakota response functions."""
pass
@abstractmethod
def write(self):
"""Write a Dakota results file."""
pass
## Instruction:
Update argument lists for abstract methods
## Code After:
"""An abstract base class for all Dakota component plugins."""
from abc import ABCMeta, abstractmethod
class PluginBase(object):
"""Describe features common to all Dakota plugins."""
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self, **kwargs):
"""Define default attributes."""
pass
@abstractmethod
def setup(self, config):
"""Configure component inputs.
Sets attributes using information from the run configuration
file. The Dakota parsing utility ``dprepro`` reads parameters
from Dakota to create a new input file from a template.
Parameters
----------
config : dict
Stores configuration settings for a Dakota experiment.
"""
pass
@abstractmethod
def call(self):
"""Call the component through the shell."""
pass
@abstractmethod
def load(self, output_file):
"""Read data from a component output file.
Parameters
----------
output_file : str
The path to a component output file.
Returns
-------
array_like
A numpy array, or None on an error.
"""
pass
@abstractmethod
def calculate(self):
"""Calculate Dakota response functions."""
pass
@abstractmethod
def write(self, params_file, results_file):
"""Write a Dakota results file.
Parameters
----------
params_file : str
A Dakota parameters file.
results_file : str
A Dakota results file.
"""
pass
|
# ... existing code ...
@abstractmethod
def setup(self, config):
"""Configure component inputs.
Sets attributes using information from the run configuration
file. The Dakota parsing utility ``dprepro`` reads parameters
from Dakota to create a new input file from a template.
Parameters
----------
config : dict
Stores configuration settings for a Dakota experiment.
"""
pass
# ... modified code ...
@abstractmethod
def load(self, output_file):
"""Read data from a component output file.
Parameters
----------
output_file : str
The path to a component output file.
Returns
-------
array_like
A numpy array, or None on an error.
"""
pass
...
@abstractmethod
def write(self, params_file, results_file):
"""Write a Dakota results file.
Parameters
----------
params_file : str
A Dakota parameters file.
results_file : str
A Dakota results file.
"""
pass
# ... rest of the code ...
|
a329770bdd5fdc6a646d6a0b298f0a67c789f86a
|
resolwe/flow/migrations/0029_storage_m2m.py
|
resolwe/flow/migrations/0029_storage_m2m.py
|
from __future__ import unicode_literals
from django.db import migrations, models
def set_data_relation(apps, schema_editor):
Data = apps.get_model('flow', 'Data')
Storage = apps.get_model('flow', 'Storage')
for data in Data.objects.all():
storage = Storage.objects.filter(data_migration_temporary=data).first()
if storage:
storage.data.add(data)
class Migration(migrations.Migration):
dependencies = [
('flow', '0028_add_data_location'),
]
operations = [
migrations.RenameField(
model_name='storage',
old_name='data',
new_name='data_migration_temporary',
),
migrations.AddField(
model_name='storage',
name='data',
field=models.ManyToManyField(related_name='storages', to='flow.Data'),
),
migrations.RunPython(set_data_relation),
migrations.RemoveField(
model_name='storage',
name='data_migration_temporary',
),
]
|
from __future__ import unicode_literals
from django.db import migrations, models
def set_data_relation(apps, schema_editor):
Storage = apps.get_model('flow', 'Storage')
for storage in Storage.objects.all():
storage.data.add(storage.data_migration_temporary)
class Migration(migrations.Migration):
dependencies = [
('flow', '0028_add_data_location'),
]
operations = [
migrations.RenameField(
model_name='storage',
old_name='data',
new_name='data_migration_temporary',
),
migrations.AddField(
model_name='storage',
name='data',
field=models.ManyToManyField(related_name='storages', to='flow.Data'),
),
migrations.RunPython(set_data_relation),
migrations.RemoveField(
model_name='storage',
name='data_migration_temporary',
),
]
|
Fix storage migration to process all storages
|
Fix storage migration to process all storages
|
Python
|
apache-2.0
|
genialis/resolwe,genialis/resolwe
|
from __future__ import unicode_literals
from django.db import migrations, models
def set_data_relation(apps, schema_editor):
- Data = apps.get_model('flow', 'Data')
Storage = apps.get_model('flow', 'Storage')
- for data in Data.objects.all():
+ for storage in Storage.objects.all():
+ storage.data.add(storage.data_migration_temporary)
- storage = Storage.objects.filter(data_migration_temporary=data).first()
- if storage:
- storage.data.add(data)
class Migration(migrations.Migration):
dependencies = [
('flow', '0028_add_data_location'),
]
operations = [
migrations.RenameField(
model_name='storage',
old_name='data',
new_name='data_migration_temporary',
),
migrations.AddField(
model_name='storage',
name='data',
field=models.ManyToManyField(related_name='storages', to='flow.Data'),
),
migrations.RunPython(set_data_relation),
migrations.RemoveField(
model_name='storage',
name='data_migration_temporary',
),
]
|
Fix storage migration to process all storages
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
def set_data_relation(apps, schema_editor):
Data = apps.get_model('flow', 'Data')
Storage = apps.get_model('flow', 'Storage')
for data in Data.objects.all():
storage = Storage.objects.filter(data_migration_temporary=data).first()
if storage:
storage.data.add(data)
class Migration(migrations.Migration):
dependencies = [
('flow', '0028_add_data_location'),
]
operations = [
migrations.RenameField(
model_name='storage',
old_name='data',
new_name='data_migration_temporary',
),
migrations.AddField(
model_name='storage',
name='data',
field=models.ManyToManyField(related_name='storages', to='flow.Data'),
),
migrations.RunPython(set_data_relation),
migrations.RemoveField(
model_name='storage',
name='data_migration_temporary',
),
]
## Instruction:
Fix storage migration to process all storages
## Code After:
from __future__ import unicode_literals
from django.db import migrations, models
def set_data_relation(apps, schema_editor):
Storage = apps.get_model('flow', 'Storage')
for storage in Storage.objects.all():
storage.data.add(storage.data_migration_temporary)
class Migration(migrations.Migration):
dependencies = [
('flow', '0028_add_data_location'),
]
operations = [
migrations.RenameField(
model_name='storage',
old_name='data',
new_name='data_migration_temporary',
),
migrations.AddField(
model_name='storage',
name='data',
field=models.ManyToManyField(related_name='storages', to='flow.Data'),
),
migrations.RunPython(set_data_relation),
migrations.RemoveField(
model_name='storage',
name='data_migration_temporary',
),
]
|
// ... existing code ...
def set_data_relation(apps, schema_editor):
Storage = apps.get_model('flow', 'Storage')
// ... modified code ...
for storage in Storage.objects.all():
storage.data.add(storage.data_migration_temporary)
// ... rest of the code ...
|
5da62bbe9df92df58dea742120f4e78555509bd0
|
lib/log_processor.py
|
lib/log_processor.py
|
import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(self.conf['file_name'], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
|
import glob
import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(glob.glob(self.conf['file_name'])[0], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
|
Support file globbing for log processor since names could be dynamic (based on hostname, etc.).
|
Support file globbing for log processor since names could be dynamic (based on hostname, etc.).
|
Python
|
mit
|
mk23/snmpy,mk23/snmpy
|
+ import glob
import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
- for line in snmpy.plugin.tail(self.conf['file_name'], True):
+ for line in snmpy.plugin.tail(glob.glob(self.conf['file_name'])[0], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
|
Support file globbing for log processor since names could be dynamic (based on hostname, etc.).
|
## Code Before:
import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(self.conf['file_name'], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
## Instruction:
Support file globbing for log processor since names could be dynamic (based on hostname, etc.).
## Code After:
import glob
import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(glob.glob(self.conf['file_name'])[0], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
|
...
import glob
import re
...
def tail(self):
for line in snmpy.plugin.tail(glob.glob(self.conf['file_name'])[0], True):
if line is True:
...
|
87844a776c2d409bdf7eaa99da06d07d77d7098e
|
tests/test_gingerit.py
|
tests/test_gingerit.py
|
import pytest
from gingerit.gingerit import GingerIt
@pytest.mark.parametrize("text,expected", [
(
"The smelt of fliwers bring back memories.",
"The smell of flowers brings back memories."
),
(
"Edwards will be sck yesterday",
"Edwards was sick yesterday"
),
(
"Edwards was sick yesterday.",
"Edwards was sick yesterday."
),
(
"",
""
)
])
def test_gingerit(text, expected):
parser = GingerIt()
assert parser.parse(text)["result"] == expected
|
import pytest
from gingerit.gingerit import GingerIt
@pytest.mark.parametrize("text,expected,corrections", [
(
"The smelt of fliwers bring back memories.",
"The smell of flowers brings back memories.",
[
{'start': 21, 'definition': None, 'correct': u'brings', 'text': 'bring'},
{'start': 13, 'definition': u'a plant cultivated for its blooms or blossoms', 'correct': u'flowers',
'text': 'fliwers'},
{'start': 4, 'definition': None, 'correct': u'smell', 'text': 'smelt'}
]
),
(
"Edwards will be sck yesterday",
"Edwards was sick yesterday",
[
{'start': 16, 'definition': u'affected by an impairment of normal physical or mental function',
'correct': u'sick', 'text': 'sck'},
{'start': 8, 'definition': None, 'correct': u'was', 'text': 'will be'}
]
),
(
"Edwards was sick yesterday.",
"Edwards was sick yesterday.",
[]
),
(
"",
"",
[]
)
])
def test_gingerit(text, expected, corrections):
output = GingerIt().parse(text)
assert output["result"] == expected
assert output["corrections"] == corrections
|
Extend test to cover corrections output
|
Extend test to cover corrections output
|
Python
|
mit
|
Azd325/gingerit
|
import pytest
from gingerit.gingerit import GingerIt
- @pytest.mark.parametrize("text,expected", [
+ @pytest.mark.parametrize("text,expected,corrections", [
(
"The smelt of fliwers bring back memories.",
- "The smell of flowers brings back memories."
+ "The smell of flowers brings back memories.",
+ [
+ {'start': 21, 'definition': None, 'correct': u'brings', 'text': 'bring'},
+ {'start': 13, 'definition': u'a plant cultivated for its blooms or blossoms', 'correct': u'flowers',
+ 'text': 'fliwers'},
+ {'start': 4, 'definition': None, 'correct': u'smell', 'text': 'smelt'}
+ ]
),
(
"Edwards will be sck yesterday",
- "Edwards was sick yesterday"
+ "Edwards was sick yesterday",
+ [
+ {'start': 16, 'definition': u'affected by an impairment of normal physical or mental function',
+ 'correct': u'sick', 'text': 'sck'},
+ {'start': 8, 'definition': None, 'correct': u'was', 'text': 'will be'}
+ ]
),
(
"Edwards was sick yesterday.",
- "Edwards was sick yesterday."
+ "Edwards was sick yesterday.",
+ []
),
(
"",
- ""
+ "",
+ []
)
])
- def test_gingerit(text, expected):
+ def test_gingerit(text, expected, corrections):
+ output = GingerIt().parse(text)
- parser = GingerIt()
- assert parser.parse(text)["result"] == expected
+ assert output["result"] == expected
+ assert output["corrections"] == corrections
+
|
Extend test to cover corrections output
|
## Code Before:
import pytest
from gingerit.gingerit import GingerIt
@pytest.mark.parametrize("text,expected", [
(
"The smelt of fliwers bring back memories.",
"The smell of flowers brings back memories."
),
(
"Edwards will be sck yesterday",
"Edwards was sick yesterday"
),
(
"Edwards was sick yesterday.",
"Edwards was sick yesterday."
),
(
"",
""
)
])
def test_gingerit(text, expected):
parser = GingerIt()
assert parser.parse(text)["result"] == expected
## Instruction:
Extend test to cover corrections output
## Code After:
import pytest
from gingerit.gingerit import GingerIt
@pytest.mark.parametrize("text,expected,corrections", [
(
"The smelt of fliwers bring back memories.",
"The smell of flowers brings back memories.",
[
{'start': 21, 'definition': None, 'correct': u'brings', 'text': 'bring'},
{'start': 13, 'definition': u'a plant cultivated for its blooms or blossoms', 'correct': u'flowers',
'text': 'fliwers'},
{'start': 4, 'definition': None, 'correct': u'smell', 'text': 'smelt'}
]
),
(
"Edwards will be sck yesterday",
"Edwards was sick yesterday",
[
{'start': 16, 'definition': u'affected by an impairment of normal physical or mental function',
'correct': u'sick', 'text': 'sck'},
{'start': 8, 'definition': None, 'correct': u'was', 'text': 'will be'}
]
),
(
"Edwards was sick yesterday.",
"Edwards was sick yesterday.",
[]
),
(
"",
"",
[]
)
])
def test_gingerit(text, expected, corrections):
output = GingerIt().parse(text)
assert output["result"] == expected
assert output["corrections"] == corrections
|
# ... existing code ...
@pytest.mark.parametrize("text,expected,corrections", [
(
# ... modified code ...
"The smelt of fliwers bring back memories.",
"The smell of flowers brings back memories.",
[
{'start': 21, 'definition': None, 'correct': u'brings', 'text': 'bring'},
{'start': 13, 'definition': u'a plant cultivated for its blooms or blossoms', 'correct': u'flowers',
'text': 'fliwers'},
{'start': 4, 'definition': None, 'correct': u'smell', 'text': 'smelt'}
]
),
...
"Edwards will be sck yesterday",
"Edwards was sick yesterday",
[
{'start': 16, 'definition': u'affected by an impairment of normal physical or mental function',
'correct': u'sick', 'text': 'sck'},
{'start': 8, 'definition': None, 'correct': u'was', 'text': 'will be'}
]
),
...
"Edwards was sick yesterday.",
"Edwards was sick yesterday.",
[]
),
...
"",
"",
[]
)
...
])
def test_gingerit(text, expected, corrections):
output = GingerIt().parse(text)
assert output["result"] == expected
assert output["corrections"] == corrections
# ... rest of the code ...
|
31073969ed99dd6f57ff1959c050fd0f8f59f58c
|
tests/scipy_argrelextrema.py
|
tests/scipy_argrelextrema.py
|
import numpy as np
from vector import vector, plot_peaks
import scipy.signal
print('Detect peaks without any filters (maxima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks without any filters (minima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.less
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks with order (distance) filter.')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater,
order=2
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
mpd=2, algorithm='scipy.signal.argrelmax'
)
|
import numpy as np
from vector import vector, plot_peaks
import scipy.signal
print('Detect peaks without any filters (maxima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater
)
print('Peaks are: %s' % (indexes[0]))
# To get number of peaks:
# print("{} peaks".format(len(indexes[0])))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks without any filters (minima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.less
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks with order (distance) filter.')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater,
order=2
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
mpd=2, algorithm='scipy.signal.argrelmax'
)
|
Add eg to get number of peaks
|
Add eg to get number of peaks
|
Python
|
mit
|
MonsieurV/py-findpeaks,MonsieurV/py-findpeaks
|
import numpy as np
from vector import vector, plot_peaks
import scipy.signal
print('Detect peaks without any filters (maxima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater
)
print('Peaks are: %s' % (indexes[0]))
+ # To get number of peaks:
+ # print("{} peaks".format(len(indexes[0])))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks without any filters (minima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.less
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks with order (distance) filter.')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater,
order=2
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
mpd=2, algorithm='scipy.signal.argrelmax'
)
|
Add eg to get number of peaks
|
## Code Before:
import numpy as np
from vector import vector, plot_peaks
import scipy.signal
print('Detect peaks without any filters (maxima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks without any filters (minima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.less
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks with order (distance) filter.')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater,
order=2
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
mpd=2, algorithm='scipy.signal.argrelmax'
)
## Instruction:
Add eg to get number of peaks
## Code After:
import numpy as np
from vector import vector, plot_peaks
import scipy.signal
print('Detect peaks without any filters (maxima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater
)
print('Peaks are: %s' % (indexes[0]))
# To get number of peaks:
# print("{} peaks".format(len(indexes[0])))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks without any filters (minima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.less
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks with order (distance) filter.')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater,
order=2
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
mpd=2, algorithm='scipy.signal.argrelmax'
)
|
# ... existing code ...
print('Peaks are: %s' % (indexes[0]))
# To get number of peaks:
# print("{} peaks".format(len(indexes[0])))
plot_peaks(
# ... rest of the code ...
|
91141713b672f56a8c45f0250b7e9216a69237f8
|
features/support/splinter_client.py
|
features/support/splinter_client.py
|
import logging
from pymongo import MongoClient
from splinter import Browser
from features.support.http_test_client import HTTPTestClient
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs')
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
|
import logging
from pymongo import MongoClient
from splinter import Browser
from features.support.http_test_client import HTTPTestClient
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs', wait_time=15)
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
|
Increase splinter wait time to 15 seconds
|
Increase splinter wait time to 15 seconds
@gtrogers
@maxfliri
|
Python
|
mit
|
alphagov/backdrop,alphagov/backdrop,alphagov/backdrop
|
import logging
from pymongo import MongoClient
from splinter import Browser
from features.support.http_test_client import HTTPTestClient
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
- self.browser = Browser('phantomjs')
+ self.browser = Browser('phantomjs', wait_time=15)
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
|
Increase splinter wait time to 15 seconds
|
## Code Before:
import logging
from pymongo import MongoClient
from splinter import Browser
from features.support.http_test_client import HTTPTestClient
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs')
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
## Instruction:
Increase splinter wait time to 15 seconds
## Code After:
import logging
from pymongo import MongoClient
from splinter import Browser
from features.support.http_test_client import HTTPTestClient
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs', wait_time=15)
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
|
# ... existing code ...
def before_scenario(self):
self.browser = Browser('phantomjs', wait_time=15)
# ... rest of the code ...
|
763939db37e3b9f93f1201aada4e893bbe478249
|
17-createWallOfWoolWithRandomColour.py
|
17-createWallOfWoolWithRandomColour.py
|
from mcpi.minecraft import *
# import needed block defintiions
from mcpi.block import *
# needed to slow down the wall building
from time import sleep
# needed to generate a random number for the colour of wool
from random import randint
if __name__ == "__main__":
# Create a connection to the Minecraft game
mc = Minecraft.create()
# Get the player position
playerPosition = mc.player.getTilePos()
# define the position of the bottom left block of the wall
blockXposn = playerPosition.x + 6
firstColumnX = blockXposn
blockYposn = playerPosition.y + 1
blockZposn = playerPosition.z + 6
# Create a wall using nested for loops
for row in range(6):
# increase the height of th current row to be built
blockYposn += 1
blockXposn = firstColumnX
for column in range(10):
#increase the distance along the row that the block is placed at
blockXposn += 1
#Generate a random number within the allowed range of colours
randomNumber = randint(0,15)
print("random number to be used = "+ str(randomNumber))
print("Creating block at", blockXposn, blockYposn)
# Create a block
mc.setBlock(blockXposn, blockYposn, blockZposn, WOOL.withData(randomNumber))
sleep(0.5)
|
from mcpi.minecraft import *
# import needed block defintiions
from mcpi.block import WOOL
# needed to slow down the wall building
from time import sleep
# needed to generate a random number for the colour of wool
from random import randint
# create a function to create a random block of wool
def getWoolBlockWithRandomColour():
#Generate a random number within the allowed range of colours (0 to 15 inclusive)
randomNumber = randint(0,15)
print("random number to be used = "+ str(randomNumber))
block = WOOL.withData(randomNumber)
return block
if __name__ == "__main__":
# Create a connection to the Minecraft game
mc = Minecraft.create()
# Get the player position
playerPosition = mc.player.getTilePos()
# define the position of the bottom left block of the wall
blockXposn = playerPosition.x + 6
firstColumnX = blockXposn
blockYposn = playerPosition.y + 1
blockZposn = playerPosition.z + 6
# Create a wall using nested for loops
for row in range(6):
# increase the height of th current row to be built
blockYposn += 1
blockXposn = firstColumnX
for column in range(10):
#increase the distance along the row that the block is placed at
blockXposn += 1
print("Creating block at", blockXposn, blockYposn, blockZposn)
# Create a block
mc.setBlock(blockXposn, blockYposn, blockZposn, getWoolBlockWithRandomColour())
sleep(0.5)
|
Change to use function for block generation
|
Change to use function for block generation
Use a separate function for random block generation
move code from loop to the function
|
Python
|
bsd-3-clause
|
hashbangstudio/Python-Minecraft-Examples
|
from mcpi.minecraft import *
# import needed block defintiions
- from mcpi.block import *
+ from mcpi.block import WOOL
# needed to slow down the wall building
from time import sleep
# needed to generate a random number for the colour of wool
from random import randint
+
+ # create a function to create a random block of wool
+ def getWoolBlockWithRandomColour():
+ #Generate a random number within the allowed range of colours (0 to 15 inclusive)
+ randomNumber = randint(0,15)
+ print("random number to be used = "+ str(randomNumber))
+ block = WOOL.withData(randomNumber)
+ return block
+
if __name__ == "__main__":
# Create a connection to the Minecraft game
mc = Minecraft.create()
# Get the player position
playerPosition = mc.player.getTilePos()
# define the position of the bottom left block of the wall
blockXposn = playerPosition.x + 6
firstColumnX = blockXposn
blockYposn = playerPosition.y + 1
blockZposn = playerPosition.z + 6
# Create a wall using nested for loops
for row in range(6):
# increase the height of th current row to be built
blockYposn += 1
blockXposn = firstColumnX
for column in range(10):
#increase the distance along the row that the block is placed at
blockXposn += 1
- #Generate a random number within the allowed range of colours
- randomNumber = randint(0,15)
- print("random number to be used = "+ str(randomNumber))
- print("Creating block at", blockXposn, blockYposn)
+ print("Creating block at", blockXposn, blockYposn, blockZposn)
# Create a block
- mc.setBlock(blockXposn, blockYposn, blockZposn, WOOL.withData(randomNumber))
+ mc.setBlock(blockXposn, blockYposn, blockZposn, getWoolBlockWithRandomColour())
sleep(0.5)
|
Change to use function for block generation
|
## Code Before:
from mcpi.minecraft import *
# import needed block defintiions
from mcpi.block import *
# needed to slow down the wall building
from time import sleep
# needed to generate a random number for the colour of wool
from random import randint
if __name__ == "__main__":
# Create a connection to the Minecraft game
mc = Minecraft.create()
# Get the player position
playerPosition = mc.player.getTilePos()
# define the position of the bottom left block of the wall
blockXposn = playerPosition.x + 6
firstColumnX = blockXposn
blockYposn = playerPosition.y + 1
blockZposn = playerPosition.z + 6
# Create a wall using nested for loops
for row in range(6):
# increase the height of th current row to be built
blockYposn += 1
blockXposn = firstColumnX
for column in range(10):
#increase the distance along the row that the block is placed at
blockXposn += 1
#Generate a random number within the allowed range of colours
randomNumber = randint(0,15)
print("random number to be used = "+ str(randomNumber))
print("Creating block at", blockXposn, blockYposn)
# Create a block
mc.setBlock(blockXposn, blockYposn, blockZposn, WOOL.withData(randomNumber))
sleep(0.5)
## Instruction:
Change to use function for block generation
## Code After:
from mcpi.minecraft import *
# import needed block defintiions
from mcpi.block import WOOL
# needed to slow down the wall building
from time import sleep
# needed to generate a random number for the colour of wool
from random import randint
# create a function to create a random block of wool
def getWoolBlockWithRandomColour():
#Generate a random number within the allowed range of colours (0 to 15 inclusive)
randomNumber = randint(0,15)
print("random number to be used = "+ str(randomNumber))
block = WOOL.withData(randomNumber)
return block
if __name__ == "__main__":
# Create a connection to the Minecraft game
mc = Minecraft.create()
# Get the player position
playerPosition = mc.player.getTilePos()
# define the position of the bottom left block of the wall
blockXposn = playerPosition.x + 6
firstColumnX = blockXposn
blockYposn = playerPosition.y + 1
blockZposn = playerPosition.z + 6
# Create a wall using nested for loops
for row in range(6):
# increase the height of th current row to be built
blockYposn += 1
blockXposn = firstColumnX
for column in range(10):
#increase the distance along the row that the block is placed at
blockXposn += 1
print("Creating block at", blockXposn, blockYposn, blockZposn)
# Create a block
mc.setBlock(blockXposn, blockYposn, blockZposn, getWoolBlockWithRandomColour())
sleep(0.5)
|
...
# import needed block defintiions
from mcpi.block import WOOL
# needed to slow down the wall building
...
from random import randint
# create a function to create a random block of wool
def getWoolBlockWithRandomColour():
#Generate a random number within the allowed range of colours (0 to 15 inclusive)
randomNumber = randint(0,15)
print("random number to be used = "+ str(randomNumber))
block = WOOL.withData(randomNumber)
return block
...
blockXposn += 1
print("Creating block at", blockXposn, blockYposn, blockZposn)
# Create a block
mc.setBlock(blockXposn, blockYposn, blockZposn, getWoolBlockWithRandomColour())
sleep(0.5)
...
|
9310be1429109f5324502f7e66318e23f5ea489d
|
test/test_terminate_handler.py
|
test/test_terminate_handler.py
|
import uuid
from handler_fixture import StationHandlerTestCase
from groundstation.transfer.request_handlers import handle_fetchobject
from groundstation.transfer.response_handlers import handle_terminate
class TestHandlerTerminate(StationHandlerTestCase):
def test_handle_terminate(self):
# Write an object into the station
oid = self.station.station.write("butts lol")
self.station.payload = oid
self.station.id = uuid.uuid1()
self.assertEqual(len(self.station.station.registry.contents), 0)
self.station.station.register_request(self.station)
self.assertEqual(len(self.station.station.registry.contents), 1)
handle_fetchobject(self.station)
term = self.station.stream.pop()
handle_terminate(term)
self.assertEqual(len(self.station.station.registry.contents), 0)
|
import uuid
from handler_fixture import StationHandlerTestCase
from groundstation.transfer.request_handlers import handle_fetchobject
from groundstation.transfer.response_handlers import handle_terminate
class TestHandlerTerminate(StationHandlerTestCase):
def test_handle_terminate(self):
# Write an object into the station
oid = self.station.station.write("butts lol")
self.station.payload = oid
self.station.id = uuid.uuid1()
self.assertEqual(len(self.station.station.registry.contents), 0)
self.station.station.register_request(self.station)
self.assertEqual(len(self.station.station.registry.contents), 1)
handle_fetchobject(self.station)
ret = [0]
def _teardown():
ret[0] += 1
self.station.teardown = _teardown
term = self.station.stream.pop()
handle_terminate(term)
self.assertEqual(len(self.station.station.registry.contents), 0)
self.assertEqual(ret[0], 1)
|
Test that teardown methods are actually called
|
Test that teardown methods are actually called
|
Python
|
mit
|
richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
|
import uuid
from handler_fixture import StationHandlerTestCase
from groundstation.transfer.request_handlers import handle_fetchobject
from groundstation.transfer.response_handlers import handle_terminate
class TestHandlerTerminate(StationHandlerTestCase):
def test_handle_terminate(self):
# Write an object into the station
oid = self.station.station.write("butts lol")
self.station.payload = oid
self.station.id = uuid.uuid1()
self.assertEqual(len(self.station.station.registry.contents), 0)
self.station.station.register_request(self.station)
self.assertEqual(len(self.station.station.registry.contents), 1)
handle_fetchobject(self.station)
+ ret = [0]
+
+ def _teardown():
+ ret[0] += 1
+ self.station.teardown = _teardown
term = self.station.stream.pop()
handle_terminate(term)
self.assertEqual(len(self.station.station.registry.contents), 0)
+ self.assertEqual(ret[0], 1)
|
Test that teardown methods are actually called
|
## Code Before:
import uuid
from handler_fixture import StationHandlerTestCase
from groundstation.transfer.request_handlers import handle_fetchobject
from groundstation.transfer.response_handlers import handle_terminate
class TestHandlerTerminate(StationHandlerTestCase):
def test_handle_terminate(self):
# Write an object into the station
oid = self.station.station.write("butts lol")
self.station.payload = oid
self.station.id = uuid.uuid1()
self.assertEqual(len(self.station.station.registry.contents), 0)
self.station.station.register_request(self.station)
self.assertEqual(len(self.station.station.registry.contents), 1)
handle_fetchobject(self.station)
term = self.station.stream.pop()
handle_terminate(term)
self.assertEqual(len(self.station.station.registry.contents), 0)
## Instruction:
Test that teardown methods are actually called
## Code After:
import uuid
from handler_fixture import StationHandlerTestCase
from groundstation.transfer.request_handlers import handle_fetchobject
from groundstation.transfer.response_handlers import handle_terminate
class TestHandlerTerminate(StationHandlerTestCase):
def test_handle_terminate(self):
# Write an object into the station
oid = self.station.station.write("butts lol")
self.station.payload = oid
self.station.id = uuid.uuid1()
self.assertEqual(len(self.station.station.registry.contents), 0)
self.station.station.register_request(self.station)
self.assertEqual(len(self.station.station.registry.contents), 1)
handle_fetchobject(self.station)
ret = [0]
def _teardown():
ret[0] += 1
self.station.teardown = _teardown
term = self.station.stream.pop()
handle_terminate(term)
self.assertEqual(len(self.station.station.registry.contents), 0)
self.assertEqual(ret[0], 1)
|
...
handle_fetchobject(self.station)
ret = [0]
def _teardown():
ret[0] += 1
self.station.teardown = _teardown
...
self.assertEqual(len(self.station.station.registry.contents), 0)
self.assertEqual(ret[0], 1)
...
|
8b3c438b3f5fb9b2538a30182dd4f5d306aa098b
|
ankieta/contact/forms.py
|
ankieta/contact/forms.py
|
from django import forms
from django.core.mail import mail_managers
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from .models import Contact
class ContactForm(forms.Form):
personsList = forms.ModelChoiceField(required=True, label=_("Contact person"),
queryset=Contact.objects.all())
topic = forms.CharField(required=True, max_length=150,
label=_("Topic of messages"))
body = forms.CharField(required=True, widget=forms.Textarea(), label=_("Content"))
email = forms.EmailField(required=True, label=_("E-mail"))
def __init__(self, *args, **kwargs):
super(ContactForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_action = reverse('contact:form')
self.helper.form_method = 'post'
self.helper.add_input(Submit('submit', _('Send'), css_class="btn-lg btn-block"))
def get_text(self):
return "%(body)s \n\nE-mail: %(email)s" % self.cleaned_data
def send(self):
mail_managers(self.cleaned_data['topic'], self.get_text())
|
from django import forms
from django.core.mail import send_mail
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django.conf import settings
from .models import Contact
def my_mail_send(subject, recipient, message):
subject = '%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject)
from_email = settings.SERVER_EMAIL
return send_mail(subject, message, from_email, [recipient])
class ContactForm(forms.Form):
recipient = forms.ModelChoiceField(required=True, label=_("Contact person"),
queryset=Contact.objects.all())
topic = forms.CharField(required=True, max_length=150,
label=_("Topic of messages"))
body = forms.CharField(required=True, widget=forms.Textarea(), label=_("Content"))
email = forms.EmailField(required=True, label=_("E-mail"))
def __init__(self, *args, **kwargs):
super(ContactForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_action = reverse('contact:form')
self.helper.form_method = 'post'
self.helper.add_input(Submit('submit', _('Send'), css_class="btn-lg btn-block"))
def get_text(self):
return "%(body)s \n\nE-mail: %(email)s" % self.cleaned_data
def send(self):
my_mail_send(subject=self.cleaned_data['topic'],
recipient=self.cleaned_data['recipient'].email,
message=self.get_text())
|
Fix contact form - send to recipient, not managers
|
Fix contact form - send to recipient, not managers
|
Python
|
bsd-3-clause
|
watchdogpolska/prezydent.siecobywatelska.pl,watchdogpolska/prezydent.siecobywatelska.pl,watchdogpolska/prezydent.siecobywatelska.pl
|
from django import forms
- from django.core.mail import mail_managers
+ from django.core.mail import send_mail
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
+ from django.conf import settings
from .models import Contact
+ def my_mail_send(subject, recipient, message):
+ subject = '%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject)
+ from_email = settings.SERVER_EMAIL
+ return send_mail(subject, message, from_email, [recipient])
+
+
class ContactForm(forms.Form):
- personsList = forms.ModelChoiceField(required=True, label=_("Contact person"),
+ recipient = forms.ModelChoiceField(required=True, label=_("Contact person"),
- queryset=Contact.objects.all())
+ queryset=Contact.objects.all())
topic = forms.CharField(required=True, max_length=150,
label=_("Topic of messages"))
body = forms.CharField(required=True, widget=forms.Textarea(), label=_("Content"))
email = forms.EmailField(required=True, label=_("E-mail"))
def __init__(self, *args, **kwargs):
super(ContactForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_action = reverse('contact:form')
self.helper.form_method = 'post'
self.helper.add_input(Submit('submit', _('Send'), css_class="btn-lg btn-block"))
def get_text(self):
return "%(body)s \n\nE-mail: %(email)s" % self.cleaned_data
def send(self):
- mail_managers(self.cleaned_data['topic'], self.get_text())
+ my_mail_send(subject=self.cleaned_data['topic'],
+ recipient=self.cleaned_data['recipient'].email,
+ message=self.get_text())
|
Fix contact form - send to recipient, not managers
|
## Code Before:
from django import forms
from django.core.mail import mail_managers
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from .models import Contact
class ContactForm(forms.Form):
personsList = forms.ModelChoiceField(required=True, label=_("Contact person"),
queryset=Contact.objects.all())
topic = forms.CharField(required=True, max_length=150,
label=_("Topic of messages"))
body = forms.CharField(required=True, widget=forms.Textarea(), label=_("Content"))
email = forms.EmailField(required=True, label=_("E-mail"))
def __init__(self, *args, **kwargs):
super(ContactForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_action = reverse('contact:form')
self.helper.form_method = 'post'
self.helper.add_input(Submit('submit', _('Send'), css_class="btn-lg btn-block"))
def get_text(self):
return "%(body)s \n\nE-mail: %(email)s" % self.cleaned_data
def send(self):
mail_managers(self.cleaned_data['topic'], self.get_text())
## Instruction:
Fix contact form - send to recipient, not managers
## Code After:
from django import forms
from django.core.mail import send_mail
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django.conf import settings
from .models import Contact
def my_mail_send(subject, recipient, message):
subject = '%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject)
from_email = settings.SERVER_EMAIL
return send_mail(subject, message, from_email, [recipient])
class ContactForm(forms.Form):
recipient = forms.ModelChoiceField(required=True, label=_("Contact person"),
queryset=Contact.objects.all())
topic = forms.CharField(required=True, max_length=150,
label=_("Topic of messages"))
body = forms.CharField(required=True, widget=forms.Textarea(), label=_("Content"))
email = forms.EmailField(required=True, label=_("E-mail"))
def __init__(self, *args, **kwargs):
super(ContactForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_action = reverse('contact:form')
self.helper.form_method = 'post'
self.helper.add_input(Submit('submit', _('Send'), css_class="btn-lg btn-block"))
def get_text(self):
return "%(body)s \n\nE-mail: %(email)s" % self.cleaned_data
def send(self):
my_mail_send(subject=self.cleaned_data['topic'],
recipient=self.cleaned_data['recipient'].email,
message=self.get_text())
|
...
from django import forms
from django.core.mail import send_mail
from django.utils.translation import ugettext as _
...
from crispy_forms.layout import Submit
from django.conf import settings
from .models import Contact
...
def my_mail_send(subject, recipient, message):
subject = '%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject)
from_email = settings.SERVER_EMAIL
return send_mail(subject, message, from_email, [recipient])
class ContactForm(forms.Form):
recipient = forms.ModelChoiceField(required=True, label=_("Contact person"),
queryset=Contact.objects.all())
topic = forms.CharField(required=True, max_length=150,
...
def send(self):
my_mail_send(subject=self.cleaned_data['topic'],
recipient=self.cleaned_data['recipient'].email,
message=self.get_text())
...
|
695ee95faf0ae80f0c69bf47e881af22ab0f00cd
|
l10n_it_esigibilita_iva/models/account.py
|
l10n_it_esigibilita_iva/models/account.py
|
from odoo import models, fields
class AccountTax(models.Model):
_inherit = 'account.tax'
payability = fields.Selection([
('I', 'Immediate payability'),
('D', 'Deferred payability'),
('S', 'Split payment'),
], string="VAT payability")
|
from odoo import models, fields
class AccountTax(models.Model):
_inherit = 'account.tax'
payability = fields.Selection([
('I', 'VAT payable immediately'),
('D', 'unrealized VAT'),
('S', 'split payments'),
], string="VAT payability")
|
Use correct english terms, from APPENDIX A -TECHNICAL SPECIFICATIONS
|
Use correct english terms, from APPENDIX A -TECHNICAL SPECIFICATIONS
|
Python
|
agpl-3.0
|
dcorio/l10n-italy,OCA/l10n-italy,OCA/l10n-italy,dcorio/l10n-italy,dcorio/l10n-italy,OCA/l10n-italy
|
from odoo import models, fields
class AccountTax(models.Model):
_inherit = 'account.tax'
payability = fields.Selection([
- ('I', 'Immediate payability'),
- ('D', 'Deferred payability'),
+ ('I', 'VAT payable immediately'),
+ ('D', 'unrealized VAT'),
- ('S', 'Split payment'),
+ ('S', 'split payments'),
], string="VAT payability")
|
Use correct english terms, from APPENDIX A -TECHNICAL SPECIFICATIONS
|
## Code Before:
from odoo import models, fields
class AccountTax(models.Model):
_inherit = 'account.tax'
payability = fields.Selection([
('I', 'Immediate payability'),
('D', 'Deferred payability'),
('S', 'Split payment'),
], string="VAT payability")
## Instruction:
Use correct english terms, from APPENDIX A -TECHNICAL SPECIFICATIONS
## Code After:
from odoo import models, fields
class AccountTax(models.Model):
_inherit = 'account.tax'
payability = fields.Selection([
('I', 'VAT payable immediately'),
('D', 'unrealized VAT'),
('S', 'split payments'),
], string="VAT payability")
|
// ... existing code ...
payability = fields.Selection([
('I', 'VAT payable immediately'),
('D', 'unrealized VAT'),
('S', 'split payments'),
], string="VAT payability")
// ... rest of the code ...
|
faae5df8648afbfa5921bd67a7f3e082ba626a95
|
poyo/__init__.py
|
poyo/__init__.py
|
__author__ = 'Raphael Pierzina'
__email__ = '[email protected]'
__version__ = '0.1.0'
from .parser import parse_string
__all__ = ['parse_string']
|
from .parser import parse_string
__author__ = 'Raphael Pierzina'
__email__ = '[email protected]'
__version__ = '0.1.0'
__all__ = ['parse_string']
|
Move module level import to top of file
|
Move module level import to top of file
|
Python
|
mit
|
hackebrot/poyo
|
+
+ from .parser import parse_string
__author__ = 'Raphael Pierzina'
__email__ = '[email protected]'
__version__ = '0.1.0'
- from .parser import parse_string
-
__all__ = ['parse_string']
|
Move module level import to top of file
|
## Code Before:
__author__ = 'Raphael Pierzina'
__email__ = '[email protected]'
__version__ = '0.1.0'
from .parser import parse_string
__all__ = ['parse_string']
## Instruction:
Move module level import to top of file
## Code After:
from .parser import parse_string
__author__ = 'Raphael Pierzina'
__email__ = '[email protected]'
__version__ = '0.1.0'
__all__ = ['parse_string']
|
...
from .parser import parse_string
...
__all__ = ['parse_string']
...
|
65c3f9fa4e31bc2c1c448846faba4af58bfd5e61
|
src/download.py
|
src/download.py
|
import tarfile
import os
from six.moves.urllib import request
url_dir = 'https://www.cs.toronto.edu/~kriz/'
file_name = 'cifar-10-python.tar.gz'
save_dir = 'dataset'
tar_path = os.path.join(save_dir, file_name)
if __name__ == '__main__':
if os.path.exists(tar_path):
print('{:s} already downloaded.'.format(file_name))
else:
print('Downloading {:s}...'.format(file_name))
request.urlretrieve('{:s}{:s}'.format(url_dir, file_name), tar_path)
print('Extracting files...')
with tarfile.open(tar_path, 'r:gz') as f:
f.extractall(save_dir)
|
import tarfile
import os
from six.moves.urllib import request
url_dir = 'https://www.cs.toronto.edu/~kriz/'
file_name = 'cifar-10-python.tar.gz'
save_dir = 'dataset'
tar_path = os.path.join(save_dir, file_name)
if __name__ == '__main__':
if not os.path.exists(save_dir):
os.makedirs(save_dir)
if os.path.exists(tar_path):
print('{:s} already downloaded.'.format(file_name))
else:
print('Downloading {:s}...'.format(file_name))
request.urlretrieve('{:s}{:s}'.format(url_dir, file_name), tar_path)
print('Extracting files...')
with tarfile.open(tar_path, 'r:gz') as f:
f.extractall(save_dir)
|
Make dataset directory if it does not exist.
|
[fix] Make dataset directory if it does not exist.
|
Python
|
mit
|
dsanno/chainer-cifar
|
import tarfile
import os
from six.moves.urllib import request
url_dir = 'https://www.cs.toronto.edu/~kriz/'
file_name = 'cifar-10-python.tar.gz'
save_dir = 'dataset'
tar_path = os.path.join(save_dir, file_name)
if __name__ == '__main__':
+ if not os.path.exists(save_dir):
+ os.makedirs(save_dir)
if os.path.exists(tar_path):
print('{:s} already downloaded.'.format(file_name))
else:
print('Downloading {:s}...'.format(file_name))
request.urlretrieve('{:s}{:s}'.format(url_dir, file_name), tar_path)
print('Extracting files...')
with tarfile.open(tar_path, 'r:gz') as f:
f.extractall(save_dir)
|
Make dataset directory if it does not exist.
|
## Code Before:
import tarfile
import os
from six.moves.urllib import request
url_dir = 'https://www.cs.toronto.edu/~kriz/'
file_name = 'cifar-10-python.tar.gz'
save_dir = 'dataset'
tar_path = os.path.join(save_dir, file_name)
if __name__ == '__main__':
if os.path.exists(tar_path):
print('{:s} already downloaded.'.format(file_name))
else:
print('Downloading {:s}...'.format(file_name))
request.urlretrieve('{:s}{:s}'.format(url_dir, file_name), tar_path)
print('Extracting files...')
with tarfile.open(tar_path, 'r:gz') as f:
f.extractall(save_dir)
## Instruction:
Make dataset directory if it does not exist.
## Code After:
import tarfile
import os
from six.moves.urllib import request
url_dir = 'https://www.cs.toronto.edu/~kriz/'
file_name = 'cifar-10-python.tar.gz'
save_dir = 'dataset'
tar_path = os.path.join(save_dir, file_name)
if __name__ == '__main__':
if not os.path.exists(save_dir):
os.makedirs(save_dir)
if os.path.exists(tar_path):
print('{:s} already downloaded.'.format(file_name))
else:
print('Downloading {:s}...'.format(file_name))
request.urlretrieve('{:s}{:s}'.format(url_dir, file_name), tar_path)
print('Extracting files...')
with tarfile.open(tar_path, 'r:gz') as f:
f.extractall(save_dir)
|
# ... existing code ...
if __name__ == '__main__':
if not os.path.exists(save_dir):
os.makedirs(save_dir)
if os.path.exists(tar_path):
# ... rest of the code ...
|
27723696885319aabea974f83189d3a43770b7d5
|
spillway/fields.py
|
spillway/fields.py
|
"""Serializer fields"""
from django.contrib.gis import forms
from rest_framework.fields import WritableField
from spillway.compat import json
class GeometryField(WritableField):
type_name = 'GeometryField'
type_label = 'geometry'
form_field_class = forms.GeometryField
def to_native(self, value):
# Create a dict from the GEOSGeometry when the value is not previously
# serialized from the spatial db.
try:
return {'type': value.geom_type, 'coordinates': value.coords}
# Value is already serialized as geojson, kml, etc.
except AttributeError:
return value
def from_native(self, value):
# forms.GeometryField cannot handle geojson dicts.
if isinstance(value, dict):
value = json.dumps(value)
return super(GeometryField, self).from_native(value)
|
"""Serializer fields"""
from django.contrib.gis import forms
from rest_framework.fields import FileField, WritableField
from greenwich.raster import Raster
from spillway.compat import json
class GeometryField(WritableField):
type_name = 'GeometryField'
type_label = 'geometry'
form_field_class = forms.GeometryField
def to_native(self, value):
# Create a dict from the GEOSGeometry when the value is not previously
# serialized from the spatial db.
try:
return {'type': value.geom_type, 'coordinates': value.coords}
# Value is already serialized as geojson, kml, etc.
except AttributeError:
return value
def from_native(self, value):
# forms.GeometryField cannot handle geojson dicts.
if isinstance(value, dict):
value = json.dumps(value)
return super(GeometryField, self).from_native(value)
class NDArrayField(FileField):
type_name = 'NDArrayField'
type_label = 'ndarray'
def to_native(self, value):
params = self.context.get('params', {})
geom = params.get('g')
with Raster(getattr(value, 'path', value)) as r:
arr = r.clip(geom).masked_array() if geom else r.array()
return arr.tolist()
|
Add numpy array serializer field
|
Add numpy array serializer field
|
Python
|
bsd-3-clause
|
bkg/django-spillway,barseghyanartur/django-spillway,kuzmich/django-spillway
|
"""Serializer fields"""
from django.contrib.gis import forms
- from rest_framework.fields import WritableField
+ from rest_framework.fields import FileField, WritableField
+ from greenwich.raster import Raster
from spillway.compat import json
class GeometryField(WritableField):
type_name = 'GeometryField'
type_label = 'geometry'
form_field_class = forms.GeometryField
def to_native(self, value):
# Create a dict from the GEOSGeometry when the value is not previously
# serialized from the spatial db.
try:
return {'type': value.geom_type, 'coordinates': value.coords}
# Value is already serialized as geojson, kml, etc.
except AttributeError:
return value
def from_native(self, value):
# forms.GeometryField cannot handle geojson dicts.
if isinstance(value, dict):
value = json.dumps(value)
return super(GeometryField, self).from_native(value)
+
+ class NDArrayField(FileField):
+ type_name = 'NDArrayField'
+ type_label = 'ndarray'
+
+ def to_native(self, value):
+ params = self.context.get('params', {})
+ geom = params.get('g')
+ with Raster(getattr(value, 'path', value)) as r:
+ arr = r.clip(geom).masked_array() if geom else r.array()
+ return arr.tolist()
+
|
Add numpy array serializer field
|
## Code Before:
"""Serializer fields"""
from django.contrib.gis import forms
from rest_framework.fields import WritableField
from spillway.compat import json
class GeometryField(WritableField):
type_name = 'GeometryField'
type_label = 'geometry'
form_field_class = forms.GeometryField
def to_native(self, value):
# Create a dict from the GEOSGeometry when the value is not previously
# serialized from the spatial db.
try:
return {'type': value.geom_type, 'coordinates': value.coords}
# Value is already serialized as geojson, kml, etc.
except AttributeError:
return value
def from_native(self, value):
# forms.GeometryField cannot handle geojson dicts.
if isinstance(value, dict):
value = json.dumps(value)
return super(GeometryField, self).from_native(value)
## Instruction:
Add numpy array serializer field
## Code After:
"""Serializer fields"""
from django.contrib.gis import forms
from rest_framework.fields import FileField, WritableField
from greenwich.raster import Raster
from spillway.compat import json
class GeometryField(WritableField):
type_name = 'GeometryField'
type_label = 'geometry'
form_field_class = forms.GeometryField
def to_native(self, value):
# Create a dict from the GEOSGeometry when the value is not previously
# serialized from the spatial db.
try:
return {'type': value.geom_type, 'coordinates': value.coords}
# Value is already serialized as geojson, kml, etc.
except AttributeError:
return value
def from_native(self, value):
# forms.GeometryField cannot handle geojson dicts.
if isinstance(value, dict):
value = json.dumps(value)
return super(GeometryField, self).from_native(value)
class NDArrayField(FileField):
type_name = 'NDArrayField'
type_label = 'ndarray'
def to_native(self, value):
params = self.context.get('params', {})
geom = params.get('g')
with Raster(getattr(value, 'path', value)) as r:
arr = r.clip(geom).masked_array() if geom else r.array()
return arr.tolist()
|
# ... existing code ...
from django.contrib.gis import forms
from rest_framework.fields import FileField, WritableField
from greenwich.raster import Raster
# ... modified code ...
return super(GeometryField, self).from_native(value)
class NDArrayField(FileField):
type_name = 'NDArrayField'
type_label = 'ndarray'
def to_native(self, value):
params = self.context.get('params', {})
geom = params.get('g')
with Raster(getattr(value, 'path', value)) as r:
arr = r.clip(geom).masked_array() if geom else r.array()
return arr.tolist()
# ... rest of the code ...
|
de1ff8a480cc6d6e86bb179e6820ab9f21145679
|
byceps/services/user/event_service.py
|
byceps/services/user/event_service.py
|
from datetime import datetime
from typing import Sequence
from ...database import db
from ...typing import UserID
from .models.event import UserEvent, UserEventData
def create_event(event_type: str, user_id: UserID, data: UserEventData) -> None:
"""Create a user event."""
event = _build_event(event_type, user_id, data)
db.session.add(event)
db.session.commit()
def _build_event(event_type: str, user_id: UserID, data: UserEventData
) -> UserEvent:
"""Assemble, but not persist, a user event."""
now = datetime.utcnow()
return UserEvent(now, event_type, user_id, data)
def get_events_for_user(user_id: UserID) -> Sequence[UserEvent]:
"""Return the events for that user."""
return UserEvent.query \
.filter_by(user_id=user_id) \
.order_by(UserEvent.occurred_at) \
.all()
|
from datetime import datetime
from typing import Optional, Sequence
from ...database import db
from ...typing import UserID
from .models.event import UserEvent, UserEventData
def create_event(event_type: str, user_id: UserID, data: UserEventData) -> None:
"""Create a user event."""
event = _build_event(event_type, user_id, data)
db.session.add(event)
db.session.commit()
def _build_event(event_type: str, user_id: UserID, data: UserEventData,
occurred_at: Optional[datetime]=None) -> UserEvent:
"""Assemble, but not persist, a user event."""
if occurred_at is None:
occurred_at = datetime.utcnow()
return UserEvent(occurred_at, event_type, user_id, data)
def get_events_for_user(user_id: UserID) -> Sequence[UserEvent]:
"""Return the events for that user."""
return UserEvent.query \
.filter_by(user_id=user_id) \
.order_by(UserEvent.occurred_at) \
.all()
|
Allow to provide a custom `occurred_at` value when building a user event
|
Allow to provide a custom `occurred_at` value when building a user event
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps
|
from datetime import datetime
- from typing import Sequence
+ from typing import Optional, Sequence
from ...database import db
from ...typing import UserID
from .models.event import UserEvent, UserEventData
def create_event(event_type: str, user_id: UserID, data: UserEventData) -> None:
"""Create a user event."""
event = _build_event(event_type, user_id, data)
db.session.add(event)
db.session.commit()
- def _build_event(event_type: str, user_id: UserID, data: UserEventData
+ def _build_event(event_type: str, user_id: UserID, data: UserEventData,
- ) -> UserEvent:
+ occurred_at: Optional[datetime]=None) -> UserEvent:
"""Assemble, but not persist, a user event."""
+ if occurred_at is None:
- now = datetime.utcnow()
+ occurred_at = datetime.utcnow()
- return UserEvent(now, event_type, user_id, data)
+ return UserEvent(occurred_at, event_type, user_id, data)
def get_events_for_user(user_id: UserID) -> Sequence[UserEvent]:
"""Return the events for that user."""
return UserEvent.query \
.filter_by(user_id=user_id) \
.order_by(UserEvent.occurred_at) \
.all()
|
Allow to provide a custom `occurred_at` value when building a user event
|
## Code Before:
from datetime import datetime
from typing import Sequence
from ...database import db
from ...typing import UserID
from .models.event import UserEvent, UserEventData
def create_event(event_type: str, user_id: UserID, data: UserEventData) -> None:
"""Create a user event."""
event = _build_event(event_type, user_id, data)
db.session.add(event)
db.session.commit()
def _build_event(event_type: str, user_id: UserID, data: UserEventData
) -> UserEvent:
"""Assemble, but not persist, a user event."""
now = datetime.utcnow()
return UserEvent(now, event_type, user_id, data)
def get_events_for_user(user_id: UserID) -> Sequence[UserEvent]:
"""Return the events for that user."""
return UserEvent.query \
.filter_by(user_id=user_id) \
.order_by(UserEvent.occurred_at) \
.all()
## Instruction:
Allow to provide a custom `occurred_at` value when building a user event
## Code After:
from datetime import datetime
from typing import Optional, Sequence
from ...database import db
from ...typing import UserID
from .models.event import UserEvent, UserEventData
def create_event(event_type: str, user_id: UserID, data: UserEventData) -> None:
"""Create a user event."""
event = _build_event(event_type, user_id, data)
db.session.add(event)
db.session.commit()
def _build_event(event_type: str, user_id: UserID, data: UserEventData,
occurred_at: Optional[datetime]=None) -> UserEvent:
"""Assemble, but not persist, a user event."""
if occurred_at is None:
occurred_at = datetime.utcnow()
return UserEvent(occurred_at, event_type, user_id, data)
def get_events_for_user(user_id: UserID) -> Sequence[UserEvent]:
"""Return the events for that user."""
return UserEvent.query \
.filter_by(user_id=user_id) \
.order_by(UserEvent.occurred_at) \
.all()
|
...
from datetime import datetime
from typing import Optional, Sequence
...
def _build_event(event_type: str, user_id: UserID, data: UserEventData,
occurred_at: Optional[datetime]=None) -> UserEvent:
"""Assemble, but not persist, a user event."""
if occurred_at is None:
occurred_at = datetime.utcnow()
return UserEvent(occurred_at, event_type, user_id, data)
...
|
48ab19d9f81fc9973249e600f938586182fe6c7b
|
shop/rest/auth.py
|
shop/rest/auth.py
|
from __future__ import unicode_literals
from django.conf import settings
from django.template.loader import select_template
from rest_framework.serializers import CharField
from rest_auth import serializers
from shop import settings as shop_settings
class PasswordResetSerializer(serializers.PasswordResetSerializer):
def save(self):
subject_template = select_template([
'{}/email/reset-password-subject.txt'.format(shop_settings.APP_LABEL),
'shop/email/reset-password-subject.txt',
])
body_template = select_template([
'{}/email/reset-password-body.txt'.format(shop_settings.APP_LABEL),
'shop/email/reset-password-body.txt',
])
opts = {
'use_https': self.context['request'].is_secure(),
'from_email': getattr(settings, 'DEFAULT_FROM_EMAIL'),
'request': self.context['request'],
'subject_template_name': subject_template.name,
'email_template_name': body_template.name,
}
self.reset_form.save(**opts)
class PasswordResetConfirmSerializer(serializers.PasswordResetConfirmSerializer):
new_password1 = CharField(min_length=6, max_length=128)
new_password2 = CharField(min_length=6, max_length=128)
|
from __future__ import unicode_literals
from django.conf import settings
from django.template.loader import select_template
from rest_framework.serializers import CharField
from rest_auth import serializers
from shop import settings as shop_settings
class PasswordResetSerializer(serializers.PasswordResetSerializer):
def save(self):
subject_template = select_template([
'{}/email/reset-password-subject.txt'.format(shop_settings.APP_LABEL),
'shop/email/reset-password-subject.txt',
])
body_template = select_template([
'{}/email/reset-password-body.txt'.format(shop_settings.APP_LABEL),
'shop/email/reset-password-body.txt',
])
opts = {
'use_https': self.context['request'].is_secure(),
'from_email': getattr(settings, 'DEFAULT_FROM_EMAIL'),
'request': self.context['request'],
'subject_template_name': subject_template.template.name,
'email_template_name': body_template.template.name,
}
self.reset_form.save(**opts)
class PasswordResetConfirmSerializer(serializers.PasswordResetConfirmSerializer):
new_password1 = CharField(min_length=6, max_length=128)
new_password2 = CharField(min_length=6, max_length=128)
|
Fix a failing test for PasswordResetSerializer
|
Fix a failing test for PasswordResetSerializer
It seems that Django's template API changed. This should adjust to that.
|
Python
|
bsd-3-clause
|
awesto/django-shop,nimbis/django-shop,nimbis/django-shop,khchine5/django-shop,jrief/django-shop,rfleschenberg/django-shop,awesto/django-shop,khchine5/django-shop,divio/django-shop,divio/django-shop,nimbis/django-shop,jrief/django-shop,nimbis/django-shop,rfleschenberg/django-shop,jrief/django-shop,rfleschenberg/django-shop,jrief/django-shop,rfleschenberg/django-shop,khchine5/django-shop,khchine5/django-shop,awesto/django-shop,divio/django-shop
|
from __future__ import unicode_literals
from django.conf import settings
from django.template.loader import select_template
from rest_framework.serializers import CharField
from rest_auth import serializers
from shop import settings as shop_settings
class PasswordResetSerializer(serializers.PasswordResetSerializer):
def save(self):
subject_template = select_template([
'{}/email/reset-password-subject.txt'.format(shop_settings.APP_LABEL),
'shop/email/reset-password-subject.txt',
])
body_template = select_template([
'{}/email/reset-password-body.txt'.format(shop_settings.APP_LABEL),
'shop/email/reset-password-body.txt',
])
opts = {
'use_https': self.context['request'].is_secure(),
'from_email': getattr(settings, 'DEFAULT_FROM_EMAIL'),
'request': self.context['request'],
- 'subject_template_name': subject_template.name,
+ 'subject_template_name': subject_template.template.name,
- 'email_template_name': body_template.name,
+ 'email_template_name': body_template.template.name,
}
self.reset_form.save(**opts)
class PasswordResetConfirmSerializer(serializers.PasswordResetConfirmSerializer):
new_password1 = CharField(min_length=6, max_length=128)
new_password2 = CharField(min_length=6, max_length=128)
|
Fix a failing test for PasswordResetSerializer
|
## Code Before:
from __future__ import unicode_literals
from django.conf import settings
from django.template.loader import select_template
from rest_framework.serializers import CharField
from rest_auth import serializers
from shop import settings as shop_settings
class PasswordResetSerializer(serializers.PasswordResetSerializer):
def save(self):
subject_template = select_template([
'{}/email/reset-password-subject.txt'.format(shop_settings.APP_LABEL),
'shop/email/reset-password-subject.txt',
])
body_template = select_template([
'{}/email/reset-password-body.txt'.format(shop_settings.APP_LABEL),
'shop/email/reset-password-body.txt',
])
opts = {
'use_https': self.context['request'].is_secure(),
'from_email': getattr(settings, 'DEFAULT_FROM_EMAIL'),
'request': self.context['request'],
'subject_template_name': subject_template.name,
'email_template_name': body_template.name,
}
self.reset_form.save(**opts)
class PasswordResetConfirmSerializer(serializers.PasswordResetConfirmSerializer):
new_password1 = CharField(min_length=6, max_length=128)
new_password2 = CharField(min_length=6, max_length=128)
## Instruction:
Fix a failing test for PasswordResetSerializer
## Code After:
from __future__ import unicode_literals
from django.conf import settings
from django.template.loader import select_template
from rest_framework.serializers import CharField
from rest_auth import serializers
from shop import settings as shop_settings
class PasswordResetSerializer(serializers.PasswordResetSerializer):
def save(self):
subject_template = select_template([
'{}/email/reset-password-subject.txt'.format(shop_settings.APP_LABEL),
'shop/email/reset-password-subject.txt',
])
body_template = select_template([
'{}/email/reset-password-body.txt'.format(shop_settings.APP_LABEL),
'shop/email/reset-password-body.txt',
])
opts = {
'use_https': self.context['request'].is_secure(),
'from_email': getattr(settings, 'DEFAULT_FROM_EMAIL'),
'request': self.context['request'],
'subject_template_name': subject_template.template.name,
'email_template_name': body_template.template.name,
}
self.reset_form.save(**opts)
class PasswordResetConfirmSerializer(serializers.PasswordResetConfirmSerializer):
new_password1 = CharField(min_length=6, max_length=128)
new_password2 = CharField(min_length=6, max_length=128)
|
# ... existing code ...
'request': self.context['request'],
'subject_template_name': subject_template.template.name,
'email_template_name': body_template.template.name,
}
# ... rest of the code ...
|
14414263ef7578ec0c710e99de0f62c49319c6be
|
saw-remote-api/python/tests/saw/test_provers.py
|
saw-remote-api/python/tests/saw/test_provers.py
|
from cryptol import cryptoltypes
from cryptol.bitvector import BV
import saw
from saw.proofscript import *
import unittest
from pathlib import Path
def cry(exp):
return cryptoltypes.CryptolLiteral(exp)
class ProverTest(unittest.TestCase):
@classmethod
def setUpClass(self):
saw.connect(reset_server=True)
@classmethod
def tearDownClass(self):
saw.reset_server()
saw.disconnect()
def test_provers(self):
if __name__ == "__main__": saw.view(saw.LogResults())
simple_thm = cry('\(x:[8]) -> x != x+1')
self.assertTrue(saw.prove(simple_thm, ProofScript([abc])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([yices([])])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([z3([])])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([Admit()])).is_valid())
self.assertTrue(saw.prove(cry('True'), ProofScript([Trivial()])).is_valid())
simple_non_thm = cry('\(x:[8]) -> x != 5')
pr = saw.prove(simple_non_thm, ProofScript([z3([])]))
self.assertFalse(pr.is_valid())
cex = pr.get_counterexample()
self.assertEqual(cex, [('x', BV(8, 0x05))])
if __name__ == "__main__":
unittest.main()
|
from cryptol import cryptoltypes
from cryptol.bitvector import BV
import saw
from saw.proofscript import *
import unittest
from pathlib import Path
def cry(exp):
return cryptoltypes.CryptolLiteral(exp)
class ProverTest(unittest.TestCase):
@classmethod
def setUpClass(self):
saw.connect(reset_server=True)
@classmethod
def tearDownClass(self):
saw.reset_server()
saw.disconnect()
def test_provers(self):
if __name__ == "__main__": saw.view(saw.LogResults())
simple_thm = cry('\(x:[8]) -> x != x+1')
self.assertTrue(saw.prove(simple_thm, ProofScript([abc])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([z3([])])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([Admit()])).is_valid())
self.assertTrue(saw.prove(cry('True'), ProofScript([Trivial()])).is_valid())
simple_non_thm = cry('\(x:[8]) -> x != 5')
pr = saw.prove(simple_non_thm, ProofScript([z3([])]))
self.assertFalse(pr.is_valid())
cex = pr.get_counterexample()
self.assertEqual(cex, [('x', BV(8, 0x05))])
if __name__ == "__main__":
unittest.main()
|
Remove Yices test from RPC prover test
|
Remove Yices test from RPC prover test
|
Python
|
bsd-3-clause
|
GaloisInc/saw-script,GaloisInc/saw-script,GaloisInc/saw-script,GaloisInc/saw-script,GaloisInc/saw-script
|
from cryptol import cryptoltypes
from cryptol.bitvector import BV
import saw
from saw.proofscript import *
import unittest
from pathlib import Path
def cry(exp):
return cryptoltypes.CryptolLiteral(exp)
class ProverTest(unittest.TestCase):
@classmethod
def setUpClass(self):
saw.connect(reset_server=True)
@classmethod
def tearDownClass(self):
saw.reset_server()
saw.disconnect()
def test_provers(self):
if __name__ == "__main__": saw.view(saw.LogResults())
simple_thm = cry('\(x:[8]) -> x != x+1')
self.assertTrue(saw.prove(simple_thm, ProofScript([abc])).is_valid())
- self.assertTrue(saw.prove(simple_thm, ProofScript([yices([])])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([z3([])])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([Admit()])).is_valid())
self.assertTrue(saw.prove(cry('True'), ProofScript([Trivial()])).is_valid())
simple_non_thm = cry('\(x:[8]) -> x != 5')
pr = saw.prove(simple_non_thm, ProofScript([z3([])]))
self.assertFalse(pr.is_valid())
cex = pr.get_counterexample()
self.assertEqual(cex, [('x', BV(8, 0x05))])
if __name__ == "__main__":
unittest.main()
|
Remove Yices test from RPC prover test
|
## Code Before:
from cryptol import cryptoltypes
from cryptol.bitvector import BV
import saw
from saw.proofscript import *
import unittest
from pathlib import Path
def cry(exp):
return cryptoltypes.CryptolLiteral(exp)
class ProverTest(unittest.TestCase):
@classmethod
def setUpClass(self):
saw.connect(reset_server=True)
@classmethod
def tearDownClass(self):
saw.reset_server()
saw.disconnect()
def test_provers(self):
if __name__ == "__main__": saw.view(saw.LogResults())
simple_thm = cry('\(x:[8]) -> x != x+1')
self.assertTrue(saw.prove(simple_thm, ProofScript([abc])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([yices([])])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([z3([])])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([Admit()])).is_valid())
self.assertTrue(saw.prove(cry('True'), ProofScript([Trivial()])).is_valid())
simple_non_thm = cry('\(x:[8]) -> x != 5')
pr = saw.prove(simple_non_thm, ProofScript([z3([])]))
self.assertFalse(pr.is_valid())
cex = pr.get_counterexample()
self.assertEqual(cex, [('x', BV(8, 0x05))])
if __name__ == "__main__":
unittest.main()
## Instruction:
Remove Yices test from RPC prover test
## Code After:
from cryptol import cryptoltypes
from cryptol.bitvector import BV
import saw
from saw.proofscript import *
import unittest
from pathlib import Path
def cry(exp):
return cryptoltypes.CryptolLiteral(exp)
class ProverTest(unittest.TestCase):
@classmethod
def setUpClass(self):
saw.connect(reset_server=True)
@classmethod
def tearDownClass(self):
saw.reset_server()
saw.disconnect()
def test_provers(self):
if __name__ == "__main__": saw.view(saw.LogResults())
simple_thm = cry('\(x:[8]) -> x != x+1')
self.assertTrue(saw.prove(simple_thm, ProofScript([abc])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([z3([])])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([Admit()])).is_valid())
self.assertTrue(saw.prove(cry('True'), ProofScript([Trivial()])).is_valid())
simple_non_thm = cry('\(x:[8]) -> x != 5')
pr = saw.prove(simple_non_thm, ProofScript([z3([])]))
self.assertFalse(pr.is_valid())
cex = pr.get_counterexample()
self.assertEqual(cex, [('x', BV(8, 0x05))])
if __name__ == "__main__":
unittest.main()
|
...
self.assertTrue(saw.prove(simple_thm, ProofScript([abc])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([z3([])])).is_valid())
...
|
7dd228d7eaad6b1f37ff3c4d954aebe0ffa99170
|
tests/test_targets/test_targets.py
|
tests/test_targets/test_targets.py
|
import os
from unittest import TestCase
from project_generator_definitions.definitions import ProGenTargets
class TestAllTargets(TestCase):
"""test all targets"""
def setUp(self):
self.progen_target = ProGenTargets()
self.targets_list = self.progen_target.get_targets()
def test_targets_validity(self):
for target in self.targets_list:
record = self.progen_target.get_target_record(target)
assert record['target']['name'][0]
assert record['target']['mcu'][0]
def test_targets_mcu_validity(self):
for target in self.targets_list:
mcu = self.progen_target.get_mcu_record(target)
assert mcu['mcu']
assert mcu['mcu']['name']
assert mcu['mcu']['core']
|
from unittest import TestCase
from project_generator_definitions.definitions import ProGenTargets
class TestAllTargets(TestCase):
"""test all targets"""
def setUp(self):
self.progen_target = ProGenTargets()
self.targets_list = self.progen_target.get_targets()
def test_targets_validity(self):
# Cehck for required info for targets
for target in self.targets_list:
record = self.progen_target.get_target_record(target)
assert record['target']['name'][0]
assert record['target']['mcu'][0]
def test_targets_mcu_validity(self):
# Check for required info in mcu
for target in self.targets_list:
mcu = self.progen_target.get_mcu_record(target)
assert mcu['mcu'][0]
assert mcu['mcu']['name'][0]
assert mcu['mcu']['core'][0]
|
Test - targets test fix mcu validity indexes
|
Test - targets test fix mcu validity indexes
|
Python
|
apache-2.0
|
project-generator/project_generator_definitions,0xc0170/project_generator_definitions,ohagendorf/project_generator_definitions
|
- import os
from unittest import TestCase
from project_generator_definitions.definitions import ProGenTargets
class TestAllTargets(TestCase):
"""test all targets"""
def setUp(self):
self.progen_target = ProGenTargets()
self.targets_list = self.progen_target.get_targets()
def test_targets_validity(self):
+ # Cehck for required info for targets
for target in self.targets_list:
record = self.progen_target.get_target_record(target)
assert record['target']['name'][0]
assert record['target']['mcu'][0]
def test_targets_mcu_validity(self):
+ # Check for required info in mcu
for target in self.targets_list:
mcu = self.progen_target.get_mcu_record(target)
- assert mcu['mcu']
+ assert mcu['mcu'][0]
- assert mcu['mcu']['name']
+ assert mcu['mcu']['name'][0]
- assert mcu['mcu']['core']
+ assert mcu['mcu']['core'][0]
|
Test - targets test fix mcu validity indexes
|
## Code Before:
import os
from unittest import TestCase
from project_generator_definitions.definitions import ProGenTargets
class TestAllTargets(TestCase):
"""test all targets"""
def setUp(self):
self.progen_target = ProGenTargets()
self.targets_list = self.progen_target.get_targets()
def test_targets_validity(self):
for target in self.targets_list:
record = self.progen_target.get_target_record(target)
assert record['target']['name'][0]
assert record['target']['mcu'][0]
def test_targets_mcu_validity(self):
for target in self.targets_list:
mcu = self.progen_target.get_mcu_record(target)
assert mcu['mcu']
assert mcu['mcu']['name']
assert mcu['mcu']['core']
## Instruction:
Test - targets test fix mcu validity indexes
## Code After:
from unittest import TestCase
from project_generator_definitions.definitions import ProGenTargets
class TestAllTargets(TestCase):
"""test all targets"""
def setUp(self):
self.progen_target = ProGenTargets()
self.targets_list = self.progen_target.get_targets()
def test_targets_validity(self):
# Cehck for required info for targets
for target in self.targets_list:
record = self.progen_target.get_target_record(target)
assert record['target']['name'][0]
assert record['target']['mcu'][0]
def test_targets_mcu_validity(self):
# Check for required info in mcu
for target in self.targets_list:
mcu = self.progen_target.get_mcu_record(target)
assert mcu['mcu'][0]
assert mcu['mcu']['name'][0]
assert mcu['mcu']['core'][0]
|
# ... existing code ...
# ... modified code ...
def test_targets_validity(self):
# Cehck for required info for targets
for target in self.targets_list:
...
def test_targets_mcu_validity(self):
# Check for required info in mcu
for target in self.targets_list:
...
mcu = self.progen_target.get_mcu_record(target)
assert mcu['mcu'][0]
assert mcu['mcu']['name'][0]
assert mcu['mcu']['core'][0]
# ... rest of the code ...
|
5e6d52277e34c254bad6b386cf05f490baf6a6f2
|
webapp-django/accounts/models.py
|
webapp-django/accounts/models.py
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solvedChallenges=models.CharField(solved=[],max_length=256)
solvedQuestions=models.CharField(solved=[],max_length=256)
score = models.IntegerField(default=0)
def __str__(self):
return str(self.user.username)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from challenges.models import Challenge
from questionnaire.models import Question
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solved_challenges = models.ManyToManyField(Challenge)
solved_questions = models.ManyToManyField(Question)
score = models.IntegerField(default=0, editable=False)
def __str__(self):
return str(self.user.username)
def calculate_score(self):
score = 0
for chal in self.solved_challenges.all():
score = score + chal.score
for ques in self.solved_questions.all():
score = score + ques.score
return score
def save(self, *args, **kwargs):
'''On save, update score '''
self.score = self.calculate_score()
return super(UserProfile, self).save(*args, **kwargs)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
|
Update accounts model with scoring system
|
Update accounts model with scoring system
|
Python
|
mit
|
super1337/Super1337-CTF,super1337/Super1337-CTF,super1337/Super1337-CTF
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
+ from challenges.models import Challenge
+ from questionnaire.models import Question
+
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
- solvedChallenges=models.CharField(solved=[],max_length=256)
- solvedQuestions=models.CharField(solved=[],max_length=256)
- score = models.IntegerField(default=0)
+ solved_challenges = models.ManyToManyField(Challenge)
+ solved_questions = models.ManyToManyField(Question)
+ score = models.IntegerField(default=0, editable=False)
def __str__(self):
return str(self.user.username)
+
+ def calculate_score(self):
+ score = 0
+ for chal in self.solved_challenges.all():
+ score = score + chal.score
+ for ques in self.solved_questions.all():
+ score = score + ques.score
+
+ return score
+
+ def save(self, *args, **kwargs):
+ '''On save, update score '''
+
+ self.score = self.calculate_score()
+ return super(UserProfile, self).save(*args, **kwargs)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
|
Update accounts model with scoring system
|
## Code Before:
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solvedChallenges=models.CharField(solved=[],max_length=256)
solvedQuestions=models.CharField(solved=[],max_length=256)
score = models.IntegerField(default=0)
def __str__(self):
return str(self.user.username)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
## Instruction:
Update accounts model with scoring system
## Code After:
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from challenges.models import Challenge
from questionnaire.models import Question
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solved_challenges = models.ManyToManyField(Challenge)
solved_questions = models.ManyToManyField(Question)
score = models.IntegerField(default=0, editable=False)
def __str__(self):
return str(self.user.username)
def calculate_score(self):
score = 0
for chal in self.solved_challenges.all():
score = score + chal.score
for ques in self.solved_questions.all():
score = score + ques.score
return score
def save(self, *args, **kwargs):
'''On save, update score '''
self.score = self.calculate_score()
return super(UserProfile, self).save(*args, **kwargs)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
|
// ... existing code ...
from challenges.models import Challenge
from questionnaire.models import Question
// ... modified code ...
bio = models.TextField(max_length=256, blank=True)
solved_challenges = models.ManyToManyField(Challenge)
solved_questions = models.ManyToManyField(Question)
score = models.IntegerField(default=0, editable=False)
...
return str(self.user.username)
def calculate_score(self):
score = 0
for chal in self.solved_challenges.all():
score = score + chal.score
for ques in self.solved_questions.all():
score = score + ques.score
return score
def save(self, *args, **kwargs):
'''On save, update score '''
self.score = self.calculate_score()
return super(UserProfile, self).save(*args, **kwargs)
// ... rest of the code ...
|
4124297475fb7d77bf492e721a74fcfa02547a14
|
benchmark/bench_logger_level_low.py
|
benchmark/bench_logger_level_low.py
|
"""Benchmarks too low logger levels"""
from logbook import Logger, ERROR
log = Logger('Test logger')
log.level = ERROR
def run():
for x in xrange(500):
log.warning('this is not handled')
|
"""Benchmarks too low logger levels"""
from logbook import Logger, StreamHandler, ERROR
from cStringIO import StringIO
log = Logger('Test logger')
log.level = ERROR
def run():
out = StringIO()
with StreamHandler(out):
for x in xrange(500):
log.warning('this is not handled')
|
Create a stream handler even though it's not used to have the same overhead on both logbook and logging
|
Create a stream handler even though it's not used to have the same overhead on both logbook and logging
|
Python
|
bsd-3-clause
|
DasIch/logbook,maykinmedia/logbook,alonho/logbook,fayazkhan/logbook,maykinmedia/logbook,Rafiot/logbook,dvarrazzo/logbook,mbr/logbook,mitsuhiko/logbook,dvarrazzo/logbook,RazerM/logbook,FintanH/logbook,omergertel/logbook,dommert/logbook,DasIch/logbook,alex/logbook,alonho/logbook,pombredanne/logbook,alonho/logbook,alex/logbook,DasIch/logbook,mbr/logbook,fayazkhan/logbook,narfdotpl/logbook,redtoad/logbook,redtoad/logbook,Rafiot/logbook,omergertel/logbook,Rafiot/logbook,omergertel/logbook
|
"""Benchmarks too low logger levels"""
- from logbook import Logger, ERROR
+ from logbook import Logger, StreamHandler, ERROR
+ from cStringIO import StringIO
log = Logger('Test logger')
log.level = ERROR
def run():
+ out = StringIO()
+ with StreamHandler(out):
- for x in xrange(500):
+ for x in xrange(500):
- log.warning('this is not handled')
+ log.warning('this is not handled')
|
Create a stream handler even though it's not used to have the same overhead on both logbook and logging
|
## Code Before:
"""Benchmarks too low logger levels"""
from logbook import Logger, ERROR
log = Logger('Test logger')
log.level = ERROR
def run():
for x in xrange(500):
log.warning('this is not handled')
## Instruction:
Create a stream handler even though it's not used to have the same overhead on both logbook and logging
## Code After:
"""Benchmarks too low logger levels"""
from logbook import Logger, StreamHandler, ERROR
from cStringIO import StringIO
log = Logger('Test logger')
log.level = ERROR
def run():
out = StringIO()
with StreamHandler(out):
for x in xrange(500):
log.warning('this is not handled')
|
# ... existing code ...
"""Benchmarks too low logger levels"""
from logbook import Logger, StreamHandler, ERROR
from cStringIO import StringIO
# ... modified code ...
def run():
out = StringIO()
with StreamHandler(out):
for x in xrange(500):
log.warning('this is not handled')
# ... rest of the code ...
|
d5747c8b0f1a82afecf68aadc6b42c77e586493c
|
tools/perf/benchmarks/rasterize_and_record_micro.py
|
tools/perf/benchmarks/rasterize_and_record_micro.py
|
from measurements import rasterize_and_record_micro
from telemetry import test
@test.Disabled('android', 'linux')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/top_25.json'
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_mobile_sites.json'
|
from measurements import rasterize_and_record_micro
from telemetry import test
@test.Disabled('android', 'linux')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/top_25.json'
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_mobile_sites.json'
class RasterizeAndRecordMicroKeySilkCases(test.Test):
"""Measures rasterize and record performance on the silk sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_silk_cases.json'
|
Add rasterization microbenchmark for silk
|
Add rasterization microbenchmark for silk
Add rasterize_and_record_micro_key_silk_cases for keeping track of
rasterization and recording performance of silk content. This mirrors
the existing rasterize_and_record_key_silk_cases benchmark and will
potentially allow us to remove it if this microbenchmark produces less
noisy data.
BUG=339517
Review URL: https://codereview.chromium.org/177253003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@253403 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
markYoungH/chromium.src,anirudhSK/chromium,ondra-novak/chromium.src,ltilve/chromium,jaruba/chromium.src,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,markYoungH/chromium.src,dednal/chromium.src,ltilve/chromium,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,patrickm/chromium.src,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,M4sse/chromium.src,ondra-novak/chromium.src,dednal/chromium.src,jaruba/chromium.src,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,fujunwei/chromium-crosswalk,dednal/chromium.src,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,M4sse/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,M4sse/chromium.src,Just-D/chromium-1,littlstar/chromium.src,littlstar/chromium.src,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,bright-s
parks/chromium-spacewalk,Just-D/chromium-1,jaruba/chromium.src,dushu1203/chromium.src,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,M4sse/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,jaruba/chromium.src,M4sse/chromium.src,dednal/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,ondra-novak/chromium.src,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,patrickm/chromium.src,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,littlstar/chromium.src,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,Fireblend/chromium-crosswalk,dednal/chromium.src,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,bright-sparks/chromium-spacewalk,Jonekee/chromium.src,axinging/chromium-crosswalk,anirudhSK/chromium,ltilve/chromium,axinging/chromium-crosswalk,dushu1203/chromium.src,fujunwei/chromium-crosswalk,ltilve/chromium,Just-D/chro
mium-1,Chilledheart/chromium,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,Chilledheart/chromium,ondra-novak/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,markYoungH/chromium.src,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,jaruba/chromium.src,jaruba/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,ondra-novak/chromium.src,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,jaruba/chromium.src,patrickm/chromium.src,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,M4sse/chromium.src,krieger-od/nwjs_chromium.src,Chilledheart/chromium,dednal/chromium.src,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,dushu1203/chromium.src,M4sse/chromium.src,littlstar/chromium.src,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,pa
trickm/chromium.src,dednal/chromium.src,ltilve/chromium,anirudhSK/chromium,chuan9/chromium-crosswalk,anirudhSK/chromium,fujunwei/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,patrickm/chromium.src,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,littlstar/chromium.src,Chilledheart/chromium,dushu1203/chromium.src,ltilve/chromium,littlstar/chromium.src,dednal/chromium.src
|
from measurements import rasterize_and_record_micro
from telemetry import test
@test.Disabled('android', 'linux')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/top_25.json'
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_mobile_sites.json'
+
+ class RasterizeAndRecordMicroKeySilkCases(test.Test):
+ """Measures rasterize and record performance on the silk sites.
+
+ http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
+ test = rasterize_and_record_micro.RasterizeAndRecordMicro
+ page_set = 'page_sets/key_silk_cases.json'
+
|
Add rasterization microbenchmark for silk
|
## Code Before:
from measurements import rasterize_and_record_micro
from telemetry import test
@test.Disabled('android', 'linux')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/top_25.json'
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_mobile_sites.json'
## Instruction:
Add rasterization microbenchmark for silk
## Code After:
from measurements import rasterize_and_record_micro
from telemetry import test
@test.Disabled('android', 'linux')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/top_25.json'
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_mobile_sites.json'
class RasterizeAndRecordMicroKeySilkCases(test.Test):
"""Measures rasterize and record performance on the silk sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_silk_cases.json'
|
...
page_set = 'page_sets/key_mobile_sites.json'
class RasterizeAndRecordMicroKeySilkCases(test.Test):
"""Measures rasterize and record performance on the silk sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_silk_cases.json'
...
|
ca214643b2a93bd9362182134624a8641b44aba2
|
tree_stars/tree_stars.py
|
tree_stars/tree_stars.py
|
from sys import argv
def main(levels):
for level in xrange(levels):
for sub_level in xrange(level+2):
spaces = (levels+2-sub_level) * ' '
stars = ((2 * sub_level) + 1) * '*'
print '{spaces}{stars}'.format(spaces=spaces, stars=stars)
if __name__ == '__main__':
main(int(argv[1]))
|
from sys import argv
def main(levels):
for level in xrange(levels):
for sub_level in xrange(level+2):
stars = ((2 * sub_level) + 1) * '*'
print ('{:^' + str(2 * levels + 2) + '}').format(stars)
# alternate method without using format centering
# spaces = (levels+2-sub_level) * ' '
# print '{spaces}{stars}'.format(spaces=spaces, stars=stars)
if __name__ == '__main__':
main(int(argv[1]))
|
Add solution using format method for centering.
|
Add solution using format method for centering.
|
Python
|
mit
|
bm5w/codeeval
|
from sys import argv
def main(levels):
for level in xrange(levels):
for sub_level in xrange(level+2):
- spaces = (levels+2-sub_level) * ' '
stars = ((2 * sub_level) + 1) * '*'
+ print ('{:^' + str(2 * levels + 2) + '}').format(stars)
+ # alternate method without using format centering
+ # spaces = (levels+2-sub_level) * ' '
- print '{spaces}{stars}'.format(spaces=spaces, stars=stars)
+ # print '{spaces}{stars}'.format(spaces=spaces, stars=stars)
-
if __name__ == '__main__':
main(int(argv[1]))
|
Add solution using format method for centering.
|
## Code Before:
from sys import argv
def main(levels):
for level in xrange(levels):
for sub_level in xrange(level+2):
spaces = (levels+2-sub_level) * ' '
stars = ((2 * sub_level) + 1) * '*'
print '{spaces}{stars}'.format(spaces=spaces, stars=stars)
if __name__ == '__main__':
main(int(argv[1]))
## Instruction:
Add solution using format method for centering.
## Code After:
from sys import argv
def main(levels):
for level in xrange(levels):
for sub_level in xrange(level+2):
stars = ((2 * sub_level) + 1) * '*'
print ('{:^' + str(2 * levels + 2) + '}').format(stars)
# alternate method without using format centering
# spaces = (levels+2-sub_level) * ' '
# print '{spaces}{stars}'.format(spaces=spaces, stars=stars)
if __name__ == '__main__':
main(int(argv[1]))
|
# ... existing code ...
for sub_level in xrange(level+2):
stars = ((2 * sub_level) + 1) * '*'
print ('{:^' + str(2 * levels + 2) + '}').format(stars)
# alternate method without using format centering
# spaces = (levels+2-sub_level) * ' '
# print '{spaces}{stars}'.format(spaces=spaces, stars=stars)
# ... rest of the code ...
|
acccb727054d919a2a36854d8bac502274ed3bdd
|
mp3-formatter/rename_mp3.py
|
mp3-formatter/rename_mp3.py
|
import ID3
import os
import sys
def read_tracklist():
tracklist = []
for line in sys.stdin:
tracklist.append(line)
return tracklist
tracklist = read_tracklist()
mp3_extension = ".mp3"
files_all = os.listdir('.')
files = []
for f in files_all:
# Prune directories
if not os.path.isfile(f):
continue
# Prune non-MP3 files
filename, extension = os.path.splitext(f)
if extension != mp3_extension:
continue
# Prune this file
f_temp = os.path.abspath(f)
if f_temp == os.path.abspath(__file__):
continue
files.append(f)
if len(files) != len(tracklist):
raise RuntimeError(
str(len(tracklist)) +
" file names were given but " +
str(len(files)) +
" files were found.")
sys.exit()
files.sort()
i = 0
for f in files:
os.rename(f, tracklist[i] + mp3_extension)
i += 1
|
import ID3
import os
import sys
def read_tracklist():
tracklist = []
for line in sys.stdin:
tracklist.append(line)
return tracklist
def match_length(files, tracklist):
if len(files) != len(tracklist):
raise RuntimeError(
str(len(tracklist)) +
" file names were given but " +
str(len(files)) +
" files were found.")
tracklist = read_tracklist()
mp3_extension = ".mp3"
files_all = os.listdir('.')
files = []
for f in files_all:
# Prune directories
if not os.path.isfile(f):
continue
# Prune non-MP3 files
filename, extension = os.path.splitext(f)
if extension != mp3_extension:
continue
# Prune this file
f_temp = os.path.abspath(f)
if f_temp == os.path.abspath(__file__):
continue
files.append(f)
match_length(files, tracklist)
files.sort()
i = 0
for f in files:
os.rename(f, tracklist[i] + mp3_extension)
i += 1
|
Move files/tracklist count check to function
|
MP3: Move files/tracklist count check to function
|
Python
|
mit
|
jleung51/scripts,jleung51/scripts,jleung51/scripts
|
import ID3
import os
import sys
def read_tracklist():
tracklist = []
for line in sys.stdin:
tracklist.append(line)
return tracklist
+
+ def match_length(files, tracklist):
+ if len(files) != len(tracklist):
+ raise RuntimeError(
+ str(len(tracklist)) +
+ " file names were given but " +
+ str(len(files)) +
+ " files were found.")
tracklist = read_tracklist()
mp3_extension = ".mp3"
files_all = os.listdir('.')
files = []
for f in files_all:
# Prune directories
if not os.path.isfile(f):
continue
# Prune non-MP3 files
filename, extension = os.path.splitext(f)
if extension != mp3_extension:
continue
# Prune this file
f_temp = os.path.abspath(f)
if f_temp == os.path.abspath(__file__):
continue
files.append(f)
+ match_length(files, tracklist)
- if len(files) != len(tracklist):
- raise RuntimeError(
- str(len(tracklist)) +
- " file names were given but " +
- str(len(files)) +
- " files were found.")
- sys.exit()
files.sort()
i = 0
for f in files:
os.rename(f, tracklist[i] + mp3_extension)
i += 1
|
Move files/tracklist count check to function
|
## Code Before:
import ID3
import os
import sys
def read_tracklist():
tracklist = []
for line in sys.stdin:
tracklist.append(line)
return tracklist
tracklist = read_tracklist()
mp3_extension = ".mp3"
files_all = os.listdir('.')
files = []
for f in files_all:
# Prune directories
if not os.path.isfile(f):
continue
# Prune non-MP3 files
filename, extension = os.path.splitext(f)
if extension != mp3_extension:
continue
# Prune this file
f_temp = os.path.abspath(f)
if f_temp == os.path.abspath(__file__):
continue
files.append(f)
if len(files) != len(tracklist):
raise RuntimeError(
str(len(tracklist)) +
" file names were given but " +
str(len(files)) +
" files were found.")
sys.exit()
files.sort()
i = 0
for f in files:
os.rename(f, tracklist[i] + mp3_extension)
i += 1
## Instruction:
Move files/tracklist count check to function
## Code After:
import ID3
import os
import sys
def read_tracklist():
tracklist = []
for line in sys.stdin:
tracklist.append(line)
return tracklist
def match_length(files, tracklist):
if len(files) != len(tracklist):
raise RuntimeError(
str(len(tracklist)) +
" file names were given but " +
str(len(files)) +
" files were found.")
tracklist = read_tracklist()
mp3_extension = ".mp3"
files_all = os.listdir('.')
files = []
for f in files_all:
# Prune directories
if not os.path.isfile(f):
continue
# Prune non-MP3 files
filename, extension = os.path.splitext(f)
if extension != mp3_extension:
continue
# Prune this file
f_temp = os.path.abspath(f)
if f_temp == os.path.abspath(__file__):
continue
files.append(f)
match_length(files, tracklist)
files.sort()
i = 0
for f in files:
os.rename(f, tracklist[i] + mp3_extension)
i += 1
|
...
return tracklist
def match_length(files, tracklist):
if len(files) != len(tracklist):
raise RuntimeError(
str(len(tracklist)) +
" file names were given but " +
str(len(files)) +
" files were found.")
...
match_length(files, tracklist)
...
|
94788bd7a7ba0a7799689c4613a2afbcc377649b
|
games/migrations/0016_auto_20161209_1256.py
|
games/migrations/0016_auto_20161209_1256.py
|
from __future__ import unicode_literals
from django.db import migrations
from django.core.management import call_command
def create_revisions(apps, schema_editor):
call_command('createinitialrevisions')
class Migration(migrations.Migration):
dependencies = [
('games', '0015_installer_draft'),
]
operations = [
migrations.RunPython(create_revisions)
]
|
from __future__ import unicode_literals
from django.db import migrations
from django.core.management import call_command
def create_revisions(apps, schema_editor):
call_command('createinitialrevisions')
class Migration(migrations.Migration):
dependencies = [
('games', '0015_installer_draft'),
('reversion', '0001_squashed_0004_auto_20160611_1202'),
]
operations = [
migrations.RunPython(create_revisions)
]
|
Add dependency to reversion data migration
|
Add dependency to reversion data migration
|
Python
|
agpl-3.0
|
Turupawn/website,Turupawn/website,lutris/website,lutris/website,lutris/website,Turupawn/website,Turupawn/website,lutris/website
|
from __future__ import unicode_literals
from django.db import migrations
from django.core.management import call_command
def create_revisions(apps, schema_editor):
call_command('createinitialrevisions')
class Migration(migrations.Migration):
dependencies = [
('games', '0015_installer_draft'),
+ ('reversion', '0001_squashed_0004_auto_20160611_1202'),
]
operations = [
migrations.RunPython(create_revisions)
]
|
Add dependency to reversion data migration
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations
from django.core.management import call_command
def create_revisions(apps, schema_editor):
call_command('createinitialrevisions')
class Migration(migrations.Migration):
dependencies = [
('games', '0015_installer_draft'),
]
operations = [
migrations.RunPython(create_revisions)
]
## Instruction:
Add dependency to reversion data migration
## Code After:
from __future__ import unicode_literals
from django.db import migrations
from django.core.management import call_command
def create_revisions(apps, schema_editor):
call_command('createinitialrevisions')
class Migration(migrations.Migration):
dependencies = [
('games', '0015_installer_draft'),
('reversion', '0001_squashed_0004_auto_20160611_1202'),
]
operations = [
migrations.RunPython(create_revisions)
]
|
// ... existing code ...
('games', '0015_installer_draft'),
('reversion', '0001_squashed_0004_auto_20160611_1202'),
]
// ... rest of the code ...
|
f468ea8123768a3f66621bfecae20814fa83017b
|
website_sale_clear_line/controllers/main.py
|
website_sale_clear_line/controllers/main.py
|
from openerp.http import request
from openerp import http
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, uid, context, pool = (
request.cr, request.uid, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, uid, line_id, context=context)
|
from openerp.http import request
from openerp import http, SUPERUSER_ID
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, context, pool = (
request.cr, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, SUPERUSER_ID, line_id, context=context)
|
FIX website sale clear line
|
FIX website sale clear line
|
Python
|
agpl-3.0
|
ingadhoc/website
|
from openerp.http import request
- from openerp import http
+ from openerp import http, SUPERUSER_ID
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
- cr, uid, context, pool = (
+ cr, context, pool = (
- request.cr, request.uid, request.context, request.registry)
+ request.cr, request.context, request.registry)
pool['sale.order.line'].unlink(
- cr, uid, line_id, context=context)
+ cr, SUPERUSER_ID, line_id, context=context)
|
FIX website sale clear line
|
## Code Before:
from openerp.http import request
from openerp import http
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, uid, context, pool = (
request.cr, request.uid, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, uid, line_id, context=context)
## Instruction:
FIX website sale clear line
## Code After:
from openerp.http import request
from openerp import http, SUPERUSER_ID
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, context, pool = (
request.cr, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, SUPERUSER_ID, line_id, context=context)
|
# ... existing code ...
from openerp.http import request
from openerp import http, SUPERUSER_ID
# ... modified code ...
def clear_cart_line(self, line_id, **kw):
cr, context, pool = (
request.cr, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, SUPERUSER_ID, line_id, context=context)
# ... rest of the code ...
|
a4104dea137b8fa2aedc38ac3bda53c559c1f45a
|
tests/test_opentransf.py
|
tests/test_opentransf.py
|
import pymorph
import numpy as np
def test_opentransf():
f = np.array([
[0,0,0,0,0,0,0,0],
[0,0,1,1,1,1,0,0],
[0,1,0,1,1,1,0,0],
[0,0,1,1,1,1,0,0],
[1,1,0,0,0,0,0,0]], bool)
ot = pymorph.opentransf( f, 'city-block')
for y in xrange(ot.shape[0]):
for x in xrange(ot.shape[1]):
r = ot[y,x]
t = f.copy()
for k in xrange(1, r+1):
assert t[y,x]
t = pymorph.open(f, pymorph.sedisk(k, 2, 'city-block'))
assert not t[y,x]
def test_all():
f = np.arange(9).reshape((3,3)) % 3 > 0
# linear-h crashed in 0.95
# and was underlying cause of crash in patsec(f, 'linear-h')
g = pymorph.opentransf(f, 'linear-h')
|
import pymorph
import numpy as np
def test_opentransf():
f = np.array([
[0,0,0,0,0,0,0,0],
[0,0,1,1,1,1,0,0],
[0,1,0,1,1,1,0,0],
[0,0,1,1,1,1,0,0],
[1,1,0,0,0,0,0,0]], bool)
ot = pymorph.opentransf( f, 'city-block')
for y in xrange(ot.shape[0]):
for x in xrange(ot.shape[1]):
r = ot[y,x]
t = f.copy()
for k in xrange(1, r+1):
assert t[y,x]
t = pymorph.open(f, pymorph.sedisk(k, 2, 'city-block'))
assert not t[y,x]
def test_all_types():
f = np.arange(9).reshape((3,3)) % 3 > 0
# linear-h crashed in 0.95
# and was underlying cause of crash in patsec(f, 'linear-h')
def test_type(type, Buser):
g = pymorph.opentransf(f, type, Buser=Buser)
yield test_type, 'linear-h', None
yield test_type, 'octagon', None
yield test_type, 'chessboard', None
yield test_type, 'city-block', None
yield test_type, 'linear-v', None
yield test_type, 'linear-45r', None
yield test_type, 'linear-45l', None
Buser = np.ones((3,3),bool)
Buser[2,2] = 0
yield test_type, 'user', Buser
|
Test all cases of type for opentransf
|
TST: Test all cases of type for opentransf
|
Python
|
bsd-3-clause
|
luispedro/pymorph
|
import pymorph
import numpy as np
def test_opentransf():
f = np.array([
[0,0,0,0,0,0,0,0],
[0,0,1,1,1,1,0,0],
[0,1,0,1,1,1,0,0],
[0,0,1,1,1,1,0,0],
[1,1,0,0,0,0,0,0]], bool)
ot = pymorph.opentransf( f, 'city-block')
for y in xrange(ot.shape[0]):
for x in xrange(ot.shape[1]):
r = ot[y,x]
t = f.copy()
for k in xrange(1, r+1):
assert t[y,x]
t = pymorph.open(f, pymorph.sedisk(k, 2, 'city-block'))
assert not t[y,x]
- def test_all():
+ def test_all_types():
f = np.arange(9).reshape((3,3)) % 3 > 0
# linear-h crashed in 0.95
# and was underlying cause of crash in patsec(f, 'linear-h')
- g = pymorph.opentransf(f, 'linear-h')
+ def test_type(type, Buser):
+ g = pymorph.opentransf(f, type, Buser=Buser)
+ yield test_type, 'linear-h', None
+ yield test_type, 'octagon', None
+ yield test_type, 'chessboard', None
+ yield test_type, 'city-block', None
+ yield test_type, 'linear-v', None
+ yield test_type, 'linear-45r', None
+ yield test_type, 'linear-45l', None
+ Buser = np.ones((3,3),bool)
+ Buser[2,2] = 0
+ yield test_type, 'user', Buser
+
|
Test all cases of type for opentransf
|
## Code Before:
import pymorph
import numpy as np
def test_opentransf():
f = np.array([
[0,0,0,0,0,0,0,0],
[0,0,1,1,1,1,0,0],
[0,1,0,1,1,1,0,0],
[0,0,1,1,1,1,0,0],
[1,1,0,0,0,0,0,0]], bool)
ot = pymorph.opentransf( f, 'city-block')
for y in xrange(ot.shape[0]):
for x in xrange(ot.shape[1]):
r = ot[y,x]
t = f.copy()
for k in xrange(1, r+1):
assert t[y,x]
t = pymorph.open(f, pymorph.sedisk(k, 2, 'city-block'))
assert not t[y,x]
def test_all():
f = np.arange(9).reshape((3,3)) % 3 > 0
# linear-h crashed in 0.95
# and was underlying cause of crash in patsec(f, 'linear-h')
g = pymorph.opentransf(f, 'linear-h')
## Instruction:
Test all cases of type for opentransf
## Code After:
import pymorph
import numpy as np
def test_opentransf():
f = np.array([
[0,0,0,0,0,0,0,0],
[0,0,1,1,1,1,0,0],
[0,1,0,1,1,1,0,0],
[0,0,1,1,1,1,0,0],
[1,1,0,0,0,0,0,0]], bool)
ot = pymorph.opentransf( f, 'city-block')
for y in xrange(ot.shape[0]):
for x in xrange(ot.shape[1]):
r = ot[y,x]
t = f.copy()
for k in xrange(1, r+1):
assert t[y,x]
t = pymorph.open(f, pymorph.sedisk(k, 2, 'city-block'))
assert not t[y,x]
def test_all_types():
f = np.arange(9).reshape((3,3)) % 3 > 0
# linear-h crashed in 0.95
# and was underlying cause of crash in patsec(f, 'linear-h')
def test_type(type, Buser):
g = pymorph.opentransf(f, type, Buser=Buser)
yield test_type, 'linear-h', None
yield test_type, 'octagon', None
yield test_type, 'chessboard', None
yield test_type, 'city-block', None
yield test_type, 'linear-v', None
yield test_type, 'linear-45r', None
yield test_type, 'linear-45l', None
Buser = np.ones((3,3),bool)
Buser[2,2] = 0
yield test_type, 'user', Buser
|
...
def test_all_types():
f = np.arange(9).reshape((3,3)) % 3 > 0
...
# and was underlying cause of crash in patsec(f, 'linear-h')
def test_type(type, Buser):
g = pymorph.opentransf(f, type, Buser=Buser)
yield test_type, 'linear-h', None
yield test_type, 'octagon', None
yield test_type, 'chessboard', None
yield test_type, 'city-block', None
yield test_type, 'linear-v', None
yield test_type, 'linear-45r', None
yield test_type, 'linear-45l', None
Buser = np.ones((3,3),bool)
Buser[2,2] = 0
yield test_type, 'user', Buser
...
|
b1a91fc4e843197a12be653aa60d5cdf32f31423
|
tests/test_recursion.py
|
tests/test_recursion.py
|
from tests import utils
def test_recursion():
uwhois = utils.create_uwhois()
expected = 'whois.markmonitor.com'
transcript = utils.read_transcript('google.com.txt')
# Make sure there's nothing wrong with the WHOIS transcript.
assert transcript.count(expected) == 1
assert uwhois.get_registrar_whois_server('com', transcript) == expected
|
from tests import utils
def test_recursion():
uwhois = utils.create_uwhois()
expected = 'whois.markmonitor.com'
transcript = utils.read_transcript('google.com.txt')
# Make sure there's nothing wrong with the WHOIS transcript.
assert transcript.count(expected) == 1
server, port = uwhois.get_whois_server('com')
pattern = uwhois.get_recursion_pattern(server)
assert uwhois.get_registrar_whois_server(pattern, transcript) == expected
|
Fix test for the fork
|
Fix test for the fork
|
Python
|
mit
|
Rafiot/uwhoisd,Rafiot/uwhoisd
|
from tests import utils
def test_recursion():
uwhois = utils.create_uwhois()
expected = 'whois.markmonitor.com'
transcript = utils.read_transcript('google.com.txt')
# Make sure there's nothing wrong with the WHOIS transcript.
assert transcript.count(expected) == 1
+ server, port = uwhois.get_whois_server('com')
+ pattern = uwhois.get_recursion_pattern(server)
- assert uwhois.get_registrar_whois_server('com', transcript) == expected
+ assert uwhois.get_registrar_whois_server(pattern, transcript) == expected
|
Fix test for the fork
|
## Code Before:
from tests import utils
def test_recursion():
uwhois = utils.create_uwhois()
expected = 'whois.markmonitor.com'
transcript = utils.read_transcript('google.com.txt')
# Make sure there's nothing wrong with the WHOIS transcript.
assert transcript.count(expected) == 1
assert uwhois.get_registrar_whois_server('com', transcript) == expected
## Instruction:
Fix test for the fork
## Code After:
from tests import utils
def test_recursion():
uwhois = utils.create_uwhois()
expected = 'whois.markmonitor.com'
transcript = utils.read_transcript('google.com.txt')
# Make sure there's nothing wrong with the WHOIS transcript.
assert transcript.count(expected) == 1
server, port = uwhois.get_whois_server('com')
pattern = uwhois.get_recursion_pattern(server)
assert uwhois.get_registrar_whois_server(pattern, transcript) == expected
|
...
assert transcript.count(expected) == 1
server, port = uwhois.get_whois_server('com')
pattern = uwhois.get_recursion_pattern(server)
assert uwhois.get_registrar_whois_server(pattern, transcript) == expected
...
|
f76901831084c11ec633eb96c310860d15199edd
|
distutils/__init__.py
|
distutils/__init__.py
|
import sys
import importlib
__version__ = sys.version[:sys.version.index(' ')]
try:
# Allow Debian and pkgsrc and Fedora (only) to customize
# system
# behavior. Ref pypa/distutils#2 and pypa/distutils#16
# and pypa/distutils#70.
# This hook is deprecated and no other environments
# should use it.
importlib.import_module('_distutils_system_mod')
except ImportError:
pass
|
import sys
import importlib
__version__ = sys.version[:sys.version.index(' ')]
try:
# Allow Debian and pkgsrc (only) to customize system
# behavior. Ref pypa/distutils#2 and pypa/distutils#16.
# This hook is deprecated and no other environments
# should use it.
importlib.import_module('_distutils_system_mod')
except ImportError:
pass
|
Revert "Update comment for _distutils_system_mod." as Fedora is not using that hook.
|
Revert "Update comment for _distutils_system_mod." as Fedora is not using that hook.
This reverts commit 8c64fdc8560d9f7b7d3926350bba7702b0906329.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
import sys
import importlib
__version__ = sys.version[:sys.version.index(' ')]
try:
- # Allow Debian and pkgsrc and Fedora (only) to customize
+ # Allow Debian and pkgsrc (only) to customize system
- # system
- # behavior. Ref pypa/distutils#2 and pypa/distutils#16
+ # behavior. Ref pypa/distutils#2 and pypa/distutils#16.
- # and pypa/distutils#70.
# This hook is deprecated and no other environments
# should use it.
importlib.import_module('_distutils_system_mod')
except ImportError:
pass
|
Revert "Update comment for _distutils_system_mod." as Fedora is not using that hook.
|
## Code Before:
import sys
import importlib
__version__ = sys.version[:sys.version.index(' ')]
try:
# Allow Debian and pkgsrc and Fedora (only) to customize
# system
# behavior. Ref pypa/distutils#2 and pypa/distutils#16
# and pypa/distutils#70.
# This hook is deprecated and no other environments
# should use it.
importlib.import_module('_distutils_system_mod')
except ImportError:
pass
## Instruction:
Revert "Update comment for _distutils_system_mod." as Fedora is not using that hook.
## Code After:
import sys
import importlib
__version__ = sys.version[:sys.version.index(' ')]
try:
# Allow Debian and pkgsrc (only) to customize system
# behavior. Ref pypa/distutils#2 and pypa/distutils#16.
# This hook is deprecated and no other environments
# should use it.
importlib.import_module('_distutils_system_mod')
except ImportError:
pass
|
# ... existing code ...
try:
# Allow Debian and pkgsrc (only) to customize system
# behavior. Ref pypa/distutils#2 and pypa/distutils#16.
# This hook is deprecated and no other environments
# ... rest of the code ...
|
d1826b00f4b4944161c66e737978bdc87bb57b52
|
polyaxon/libs/decorators.py
|
polyaxon/libs/decorators.py
|
class IgnoreRawDecorator(object):
"""The `IgnoreRawDecorator` is a decorator to ignore raw/fixture data during signals handling.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if kwargs.get('raw'):
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
ignore_raw = IgnoreRawDecorator
|
from django.conf import settings
class IgnoreRawDecorator(object):
"""The `IgnoreRawDecorator` is a decorator to ignore raw/fixture data during signals handling.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if kwargs.get('raw'):
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
class RunnerSignalDecorator(object):
"""The `RunnerSignalDecorator` is a decorator to ignore signals related to runner.
This is useful to ignore any signal that is runner specific.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@runner_signal
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if not settings.DEPLOY_RUNNER:
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
ignore_raw = IgnoreRawDecorator
runner_signal = RunnerSignalDecorator
|
Add decorator for runner signals
|
Add decorator for runner signals
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
+ from django.conf import settings
+
+
class IgnoreRawDecorator(object):
"""The `IgnoreRawDecorator` is a decorator to ignore raw/fixture data during signals handling.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if kwargs.get('raw'):
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
+ class RunnerSignalDecorator(object):
+ """The `RunnerSignalDecorator` is a decorator to ignore signals related to runner.
+
+ This is useful to ignore any signal that is runner specific.
+
+ usage example:
+ @receiver(post_save, sender=settings.AUTH_USER_MODEL)
+ @runner_signal
+ @ignore_raw
+ def my_signal_handler(sender, instance=None, created=False, **kwargs):
+ ...
+ return ...
+ """
+
+ def __init__(self, f):
+ self.f = f
+
+ def __call__(self, *args, **kwargs):
+ if not settings.DEPLOY_RUNNER:
+ # Ignore signal handling for fixture loading
+ return
+
+ return self.f(*args, **kwargs)
+
+
ignore_raw = IgnoreRawDecorator
+ runner_signal = RunnerSignalDecorator
|
Add decorator for runner signals
|
## Code Before:
class IgnoreRawDecorator(object):
"""The `IgnoreRawDecorator` is a decorator to ignore raw/fixture data during signals handling.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if kwargs.get('raw'):
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
ignore_raw = IgnoreRawDecorator
## Instruction:
Add decorator for runner signals
## Code After:
from django.conf import settings
class IgnoreRawDecorator(object):
"""The `IgnoreRawDecorator` is a decorator to ignore raw/fixture data during signals handling.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if kwargs.get('raw'):
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
class RunnerSignalDecorator(object):
"""The `RunnerSignalDecorator` is a decorator to ignore signals related to runner.
This is useful to ignore any signal that is runner specific.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@runner_signal
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if not settings.DEPLOY_RUNNER:
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
ignore_raw = IgnoreRawDecorator
runner_signal = RunnerSignalDecorator
|
// ... existing code ...
from django.conf import settings
class IgnoreRawDecorator(object):
// ... modified code ...
class RunnerSignalDecorator(object):
"""The `RunnerSignalDecorator` is a decorator to ignore signals related to runner.
This is useful to ignore any signal that is runner specific.
usage example:
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@runner_signal
@ignore_raw
def my_signal_handler(sender, instance=None, created=False, **kwargs):
...
return ...
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
if not settings.DEPLOY_RUNNER:
# Ignore signal handling for fixture loading
return
return self.f(*args, **kwargs)
ignore_raw = IgnoreRawDecorator
runner_signal = RunnerSignalDecorator
// ... rest of the code ...
|
4c1bf1757baa5beec50377724961c528f5985864
|
ptest/screencapturer.py
|
ptest/screencapturer.py
|
import threading
import traceback
import plogger
__author__ = 'karl.gong'
def take_screen_shot():
current_thread = threading.currentThread()
active_browser = current_thread.get_property("browser")
if active_browser is not None:
while True:
try:
active_browser.switch_to.alert.dismiss()
except Exception:
break
try:
screen_shot = active_browser.get_screenshot_as_png()
except Exception as e:
plogger.warn("Failed to take the screenshot: \n%s\n%s" % (e.message, traceback.format_exc()))
return
current_thread.get_property("running_test_case_fixture").screen_shot = screen_shot
else:
pass # todo: take screen shot for desktop
|
import threading
import traceback
import StringIO
import plogger
try:
from PIL import ImageGrab
except ImportError:
PIL_installed = False
else:
PIL_installed = True
try:
import wx
except ImportError:
wxpython_installed = False
else:
wxpython_installed = True
__author__ = 'karl.gong'
def take_screen_shot():
current_thread = threading.currentThread()
active_browser = current_thread.get_property("browser")
if active_browser is not None:
while True:
try:
active_browser.switch_to.alert.dismiss()
except Exception:
break
def capture_screen():
return active_browser.get_screenshot_as_png()
elif PIL_installed:
def capture_screen():
output = StringIO.StringIO()
ImageGrab.grab().save(output, format="png")
return output.getvalue()
elif wxpython_installed:
def capture_screen():
app = wx.App(False)
screen = wx.ScreenDC()
width, height = screen.GetSize()
bmp = wx.EmptyBitmap(width, height)
mem = wx.MemoryDC(bmp)
mem.Blit(0, 0, width, height, screen, 0, 0)
output = StringIO.StringIO()
bmp.ConvertToImage().SaveStream(output, wx.BITMAP_TYPE_PNG)
return output.getvalue()
else:
return
try:
current_thread.get_property("running_test_case_fixture").screen_shot = capture_screen()
except Exception as e:
plogger.warn("Failed to take the screenshot: \n%screen\n%screen" % (e.message, traceback.format_exc()))
|
Support capture screenshot for no-selenium test
|
Support capture screenshot for no-selenium test
|
Python
|
apache-2.0
|
KarlGong/ptest,KarlGong/ptest
|
import threading
import traceback
+ import StringIO
import plogger
+
+ try:
+ from PIL import ImageGrab
+ except ImportError:
+ PIL_installed = False
+ else:
+ PIL_installed = True
+
+ try:
+ import wx
+ except ImportError:
+ wxpython_installed = False
+ else:
+ wxpython_installed = True
__author__ = 'karl.gong'
def take_screen_shot():
current_thread = threading.currentThread()
active_browser = current_thread.get_property("browser")
if active_browser is not None:
while True:
try:
active_browser.switch_to.alert.dismiss()
except Exception:
break
- try:
+ def capture_screen():
- screen_shot = active_browser.get_screenshot_as_png()
+ return active_browser.get_screenshot_as_png()
- except Exception as e:
- plogger.warn("Failed to take the screenshot: \n%s\n%s" % (e.message, traceback.format_exc()))
+ elif PIL_installed:
+ def capture_screen():
+ output = StringIO.StringIO()
+ ImageGrab.grab().save(output, format="png")
+ return output.getvalue()
+ elif wxpython_installed:
+ def capture_screen():
+ app = wx.App(False)
+ screen = wx.ScreenDC()
+ width, height = screen.GetSize()
+ bmp = wx.EmptyBitmap(width, height)
+ mem = wx.MemoryDC(bmp)
+ mem.Blit(0, 0, width, height, screen, 0, 0)
+ output = StringIO.StringIO()
+ bmp.ConvertToImage().SaveStream(output, wx.BITMAP_TYPE_PNG)
+ return output.getvalue()
+ else:
- return
+ return
+ try:
- current_thread.get_property("running_test_case_fixture").screen_shot = screen_shot
+ current_thread.get_property("running_test_case_fixture").screen_shot = capture_screen()
+ except Exception as e:
+ plogger.warn("Failed to take the screenshot: \n%screen\n%screen" % (e.message, traceback.format_exc()))
-
- else:
- pass # todo: take screen shot for desktop
|
Support capture screenshot for no-selenium test
|
## Code Before:
import threading
import traceback
import plogger
__author__ = 'karl.gong'
def take_screen_shot():
current_thread = threading.currentThread()
active_browser = current_thread.get_property("browser")
if active_browser is not None:
while True:
try:
active_browser.switch_to.alert.dismiss()
except Exception:
break
try:
screen_shot = active_browser.get_screenshot_as_png()
except Exception as e:
plogger.warn("Failed to take the screenshot: \n%s\n%s" % (e.message, traceback.format_exc()))
return
current_thread.get_property("running_test_case_fixture").screen_shot = screen_shot
else:
pass # todo: take screen shot for desktop
## Instruction:
Support capture screenshot for no-selenium test
## Code After:
import threading
import traceback
import StringIO
import plogger
try:
from PIL import ImageGrab
except ImportError:
PIL_installed = False
else:
PIL_installed = True
try:
import wx
except ImportError:
wxpython_installed = False
else:
wxpython_installed = True
__author__ = 'karl.gong'
def take_screen_shot():
current_thread = threading.currentThread()
active_browser = current_thread.get_property("browser")
if active_browser is not None:
while True:
try:
active_browser.switch_to.alert.dismiss()
except Exception:
break
def capture_screen():
return active_browser.get_screenshot_as_png()
elif PIL_installed:
def capture_screen():
output = StringIO.StringIO()
ImageGrab.grab().save(output, format="png")
return output.getvalue()
elif wxpython_installed:
def capture_screen():
app = wx.App(False)
screen = wx.ScreenDC()
width, height = screen.GetSize()
bmp = wx.EmptyBitmap(width, height)
mem = wx.MemoryDC(bmp)
mem.Blit(0, 0, width, height, screen, 0, 0)
output = StringIO.StringIO()
bmp.ConvertToImage().SaveStream(output, wx.BITMAP_TYPE_PNG)
return output.getvalue()
else:
return
try:
current_thread.get_property("running_test_case_fixture").screen_shot = capture_screen()
except Exception as e:
plogger.warn("Failed to take the screenshot: \n%screen\n%screen" % (e.message, traceback.format_exc()))
|
# ... existing code ...
import traceback
import StringIO
# ... modified code ...
try:
from PIL import ImageGrab
except ImportError:
PIL_installed = False
else:
PIL_installed = True
try:
import wx
except ImportError:
wxpython_installed = False
else:
wxpython_installed = True
...
def capture_screen():
return active_browser.get_screenshot_as_png()
elif PIL_installed:
def capture_screen():
output = StringIO.StringIO()
ImageGrab.grab().save(output, format="png")
return output.getvalue()
elif wxpython_installed:
def capture_screen():
app = wx.App(False)
screen = wx.ScreenDC()
width, height = screen.GetSize()
bmp = wx.EmptyBitmap(width, height)
mem = wx.MemoryDC(bmp)
mem.Blit(0, 0, width, height, screen, 0, 0)
output = StringIO.StringIO()
bmp.ConvertToImage().SaveStream(output, wx.BITMAP_TYPE_PNG)
return output.getvalue()
else:
return
try:
current_thread.get_property("running_test_case_fixture").screen_shot = capture_screen()
except Exception as e:
plogger.warn("Failed to take the screenshot: \n%screen\n%screen" % (e.message, traceback.format_exc()))
# ... rest of the code ...
|
53dd7c112d3f1781e8b7c662ba52c805a6afa568
|
scripts/3-create-database.py
|
scripts/3-create-database.py
|
import glob
import logging
import sqlite3
import pandas as pd
log = logging.getLogger(__name__)
log.setLevel("INFO")
CSV_FILENAME = "../k2-target-pixel-files.csv"
SQLITE_FILENAME = "../k2-target-pixel-files.db"
if __name__ == "__main__":
log.info("Reading the data")
df = pd.concat([pd.read_csv(fn)
for fn
in glob.glob("intermediate-data/*metadata.csv")])
# Write to the CSV file
log.info("Writing {}".format(CSV_FILENAME))
df.to_csv(CSV_FILENAME, index=False)
# Write the SQLite table
log.info("Writing {}".format(SQLITE_FILENAME))
con = sqlite3.connect(SQLITE_FILENAME)
df.to_sql(name='tpf', con=con, if_exists='replace', index=False)
|
import glob
import logging
import sqlite3
import pandas as pd
log = logging.getLogger(__name__)
log.setLevel("INFO")
CSV_FILENAME = "../k2-target-pixel-files.csv"
SQLITE_FILENAME = "../k2-target-pixel-files.db"
if __name__ == "__main__":
log.info("Reading the data")
df = pd.concat([pd.read_csv(fn)
for fn
in glob.glob("intermediate-data/*metadata.csv")])
df = df.sort_values("keplerid")
# Write to the CSV file
log.info("Writing {}".format(CSV_FILENAME))
df.to_csv(CSV_FILENAME, index=False)
# Write the SQLite table
log.info("Writing {}".format(SQLITE_FILENAME))
con = sqlite3.connect(SQLITE_FILENAME)
df.to_sql(name='tpf', con=con, if_exists='replace', index=False)
|
Sort the final table by keplerid
|
Sort the final table by keplerid
|
Python
|
mit
|
barentsen/K2metadata,KeplerGO/K2metadata,barentsen/k2-target-index
|
import glob
import logging
import sqlite3
import pandas as pd
log = logging.getLogger(__name__)
log.setLevel("INFO")
CSV_FILENAME = "../k2-target-pixel-files.csv"
SQLITE_FILENAME = "../k2-target-pixel-files.db"
if __name__ == "__main__":
log.info("Reading the data")
df = pd.concat([pd.read_csv(fn)
for fn
in glob.glob("intermediate-data/*metadata.csv")])
+ df = df.sort_values("keplerid")
# Write to the CSV file
log.info("Writing {}".format(CSV_FILENAME))
df.to_csv(CSV_FILENAME, index=False)
# Write the SQLite table
log.info("Writing {}".format(SQLITE_FILENAME))
con = sqlite3.connect(SQLITE_FILENAME)
df.to_sql(name='tpf', con=con, if_exists='replace', index=False)
|
Sort the final table by keplerid
|
## Code Before:
import glob
import logging
import sqlite3
import pandas as pd
log = logging.getLogger(__name__)
log.setLevel("INFO")
CSV_FILENAME = "../k2-target-pixel-files.csv"
SQLITE_FILENAME = "../k2-target-pixel-files.db"
if __name__ == "__main__":
log.info("Reading the data")
df = pd.concat([pd.read_csv(fn)
for fn
in glob.glob("intermediate-data/*metadata.csv")])
# Write to the CSV file
log.info("Writing {}".format(CSV_FILENAME))
df.to_csv(CSV_FILENAME, index=False)
# Write the SQLite table
log.info("Writing {}".format(SQLITE_FILENAME))
con = sqlite3.connect(SQLITE_FILENAME)
df.to_sql(name='tpf', con=con, if_exists='replace', index=False)
## Instruction:
Sort the final table by keplerid
## Code After:
import glob
import logging
import sqlite3
import pandas as pd
log = logging.getLogger(__name__)
log.setLevel("INFO")
CSV_FILENAME = "../k2-target-pixel-files.csv"
SQLITE_FILENAME = "../k2-target-pixel-files.db"
if __name__ == "__main__":
log.info("Reading the data")
df = pd.concat([pd.read_csv(fn)
for fn
in glob.glob("intermediate-data/*metadata.csv")])
df = df.sort_values("keplerid")
# Write to the CSV file
log.info("Writing {}".format(CSV_FILENAME))
df.to_csv(CSV_FILENAME, index=False)
# Write the SQLite table
log.info("Writing {}".format(SQLITE_FILENAME))
con = sqlite3.connect(SQLITE_FILENAME)
df.to_sql(name='tpf', con=con, if_exists='replace', index=False)
|
...
in glob.glob("intermediate-data/*metadata.csv")])
df = df.sort_values("keplerid")
...
|
9eb07a5b7d2875cf79bb698864d11ef29576133e
|
comics/utils/hash.py
|
comics/utils/hash.py
|
import hashlib
def sha256sum(filename):
"""Returns sha256sum for file"""
f = file(filename, 'rb')
m = hashlib.sha256()
while True:
b = f.read(8096)
if not b:
break
m.update(b)
f.close()
return m.hexdigest()
|
import hashlib
def sha256sum(filename=None, filehandle=None):
"""Returns sha256sum for file"""
if filename is not None:
f = file(filename, 'rb')
else:
f = filehandle
m = hashlib.sha256()
while True:
b = f.read(8096)
if not b:
break
m.update(b)
if filename is not None:
f.close()
return m.hexdigest()
|
Make sha256sum work with open filehandles too
|
Make sha256sum work with open filehandles too
|
Python
|
agpl-3.0
|
datagutten/comics,jodal/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,klette/comics,klette/comics,klette/comics,jodal/comics,datagutten/comics
|
import hashlib
- def sha256sum(filename):
+ def sha256sum(filename=None, filehandle=None):
"""Returns sha256sum for file"""
+ if filename is not None:
- f = file(filename, 'rb')
+ f = file(filename, 'rb')
+ else:
+ f = filehandle
m = hashlib.sha256()
while True:
b = f.read(8096)
if not b:
break
m.update(b)
+ if filename is not None:
- f.close()
+ f.close()
return m.hexdigest()
|
Make sha256sum work with open filehandles too
|
## Code Before:
import hashlib
def sha256sum(filename):
"""Returns sha256sum for file"""
f = file(filename, 'rb')
m = hashlib.sha256()
while True:
b = f.read(8096)
if not b:
break
m.update(b)
f.close()
return m.hexdigest()
## Instruction:
Make sha256sum work with open filehandles too
## Code After:
import hashlib
def sha256sum(filename=None, filehandle=None):
"""Returns sha256sum for file"""
if filename is not None:
f = file(filename, 'rb')
else:
f = filehandle
m = hashlib.sha256()
while True:
b = f.read(8096)
if not b:
break
m.update(b)
if filename is not None:
f.close()
return m.hexdigest()
|
...
def sha256sum(filename=None, filehandle=None):
"""Returns sha256sum for file"""
...
if filename is not None:
f = file(filename, 'rb')
else:
f = filehandle
m = hashlib.sha256()
...
m.update(b)
if filename is not None:
f.close()
return m.hexdigest()
...
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.