| Column | Type | Value stats |
| --- | --- | --- |
| commit | string | lengths 40–40 |
| old_file | string | lengths 4–106 |
| new_file | string | lengths 4–106 |
| old_contents | string | lengths 10–2.94k |
| new_contents | string | lengths 21–2.95k |
| subject | string | lengths 16–444 |
| message | string | lengths 17–2.63k |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | lengths 7–43k |
| ndiff | string | lengths 52–3.31k |
| instruction | string | lengths 16–444 |
| content | string | lengths 133–4.32k |
| diff | string | lengths 49–3.61k |
d96dbe9f5688e469f34c7428569eda7d2c86f3d7
|
tests/test_err.py
|
tests/test_err.py
|
import pytest

import rasterio
from rasterio.errors import RasterioIOError


def test_io_error(tmpdir):
    with pytest.raises(RasterioIOError) as exc_info:
        rasterio.open(str(tmpdir.join('foo.tif')))
    msg = exc_info.value.message
    assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
    assert ("does not exist in the file system, and is not recognised as a "
            "supported dataset name.") in msg


def test_io_error_env(tmpdir):
    with rasterio.drivers() as env:
        drivers_start = env.drivers()
        with pytest.raises(RasterioIOError):
            rasterio.open(str(tmpdir.join('foo.tif')))
        assert env.drivers() == drivers_start


def test_bogus_band_error():
    with rasterio.open('tests/data/RGB.byte.tif') as src:
        assert src._has_band(4) is False
|
import pytest

import rasterio
from rasterio.errors import RasterioIOError


def test_io_error(tmpdir):
    with pytest.raises(RasterioIOError) as exc_info:
        rasterio.open(str(tmpdir.join('foo.tif')))
    msg = str(exc_info.value)
    assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
    assert ("does not exist in the file system, and is not recognised as a "
            "supported dataset name.") in msg


def test_io_error_env(tmpdir):
    with rasterio.drivers() as env:
        drivers_start = env.drivers()
        with pytest.raises(RasterioIOError):
            rasterio.open(str(tmpdir.join('foo.tif')))
        assert env.drivers() == drivers_start


def test_bogus_band_error():
    with rasterio.open('tests/data/RGB.byte.tif') as src:
        assert src._has_band(4) is False
|
Test str repr of exception
|
Test str repr of exception
|
Python
|
bsd-3-clause
|
kapadia/rasterio,brendan-ward/rasterio,brendan-ward/rasterio,brendan-ward/rasterio,kapadia/rasterio,kapadia/rasterio
|
import pytest
import rasterio
from rasterio.errors import RasterioIOError
def test_io_error(tmpdir):
with pytest.raises(RasterioIOError) as exc_info:
rasterio.open(str(tmpdir.join('foo.tif')))
- msg = exc_info.value.message
+ msg = str(exc_info.value)
assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
assert ("does not exist in the file system, and is not recognised as a "
"supported dataset name.") in msg
def test_io_error_env(tmpdir):
with rasterio.drivers() as env:
drivers_start = env.drivers()
with pytest.raises(RasterioIOError):
rasterio.open(str(tmpdir.join('foo.tif')))
assert env.drivers() == drivers_start
def test_bogus_band_error():
with rasterio.open('tests/data/RGB.byte.tif') as src:
assert src._has_band(4) is False
|
Test str repr of exception
|
## Code Before:
import pytest

import rasterio
from rasterio.errors import RasterioIOError


def test_io_error(tmpdir):
    with pytest.raises(RasterioIOError) as exc_info:
        rasterio.open(str(tmpdir.join('foo.tif')))
    msg = exc_info.value.message
    assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
    assert ("does not exist in the file system, and is not recognised as a "
            "supported dataset name.") in msg


def test_io_error_env(tmpdir):
    with rasterio.drivers() as env:
        drivers_start = env.drivers()
        with pytest.raises(RasterioIOError):
            rasterio.open(str(tmpdir.join('foo.tif')))
        assert env.drivers() == drivers_start


def test_bogus_band_error():
    with rasterio.open('tests/data/RGB.byte.tif') as src:
        assert src._has_band(4) is False
## Instruction:
Test str repr of exception
## Code After:
import pytest

import rasterio
from rasterio.errors import RasterioIOError


def test_io_error(tmpdir):
    with pytest.raises(RasterioIOError) as exc_info:
        rasterio.open(str(tmpdir.join('foo.tif')))
    msg = str(exc_info.value)
    assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
    assert ("does not exist in the file system, and is not recognised as a "
            "supported dataset name.") in msg


def test_io_error_env(tmpdir):
    with rasterio.drivers() as env:
        drivers_start = env.drivers()
        with pytest.raises(RasterioIOError):
            rasterio.open(str(tmpdir.join('foo.tif')))
        assert env.drivers() == drivers_start


def test_bogus_band_error():
    with rasterio.open('tests/data/RGB.byte.tif') as src:
        assert src._has_band(4) is False
|
import pytest
import rasterio
from rasterio.errors import RasterioIOError
def test_io_error(tmpdir):
with pytest.raises(RasterioIOError) as exc_info:
rasterio.open(str(tmpdir.join('foo.tif')))
- msg = exc_info.value.message
? ^^^^^^^^
+ msg = str(exc_info.value)
? ++++ ^
assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
assert ("does not exist in the file system, and is not recognised as a "
"supported dataset name.") in msg
def test_io_error_env(tmpdir):
with rasterio.drivers() as env:
drivers_start = env.drivers()
with pytest.raises(RasterioIOError):
rasterio.open(str(tmpdir.join('foo.tif')))
assert env.drivers() == drivers_start
def test_bogus_band_error():
with rasterio.open('tests/data/RGB.byte.tif') as src:
assert src._has_band(4) is False
|
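The fix above works because `BaseException.message` was deprecated in Python 2.6 and removed entirely in Python 3, while `str()` renders an exception's arguments on both versions. A minimal sketch of the portable idiom, independent of rasterio:

```python
# Portable way to read an exception's text: str(exc) works on Python 2
# and 3, whereas exc.message raises AttributeError on Python 3.
try:
    raise IOError("'/tmp/foo.tif' does not exist in the file system")
except IOError as exc:
    msg = str(exc)
    assert msg.startswith("'/tmp/foo.tif'")
```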
d2bec26a63877e31e2d887e0879a8fd197741147
|
thinc/t2t.py
|
thinc/t2t.py
|
from __future__ import unicode_literals
from .neural._classes.convolution import ExtractWindow # noqa: F401
from .neural._classes.attention import ParametricAttention # noqa: F401
from .neural._classes.rnn import LSTM, BiLSTM # noqa: F401
|
from __future__ import unicode_literals
from .neural._classes.convolution import ExtractWindow # noqa: F401
from .neural._classes.attention import ParametricAttention # noqa: F401
from .neural._classes.rnn import LSTM, BiLSTM # noqa: F401
from .neural._classes.multiheaded_attention import MultiHeadedAttention
from .neural._classes.multiheaded_attention import prepare_self_attention
|
Add import links for MultiHeadedAttention and prepare_self_attention
|
Add import links for MultiHeadedAttention and prepare_self_attention
|
Python
|
mit
|
spacy-io/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc
|
from __future__ import unicode_literals
from .neural._classes.convolution import ExtractWindow # noqa: F401
from .neural._classes.attention import ParametricAttention # noqa: F401
from .neural._classes.rnn import LSTM, BiLSTM # noqa: F401
+ from .neural._classes.multiheaded_attention import MultiHeadedAttention
+ from .neural._classes.multiheaded_attention import prepare_self_attention
|
Add import links for MultiHeadedAttention and prepare_self_attention
|
## Code Before:
from __future__ import unicode_literals
from .neural._classes.convolution import ExtractWindow # noqa: F401
from .neural._classes.attention import ParametricAttention # noqa: F401
from .neural._classes.rnn import LSTM, BiLSTM # noqa: F401
## Instruction:
Add import links for MultiHeadedAttention and prepare_self_attention
## Code After:
from __future__ import unicode_literals
from .neural._classes.convolution import ExtractWindow # noqa: F401
from .neural._classes.attention import ParametricAttention # noqa: F401
from .neural._classes.rnn import LSTM, BiLSTM # noqa: F401
from .neural._classes.multiheaded_attention import MultiHeadedAttention
from .neural._classes.multiheaded_attention import prepare_self_attention
|
from __future__ import unicode_literals
from .neural._classes.convolution import ExtractWindow # noqa: F401
from .neural._classes.attention import ParametricAttention # noqa: F401
from .neural._classes.rnn import LSTM, BiLSTM # noqa: F401
+ from .neural._classes.multiheaded_attention import MultiHeadedAttention
+ from .neural._classes.multiheaded_attention import prepare_self_attention
|
d386c389b9e350b01fdf25f7cd91857d3fbb1ead
|
opps/contrib/multisite/admin.py
|
opps/contrib/multisite/admin.py
|
from django.contrib import admin
from django.conf import settings
from django.utils import timezone

from .models import SitePermission


class AdminViewPermission(admin.ModelAdmin):

    def queryset(self, request):
        queryset = super(AdminViewPermission, self).queryset(request)
        try:
            sitepermission = SitePermission.objects.get(
                user=request.user,
                date_available__lte=timezone.now(),
                published=True)
            return queryset.filter(site_iid=sitepermission.site_iid)
        except SitePermission.DoesNotExist:
            pass
        return queryset

    def get_form(self, request, obj=None, **kwargs):
        form = super(AdminViewPermission, self).get_form(request, obj,
                                                         **kwargs)
        if not settings.OPPS_MULTISITE_ADMIN:
            return form
        try:
            sitepermission = SitePermission.objects.get(
                user=request.user,
                date_available__lte=timezone.now(),
                published=True)
            form.base_fields['site'].initial = sitepermission.site
            form.base_fields['site'].choices = ((sitepermission.site.id,
                                                 sitepermission.site.domain),)
        except SitePermission.DoesNotExist:
            pass
        return form

admin.site.register(SitePermission)
|
from django.contrib import admin
from django.conf import settings
from django.utils import timezone

from .models import SitePermission


class AdminViewPermission(admin.ModelAdmin):

    def queryset(self, request):
        queryset = super(AdminViewPermission, self).queryset(request)
        if not settings.OPPS_MULTISITE_ADMIN:
            return queryset
        try:
            sitepermission = SitePermission.objects.get(
                user=request.user,
                date_available__lte=timezone.now(),
                published=True)
            return queryset.filter(site_iid=sitepermission.site_iid)
        except SitePermission.DoesNotExist:
            pass
        return queryset

    def get_form(self, request, obj=None, **kwargs):
        form = super(AdminViewPermission, self).get_form(request, obj,
                                                         **kwargs)
        if not settings.OPPS_MULTISITE_ADMIN:
            return form
        try:
            sitepermission = SitePermission.objects.get(
                user=request.user,
                date_available__lte=timezone.now(),
                published=True)
            form.base_fields['site'].initial = sitepermission.site
            form.base_fields['site'].choices = ((sitepermission.site.id,
                                                 sitepermission.site.domain),)
        except SitePermission.DoesNotExist:
            pass
        return form

admin.site.register(SitePermission)
|
Use OPPS_MULTISITE_ADMIN on queryset AdminViewPermission
|
Use OPPS_MULTISITE_ADMIN on queryset AdminViewPermission
|
Python
|
mit
|
opps/opps,YACOWS/opps,williamroot/opps,opps/opps,williamroot/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,opps/opps
|
from django.contrib import admin
from django.conf import settings
from django.utils import timezone
from .models import SitePermission
class AdminViewPermission(admin.ModelAdmin):
def queryset(self, request):
queryset = super(AdminViewPermission, self).queryset(request)
+ if not settings.OPPS_MULTISITE_ADMIN:
+ return queryset
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
return queryset.filter(site_iid=sitepermission.site_iid)
except SitePermission.DoesNotExist:
pass
return queryset
def get_form(self, request, obj=None, **kwargs):
form = super(AdminViewPermission, self).get_form(request, obj,
**kwargs)
if not settings.OPPS_MULTISITE_ADMIN:
return form
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
form.base_fields['site'].initial = sitepermission.site
form.base_fields['site'].choices = ((sitepermission.site.id,
sitepermission.site.domain),)
except SitePermission.DoesNotExist:
pass
return form
admin.site.register(SitePermission)
|
Use OPPS_MULTISITE_ADMIN on queryset AdminViewPermission
|
## Code Before:
from django.contrib import admin
from django.conf import settings
from django.utils import timezone

from .models import SitePermission


class AdminViewPermission(admin.ModelAdmin):

    def queryset(self, request):
        queryset = super(AdminViewPermission, self).queryset(request)
        try:
            sitepermission = SitePermission.objects.get(
                user=request.user,
                date_available__lte=timezone.now(),
                published=True)
            return queryset.filter(site_iid=sitepermission.site_iid)
        except SitePermission.DoesNotExist:
            pass
        return queryset

    def get_form(self, request, obj=None, **kwargs):
        form = super(AdminViewPermission, self).get_form(request, obj,
                                                         **kwargs)
        if not settings.OPPS_MULTISITE_ADMIN:
            return form
        try:
            sitepermission = SitePermission.objects.get(
                user=request.user,
                date_available__lte=timezone.now(),
                published=True)
            form.base_fields['site'].initial = sitepermission.site
            form.base_fields['site'].choices = ((sitepermission.site.id,
                                                 sitepermission.site.domain),)
        except SitePermission.DoesNotExist:
            pass
        return form

admin.site.register(SitePermission)
## Instruction:
Use OPPS_MULTISITE_ADMIN on queryset AdminViewPermission
## Code After:
from django.contrib import admin
from django.conf import settings
from django.utils import timezone

from .models import SitePermission


class AdminViewPermission(admin.ModelAdmin):

    def queryset(self, request):
        queryset = super(AdminViewPermission, self).queryset(request)
        if not settings.OPPS_MULTISITE_ADMIN:
            return queryset
        try:
            sitepermission = SitePermission.objects.get(
                user=request.user,
                date_available__lte=timezone.now(),
                published=True)
            return queryset.filter(site_iid=sitepermission.site_iid)
        except SitePermission.DoesNotExist:
            pass
        return queryset

    def get_form(self, request, obj=None, **kwargs):
        form = super(AdminViewPermission, self).get_form(request, obj,
                                                         **kwargs)
        if not settings.OPPS_MULTISITE_ADMIN:
            return form
        try:
            sitepermission = SitePermission.objects.get(
                user=request.user,
                date_available__lte=timezone.now(),
                published=True)
            form.base_fields['site'].initial = sitepermission.site
            form.base_fields['site'].choices = ((sitepermission.site.id,
                                                 sitepermission.site.domain),)
        except SitePermission.DoesNotExist:
            pass
        return form

admin.site.register(SitePermission)
|
from django.contrib import admin
from django.conf import settings
from django.utils import timezone
from .models import SitePermission
class AdminViewPermission(admin.ModelAdmin):
def queryset(self, request):
queryset = super(AdminViewPermission, self).queryset(request)
+ if not settings.OPPS_MULTISITE_ADMIN:
+ return queryset
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
return queryset.filter(site_iid=sitepermission.site_iid)
except SitePermission.DoesNotExist:
pass
return queryset
def get_form(self, request, obj=None, **kwargs):
form = super(AdminViewPermission, self).get_form(request, obj,
**kwargs)
if not settings.OPPS_MULTISITE_ADMIN:
return form
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
form.base_fields['site'].initial = sitepermission.site
form.base_fields['site'].choices = ((sitepermission.site.id,
sitepermission.site.domain),)
except SitePermission.DoesNotExist:
pass
return form
admin.site.register(SitePermission)
|
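The fix simply gives `queryset` the same early-return guard that `get_form` already had, so both methods skip the `SitePermission` lookup whenever the feature flag is off. A framework-free sketch of the guard pattern (the names below are illustrative, not from opps):

```python
MULTISITE_ADMIN = False  # stands in for settings.OPPS_MULTISITE_ADMIN

def visible_items(items, allowed):
    # Feature disabled: return everything unfiltered, as queryset() now does.
    if not MULTISITE_ADMIN:
        return items
    return [item for item in items if item in allowed]

assert visible_items([1, 2, 3], allowed={2}) == [1, 2, 3]
```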
b9671e96e40b38d0662dbe0e32dca0ca0c5fe62e
|
tensor2tensor/rl/trainer_model_based_test.py
|
tensor2tensor/rl/trainer_model_based_test.py
|
"""Tiny run of trainer_model_based. Smoke test."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensor2tensor.rl import trainer_model_based
import tensorflow as tf
FLAGS = tf.flags.FLAGS
class ModelRLExperimentTest(tf.test.TestCase):
def test_basic(self):
FLAGS.output_dir = tf.test.get_temp_dir()
FLAGS.loop_hparams_set = "rl_modelrl_tiny"
FLAGS.schedule = "train" # skip evaluation for world model training
trainer_model_based.main(None)
if __name__ == "__main__":
tf.test.main()
|
"""Tiny run of trainer_model_based. Smoke test."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
from tensor2tensor.rl import trainer_model_based
import tensorflow as tf
FLAGS = tf.flags.FLAGS
class ModelRLExperimentTest(tf.test.TestCase):
def setUp(self):
super(ModelRLExperimentTest, self).setUp()
FLAGS.output_dir = tf.test.get_temp_dir()
shutil.rmtree(FLAGS.output_dir)
os.mkdir(FLAGS.output_dir)
FLAGS.schedule = "train" # skip evaluation for world model training
def test_basic(self):
FLAGS.loop_hparams_set = "rl_modelrl_tiny"
trainer_model_based.main(None)
def test_ae(self):
FLAGS.loop_hparams_set = "rl_modelrl_ae_tiny"
trainer_model_based.main(None)
if __name__ == "__main__":
tf.test.main()
|
Add a test for the AE experiment
|
Add a test for the AE experiment
|
Python
|
apache-2.0
|
tensorflow/tensor2tensor,tensorflow/tensor2tensor,vthorsteinsson/tensor2tensor,tensorflow/tensor2tensor,vthorsteinsson/tensor2tensor,tensorflow/tensor2tensor,vthorsteinsson/tensor2tensor,tensorflow/tensor2tensor,vthorsteinsson/tensor2tensor
|
"""Tiny run of trainer_model_based. Smoke test."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+
+ import os
+ import shutil
from tensor2tensor.rl import trainer_model_based
import tensorflow as tf
FLAGS = tf.flags.FLAGS
class ModelRLExperimentTest(tf.test.TestCase):
+ def setUp(self):
+ super(ModelRLExperimentTest, self).setUp()
+ FLAGS.output_dir = tf.test.get_temp_dir()
+ shutil.rmtree(FLAGS.output_dir)
+ os.mkdir(FLAGS.output_dir)
+ FLAGS.schedule = "train" # skip evaluation for world model training
+
def test_basic(self):
- FLAGS.output_dir = tf.test.get_temp_dir()
FLAGS.loop_hparams_set = "rl_modelrl_tiny"
- FLAGS.schedule = "train" # skip evaluation for world model training
+ trainer_model_based.main(None)
+
+ def test_ae(self):
+ FLAGS.loop_hparams_set = "rl_modelrl_ae_tiny"
trainer_model_based.main(None)
if __name__ == "__main__":
tf.test.main()
|
Add a test for the AE experiment
|
## Code Before:
"""Tiny run of trainer_model_based. Smoke test."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensor2tensor.rl import trainer_model_based
import tensorflow as tf
FLAGS = tf.flags.FLAGS
class ModelRLExperimentTest(tf.test.TestCase):
def test_basic(self):
FLAGS.output_dir = tf.test.get_temp_dir()
FLAGS.loop_hparams_set = "rl_modelrl_tiny"
FLAGS.schedule = "train" # skip evaluation for world model training
trainer_model_based.main(None)
if __name__ == "__main__":
tf.test.main()
## Instruction:
Add a test for the AE experiment
## Code After:
"""Tiny run of trainer_model_based. Smoke test."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
from tensor2tensor.rl import trainer_model_based
import tensorflow as tf
FLAGS = tf.flags.FLAGS
class ModelRLExperimentTest(tf.test.TestCase):
def setUp(self):
super(ModelRLExperimentTest, self).setUp()
FLAGS.output_dir = tf.test.get_temp_dir()
shutil.rmtree(FLAGS.output_dir)
os.mkdir(FLAGS.output_dir)
FLAGS.schedule = "train" # skip evaluation for world model training
def test_basic(self):
FLAGS.loop_hparams_set = "rl_modelrl_tiny"
trainer_model_based.main(None)
def test_ae(self):
FLAGS.loop_hparams_set = "rl_modelrl_ae_tiny"
trainer_model_based.main(None)
if __name__ == "__main__":
tf.test.main()
|
"""Tiny run of trainer_model_based. Smoke test."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+
+ import os
+ import shutil
from tensor2tensor.rl import trainer_model_based
import tensorflow as tf
FLAGS = tf.flags.FLAGS
class ModelRLExperimentTest(tf.test.TestCase):
+ def setUp(self):
+ super(ModelRLExperimentTest, self).setUp()
+ FLAGS.output_dir = tf.test.get_temp_dir()
+ shutil.rmtree(FLAGS.output_dir)
+ os.mkdir(FLAGS.output_dir)
+ FLAGS.schedule = "train" # skip evaluation for world model training
+
def test_basic(self):
- FLAGS.output_dir = tf.test.get_temp_dir()
FLAGS.loop_hparams_set = "rl_modelrl_tiny"
- FLAGS.schedule = "train" # skip evaluation for world model training
+ trainer_model_based.main(None)
+
+ def test_ae(self):
+ FLAGS.loop_hparams_set = "rl_modelrl_ae_tiny"
trainer_model_based.main(None)
if __name__ == "__main__":
tf.test.main()
|
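Moving the flag assignments into `setUp` lets `test_basic` and the new `test_ae` share them, and the `rmtree`/`mkdir` pair suggests that `tf.test.get_temp_dir()` hands back the same directory for every test in a process, so each test must start from an empty output directory. A TensorFlow-free sketch of that isolation pattern (directory name is hypothetical):

```python
import os
import shutil
import tempfile
import unittest

OUTPUT_DIR = os.path.join(tempfile.gettempdir(), 'experiment_output')  # hypothetical

class ExperimentTest(unittest.TestCase):
    def setUp(self):
        # Wipe and recreate the shared directory so a test cannot see
        # checkpoints left behind by an earlier test.
        shutil.rmtree(OUTPUT_DIR, ignore_errors=True)
        os.mkdir(OUTPUT_DIR)

    def test_starts_empty(self):
        self.assertEqual(os.listdir(OUTPUT_DIR), [])

if __name__ == '__main__':
    unittest.main()
```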
f627a76e8dac96282b0a9f76eeda8c7db70cc030
|
telemetry/telemetry/internal/actions/javascript_click.py
|
telemetry/telemetry/internal/actions/javascript_click.py
|
from telemetry.internal.actions import page_action


class ClickElementAction(page_action.ElementPageAction):

  def RunAction(self, tab):
    code = '''
        function(element, errorMsg) {
          if (!element) {
            throw Error('Cannot find element: ' + errorMsg);
          }
          element.click();
        }'''
    # Click handler that plays media or requests fullscreen may not take
    # effect without user_gesture set to True.
    self.EvaluateCallback(tab, code)
|
from telemetry.internal.actions import page_action


class ClickElementAction(page_action.ElementPageAction):

  def RunAction(self, tab):
    code = '''
        function(element, errorMsg) {
          if (!element) {
            throw Error('Cannot find element: ' + errorMsg);
          }
          element.click();
        }'''
    # Click handler that plays media or requests fullscreen may not take
    # effect without user_gesture set to True.
    self.EvaluateCallback(tab, code, user_gesture=True)
|
Fix a regression where the user_gesture bit isn't set for ClickElement.
|
Fix a regression where the user_gesture bit isn't set for ClickElement.
The regression was introduced in
https://chromium-review.googlesource.com/c/catapult/+/1335627
Once this rolls into Chromium, I'll add a Chromium-side test to prevent
it from regressing again in the future.
Bug: chromium:885912
TEST=manual
[email protected],[email protected]
Change-Id: Ic1c7e83a3e7d7318baa81531925dab07db9450ca
Reviewed-on: https://chromium-review.googlesource.com/c/1476957
Reviewed-by: Caleb Rouleau <[email protected]>
Commit-Queue: Zhenyao Mo <[email protected]>
|
Python
|
bsd-3-clause
|
catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult
|
from telemetry.internal.actions import page_action
class ClickElementAction(page_action.ElementPageAction):
def RunAction(self, tab):
code = '''
function(element, errorMsg) {
if (!element) {
throw Error('Cannot find element: ' + errorMsg);
}
element.click();
}'''
# Click handler that plays media or requests fullscreen may not take
# effect without user_gesture set to True.
- self.EvaluateCallback(tab, code)
+ self.EvaluateCallback(tab, code, user_gesture=True)
|
Fix a regression where the user_gesture bit isn't set for ClickElement.
|
## Code Before:
from telemetry.internal.actions import page_action


class ClickElementAction(page_action.ElementPageAction):

  def RunAction(self, tab):
    code = '''
        function(element, errorMsg) {
          if (!element) {
            throw Error('Cannot find element: ' + errorMsg);
          }
          element.click();
        }'''
    # Click handler that plays media or requests fullscreen may not take
    # effect without user_gesture set to True.
    self.EvaluateCallback(tab, code)
## Instruction:
Fix a regression where the user_gesture bit isn't set for ClickElement.
## Code After:
from telemetry.internal.actions import page_action


class ClickElementAction(page_action.ElementPageAction):

  def RunAction(self, tab):
    code = '''
        function(element, errorMsg) {
          if (!element) {
            throw Error('Cannot find element: ' + errorMsg);
          }
          element.click();
        }'''
    # Click handler that plays media or requests fullscreen may not take
    # effect without user_gesture set to True.
    self.EvaluateCallback(tab, code, user_gesture=True)
|
from telemetry.internal.actions import page_action
class ClickElementAction(page_action.ElementPageAction):
def RunAction(self, tab):
code = '''
function(element, errorMsg) {
if (!element) {
throw Error('Cannot find element: ' + errorMsg);
}
element.click();
}'''
# Click handler that plays media or requests fullscreen may not take
# effect without user_gesture set to True.
- self.EvaluateCallback(tab, code)
+ self.EvaluateCallback(tab, code, user_gesture=True)
? +++++++++++++++++++
|
37da65953471b5dd0930e102b861878012938701
|
registration/__init__.py
|
registration/__init__.py
|
from django.utils.version import get_version as django_get_version


VERSION = (0, 9, 0, 'beta', 1)


def get_version():
    return django_get_version(VERSION)  # pragma: no cover
|
VERSION = (0, 9, 0, 'beta', 1)


def get_version():
    from django.utils.version import get_version as django_get_version
    return django_get_version(VERSION)  # pragma: no cover
|
Move import of Django's get_version into django-registration's get_version, to avoid dependency-order problems.
|
Move import of Django's get_version into django-registration's get_version, to avoid dependency-order problems.
|
Python
|
bsd-3-clause
|
lubosz/django-registration,lubosz/django-registration
|
- from django.utils.version import get_version as django_get_version
-
-
VERSION = (0, 9, 0, 'beta', 1)
def get_version():
+ from django.utils.version import get_version as django_get_version
return django_get_version(VERSION) # pragma: no cover
|
Move import of Django's get_version into django-registration's get_version, to avoid dependency-order problems.
|
## Code Before:
from django.utils.version import get_version as django_get_version


VERSION = (0, 9, 0, 'beta', 1)


def get_version():
    return django_get_version(VERSION)  # pragma: no cover
## Instruction:
Move import of Django's get_version into django-registration's get_version, to avoid dependency-order problems.
## Code After:
VERSION = (0, 9, 0, 'beta', 1)


def get_version():
    from django.utils.version import get_version as django_get_version
    return django_get_version(VERSION)  # pragma: no cover
|
- from django.utils.version import get_version as django_get_version
-
-
VERSION = (0, 9, 0, 'beta', 1)
def get_version():
+ from django.utils.version import get_version as django_get_version
return django_get_version(VERSION) # pragma: no cover
|
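With the import deferred into the function body, importing the `registration` package no longer pulls in Django at all; only a call to `get_version()` does. That matters when, for example, a setup script imports the package for its `VERSION` tuple before Django is installed. A sketch of the lazy-import pattern (the `ImportError` fallback is an illustration, not part of the commit):

```python
VERSION = (0, 9, 0, 'beta', 1)

def get_version():
    try:
        # Deferred: only evaluated when a formatted string is requested.
        from django.utils.version import get_version as django_get_version
    except ImportError:
        # Illustrative fallback for environments without Django.
        return '.'.join(str(part) for part in VERSION[:3])
    return django_get_version(VERSION)

print(get_version())
```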
c716124b8ede9678ca24eb07f1aa83c1fba9f177
|
doorman/celery_serializer.py
|
doorman/celery_serializer.py
|
from datetime import datetime
from time import mktime

import json


class DJSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime):
            return {
                '__type__': '__datetime__',
                'epoch': int(mktime(obj.timetuple()))
            }
        else:
            return json.JSONEncoder.default(self, obj)


def djson_decoder(obj):
    if '__type__' in obj:
        if obj['__type__'] == '__datetime__':
            return datetime.fromtimestamp(obj['epoch'])
    return obj


# Encoder function
def djson_dumps(obj):
    return json.dumps(obj, cls=DJSONEncoder)


# Decoder function
def djson_loads(obj):
    return json.loads(obj, object_hook=djson_decoder)
|
from datetime import datetime
from time import mktime

import json

from doorman.compat import string_types


class DJSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime):
            return {
                '__type__': '__datetime__',
                'epoch': int(mktime(obj.timetuple()))
            }
        else:
            return json.JSONEncoder.default(self, obj)


def djson_decoder(obj):
    if '__type__' in obj:
        if obj['__type__'] == '__datetime__':
            return datetime.fromtimestamp(obj['epoch'])
    return obj


# Encoder function
def djson_dumps(obj):
    return json.dumps(obj, cls=DJSONEncoder)


# Decoder function
def djson_loads(s):
    if not isinstance(s, string_types):
        s = s.decode('utf-8')
    return json.loads(s, object_hook=djson_decoder)
|
Fix custom decoder on Python 3
|
Fix custom decoder on Python 3
|
Python
|
mit
|
mwielgoszewski/doorman,mwielgoszewski/doorman,mwielgoszewski/doorman,mwielgoszewski/doorman
|
from datetime import datetime
from time import mktime
import json
+
+ from doorman.compat import string_types
class DJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime):
return {
'__type__': '__datetime__',
'epoch': int(mktime(obj.timetuple()))
}
else:
return json.JSONEncoder.default(self, obj)
def djson_decoder(obj):
if '__type__' in obj:
if obj['__type__'] == '__datetime__':
return datetime.fromtimestamp(obj['epoch'])
return obj
# Encoder function
def djson_dumps(obj):
return json.dumps(obj, cls=DJSONEncoder)
# Decoder function
- def djson_loads(obj):
+ def djson_loads(s):
+ if not isinstance(s, string_types):
+ s = s.decode('utf-8')
- return json.loads(obj, object_hook=djson_decoder)
+ return json.loads(s, object_hook=djson_decoder)
|
Fix custom decoder on Python 3
|
## Code Before:
from datetime import datetime
from time import mktime

import json


class DJSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime):
            return {
                '__type__': '__datetime__',
                'epoch': int(mktime(obj.timetuple()))
            }
        else:
            return json.JSONEncoder.default(self, obj)


def djson_decoder(obj):
    if '__type__' in obj:
        if obj['__type__'] == '__datetime__':
            return datetime.fromtimestamp(obj['epoch'])
    return obj


# Encoder function
def djson_dumps(obj):
    return json.dumps(obj, cls=DJSONEncoder)


# Decoder function
def djson_loads(obj):
    return json.loads(obj, object_hook=djson_decoder)
## Instruction:
Fix custom decoder on Python 3
## Code After:
from datetime import datetime
from time import mktime

import json

from doorman.compat import string_types


class DJSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime):
            return {
                '__type__': '__datetime__',
                'epoch': int(mktime(obj.timetuple()))
            }
        else:
            return json.JSONEncoder.default(self, obj)


def djson_decoder(obj):
    if '__type__' in obj:
        if obj['__type__'] == '__datetime__':
            return datetime.fromtimestamp(obj['epoch'])
    return obj


# Encoder function
def djson_dumps(obj):
    return json.dumps(obj, cls=DJSONEncoder)


# Decoder function
def djson_loads(s):
    if not isinstance(s, string_types):
        s = s.decode('utf-8')
    return json.loads(s, object_hook=djson_decoder)
|
from datetime import datetime
from time import mktime
import json
+
+ from doorman.compat import string_types
class DJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime):
return {
'__type__': '__datetime__',
'epoch': int(mktime(obj.timetuple()))
}
else:
return json.JSONEncoder.default(self, obj)
def djson_decoder(obj):
if '__type__' in obj:
if obj['__type__'] == '__datetime__':
return datetime.fromtimestamp(obj['epoch'])
return obj
# Encoder function
def djson_dumps(obj):
return json.dumps(obj, cls=DJSONEncoder)
# Decoder function
- def djson_loads(obj):
? ^^^
+ def djson_loads(s):
? ^
+ if not isinstance(s, string_types):
+ s = s.decode('utf-8')
- return json.loads(obj, object_hook=djson_decoder)
? ^^^
+ return json.loads(s, object_hook=djson_decoder)
? ^
|
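The fix is needed because `json.loads` on Python 3 historically accepted only `str` (bytes support arrived in 3.6), while a Celery transport can hand the deserializer raw bytes; `string_types` is presumably a six-style compatibility tuple defined in `doorman.compat`. A self-contained sketch of the same guard, without the compat module:

```python
import json

def loads_compat(s):
    # json.loads on early Python 3 rejects bytes, so decode first.
    if not isinstance(s, str):
        s = s.decode('utf-8')
    return json.loads(s)

assert loads_compat(b'{"epoch": 1234567890}') == {'epoch': 1234567890}
assert loads_compat('{"epoch": 1234567890}') == {'epoch': 1234567890}
```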
23ec0899eaf60a9dc79f6671461a33eea7e7f464
|
authtools/backends.py
|
authtools/backends.py
|
from django.contrib.auth.backends import ModelBackend


class CaseInsensitiveEmailBackend(ModelBackend):
    """
    This authentication backend assumes that usernames are email addresses and simply lowercases
    a username before an attempt is made to authenticate said username using Django's ModelBackend.

    Example usage:

        # In settings.py
        AUTHENTICATION_BACKENDS = ('authtools.backends.CaseInsensitiveEmailBackend',)

    NOTE:
        A word of caution. Use of this backend presupposes a way to ensure that users cannot create
        usernames that differ only in case (e.g., [email protected] and [email protected]). Using this backend
        in such a system is a huge security risk.
    """
    def authenticate(self, username=None, password=None, **kwargs):
        if username is not None:
            username = username.lower()

        return super(CaseInsensitiveEmailBackend, self).authenticate(
            username=username,
            password=password,
            **kwargs
        )
|
from django.contrib.auth.backends import ModelBackend


class CaseInsensitiveEmailBackendMixin(object):
    def authenticate(self, username=None, password=None, **kwargs):
        if username is not None:
            username = username.lower()

        return super(CaseInsensitiveEmailBackendMixin, self).authenticate(
            username=username,
            password=password,
            **kwargs
        )


class CaseInsensitiveEmailBackend(ModelBackend):
    """
    This authentication backend assumes that usernames are email addresses and simply lowercases
    a username before an attempt is made to authenticate said username using Django's ModelBackend.

    Example usage:

        # In settings.py
        AUTHENTICATION_BACKENDS = ('authtools.backends.CaseInsensitiveEmailBackend',)

    NOTE:
        A word of caution. Use of this backend presupposes a way to ensure that users cannot create
        usernames that differ only in case (e.g., [email protected] and [email protected]). Using this backend
        in such a system is a huge security risk.
    """
    def authenticate(self, username=None, password=None, **kwargs):
        if username is not None:
            username = username.lower()

        return super(CaseInsensitiveEmailBackend, self).authenticate(
            username=username,
            password=password,
            **kwargs
        )
|
Add mixin to make the case-insensitive email auth backend more flexible
|
Add mixin to make the case-insensitive email auth backend more flexible
|
Python
|
bsd-2-clause
|
fusionbox/django-authtools,vuchau/django-authtools,moreati/django-authtools,eevol/django-authtools,kivikakk/django-authtools
|
from django.contrib.auth.backends import ModelBackend
+
+
+ class CaseInsensitiveEmailBackendMixin(object):
+ def authenticate(self, username=None, password=None, **kwargs):
+ if username is not None:
+ username = username.lower()
+
+ return super(CaseInsensitiveEmailBackendMixin, self).authenticate(
+ username=username,
+ password=password,
+ **kwargs
+ )
class CaseInsensitiveEmailBackend(ModelBackend):
"""
This authentication backend assumes that usernames are email addresses and simply lowercases
a username before an attempt is made to authenticate said username using Django's ModelBackend.
Example usage:
# In settings.py
AUTHENTICATION_BACKENDS = ('authtools.backends.CaseInsensitiveEmailBackend',)
NOTE:
A word of caution. Use of this backend presupposes a way to ensure that users cannot create
usernames that differ only in case (e.g., [email protected] and [email protected]). Using this backend
in such a system is a huge security risk.
"""
def authenticate(self, username=None, password=None, **kwargs):
if username is not None:
username = username.lower()
return super(CaseInsensitiveEmailBackend, self).authenticate(
username=username,
password=password,
**kwargs
)
|
Add mixin to make the case-insensitive email auth backend more flexible
|
## Code Before:
from django.contrib.auth.backends import ModelBackend


class CaseInsensitiveEmailBackend(ModelBackend):
    """
    This authentication backend assumes that usernames are email addresses and simply lowercases
    a username before an attempt is made to authenticate said username using Django's ModelBackend.

    Example usage:

        # In settings.py
        AUTHENTICATION_BACKENDS = ('authtools.backends.CaseInsensitiveEmailBackend',)

    NOTE:
        A word of caution. Use of this backend presupposes a way to ensure that users cannot create
        usernames that differ only in case (e.g., [email protected] and [email protected]). Using this backend
        in such a system is a huge security risk.
    """
    def authenticate(self, username=None, password=None, **kwargs):
        if username is not None:
            username = username.lower()

        return super(CaseInsensitiveEmailBackend, self).authenticate(
            username=username,
            password=password,
            **kwargs
        )
## Instruction:
Add mixin to make the case-insensitive email auth backend more flexible
## Code After:
from django.contrib.auth.backends import ModelBackend


class CaseInsensitiveEmailBackendMixin(object):
    def authenticate(self, username=None, password=None, **kwargs):
        if username is not None:
            username = username.lower()

        return super(CaseInsensitiveEmailBackendMixin, self).authenticate(
            username=username,
            password=password,
            **kwargs
        )


class CaseInsensitiveEmailBackend(ModelBackend):
    """
    This authentication backend assumes that usernames are email addresses and simply lowercases
    a username before an attempt is made to authenticate said username using Django's ModelBackend.

    Example usage:

        # In settings.py
        AUTHENTICATION_BACKENDS = ('authtools.backends.CaseInsensitiveEmailBackend',)

    NOTE:
        A word of caution. Use of this backend presupposes a way to ensure that users cannot create
        usernames that differ only in case (e.g., [email protected] and [email protected]). Using this backend
        in such a system is a huge security risk.
    """
    def authenticate(self, username=None, password=None, **kwargs):
        if username is not None:
            username = username.lower()

        return super(CaseInsensitiveEmailBackend, self).authenticate(
            username=username,
            password=password,
            **kwargs
        )
|
from django.contrib.auth.backends import ModelBackend
+
+
+ class CaseInsensitiveEmailBackendMixin(object):
+ def authenticate(self, username=None, password=None, **kwargs):
+ if username is not None:
+ username = username.lower()
+
+ return super(CaseInsensitiveEmailBackendMixin, self).authenticate(
+ username=username,
+ password=password,
+ **kwargs
+ )
class CaseInsensitiveEmailBackend(ModelBackend):
"""
This authentication backend assumes that usernames are email addresses and simply lowercases
a username before an attempt is made to authenticate said username using Django's ModelBackend.
Example usage:
# In settings.py
AUTHENTICATION_BACKENDS = ('authtools.backends.CaseInsensitiveEmailBackend',)
NOTE:
A word of caution. Use of this backend presupposes a way to ensure that users cannot create
usernames that differ only in case (e.g., [email protected] and [email protected]). Using this backend
in such a system is a huge security risk.
"""
def authenticate(self, username=None, password=None, **kwargs):
if username is not None:
username = username.lower()
return super(CaseInsensitiveEmailBackend, self).authenticate(
username=username,
password=password,
**kwargs
)
|
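Factoring the lowercasing into a mixin lets the same behaviour be stacked onto any backend through Python's method resolution order, while the concrete `CaseInsensitiveEmailBackend` keeps working unchanged. A hedged sketch of how such a mixin composes (the stand-in backend below is hypothetical, not Django's):

```python
class CaseInsensitiveMixin(object):
    def authenticate(self, username=None, password=None, **kwargs):
        if username is not None:
            username = username.lower()
        # Cooperates with the next authenticate() in the MRO.
        return super(CaseInsensitiveMixin, self).authenticate(
            username=username, password=password, **kwargs)

class EchoBackend(object):
    # Hypothetical stand-in for ModelBackend: just returns the username.
    def authenticate(self, username=None, password=None, **kwargs):
        return username

class CaseInsensitiveEchoBackend(CaseInsensitiveMixin, EchoBackend):
    pass

assert CaseInsensitiveEchoBackend().authenticate(username='ALICE') == 'alice'
```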
c5ef250240cbaa894ee84615c5d07a383bd16962
|
fluent_contents/plugins/oembeditem/content_plugins.py
|
fluent_contents/plugins/oembeditem/content_plugins.py
|
from django.utils.translation import ugettext_lazy as _

from fluent_contents.extensions import ContentPlugin, plugin_pool
from fluent_contents.plugins.oembeditem.forms import OEmbedItemForm
from fluent_contents.plugins.oembeditem.models import OEmbedItem


@plugin_pool.register
class OEmbedPlugin(ContentPlugin):
    model = OEmbedItem
    category = _('Online content')
    form = OEmbedItemForm
    render_template = "fluent_contents/plugins/oembed/default.html"

    class Media:
        css = {
            'screen': (
                'fluent_contents/plugins/oembed/oembed_admin.css',
            )
        }

    def get_render_template(self, request, instance, **kwargs):
        """
        Allow styling the item based on the type.
        """
        return ["fluent_contents/plugins/oembed/{type}.html".format(type=instance.type or 'default'), self.render_template]
|
from django.utils.translation import ugettext_lazy as _

from fluent_contents.extensions import ContentPlugin, plugin_pool
from fluent_contents.plugins.oembeditem.forms import OEmbedItemForm
from fluent_contents.plugins.oembeditem.models import OEmbedItem
import re

re_safe = re.compile(r'[^\w_-]')


@plugin_pool.register
class OEmbedPlugin(ContentPlugin):
    model = OEmbedItem
    category = _('Online content')
    form = OEmbedItemForm
    render_template = "fluent_contents/plugins/oembed/default.html"

    class Media:
        css = {
            'screen': (
                'fluent_contents/plugins/oembed/oembed_admin.css',
            )
        }

    def get_render_template(self, request, instance, **kwargs):
        """
        Allow styling the item based on the type.
        """
        safe_filename = re_safe.sub('', instance.type or 'default')
        return [
            "fluent_contents/plugins/oembed/{type}.html".format(type=safe_filename),
            self.render_template
        ]
|
Make sure the OEmbed type can never be used to control filenames.
|
Make sure the OEmbed type can never be used to control filenames.
Minor risk, as it's still a template path, but better safe than sorry.
|
Python
|
apache-2.0
|
pombredanne/django-fluent-contents,edoburu/django-fluent-contents,jpotterm/django-fluent-contents,ixc/django-fluent-contents,pombredanne/django-fluent-contents,ixc/django-fluent-contents,jpotterm/django-fluent-contents,edoburu/django-fluent-contents,django-fluent/django-fluent-contents,pombredanne/django-fluent-contents,ixc/django-fluent-contents,django-fluent/django-fluent-contents,django-fluent/django-fluent-contents,edoburu/django-fluent-contents,jpotterm/django-fluent-contents
|
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from fluent_contents.plugins.oembeditem.forms import OEmbedItemForm
from fluent_contents.plugins.oembeditem.models import OEmbedItem
+ import re
+
+ re_safe = re.compile(r'[^\w_-]')
@plugin_pool.register
class OEmbedPlugin(ContentPlugin):
model = OEmbedItem
category = _('Online content')
form = OEmbedItemForm
render_template = "fluent_contents/plugins/oembed/default.html"
class Media:
css = {
'screen': (
'fluent_contents/plugins/oembed/oembed_admin.css',
)
}
def get_render_template(self, request, instance, **kwargs):
"""
Allow styling the item based on the type.
"""
- return ["fluent_contents/plugins/oembed/{type}.html".format(type=instance.type or 'default'), self.render_template]
+ safe_filename = re_safe.sub('', instance.type or 'default')
+ return [
+ "fluent_contents/plugins/oembed/{type}.html".format(type=safe_filename),
+ self.render_template
+ ]
|
Make sure the OEmbed type can never be used to control filenames.
|
## Code Before:
from django.utils.translation import ugettext_lazy as _

from fluent_contents.extensions import ContentPlugin, plugin_pool
from fluent_contents.plugins.oembeditem.forms import OEmbedItemForm
from fluent_contents.plugins.oembeditem.models import OEmbedItem


@plugin_pool.register
class OEmbedPlugin(ContentPlugin):
    model = OEmbedItem
    category = _('Online content')
    form = OEmbedItemForm
    render_template = "fluent_contents/plugins/oembed/default.html"

    class Media:
        css = {
            'screen': (
                'fluent_contents/plugins/oembed/oembed_admin.css',
            )
        }

    def get_render_template(self, request, instance, **kwargs):
        """
        Allow styling the item based on the type.
        """
        return ["fluent_contents/plugins/oembed/{type}.html".format(type=instance.type or 'default'), self.render_template]
## Instruction:
Make sure the OEmbed type can never be used to control filenames.
## Code After:
from django.utils.translation import ugettext_lazy as _

from fluent_contents.extensions import ContentPlugin, plugin_pool
from fluent_contents.plugins.oembeditem.forms import OEmbedItemForm
from fluent_contents.plugins.oembeditem.models import OEmbedItem
import re

re_safe = re.compile(r'[^\w_-]')


@plugin_pool.register
class OEmbedPlugin(ContentPlugin):
    model = OEmbedItem
    category = _('Online content')
    form = OEmbedItemForm
    render_template = "fluent_contents/plugins/oembed/default.html"

    class Media:
        css = {
            'screen': (
                'fluent_contents/plugins/oembed/oembed_admin.css',
            )
        }

    def get_render_template(self, request, instance, **kwargs):
        """
        Allow styling the item based on the type.
        """
        safe_filename = re_safe.sub('', instance.type or 'default')
        return [
            "fluent_contents/plugins/oembed/{type}.html".format(type=safe_filename),
            self.render_template
        ]
|
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from fluent_contents.plugins.oembeditem.forms import OEmbedItemForm
from fluent_contents.plugins.oembeditem.models import OEmbedItem
+ import re
+
+ re_safe = re.compile(r'[^\w_-]')
@plugin_pool.register
class OEmbedPlugin(ContentPlugin):
model = OEmbedItem
category = _('Online content')
form = OEmbedItemForm
render_template = "fluent_contents/plugins/oembed/default.html"
class Media:
css = {
'screen': (
'fluent_contents/plugins/oembed/oembed_admin.css',
)
}
def get_render_template(self, request, instance, **kwargs):
"""
Allow styling the item based on the type.
"""
- return ["fluent_contents/plugins/oembed/{type}.html".format(type=instance.type or 'default'), self.render_template]
+ safe_filename = re_safe.sub('', instance.type or 'default')
+ return [
+ "fluent_contents/plugins/oembed/{type}.html".format(type=safe_filename),
+ self.render_template
+ ]
|
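Because `instance.type` ultimately comes from the OEmbed provider's response, interpolating it raw into a template path would let a hostile value steer the template lookup (e.g. with `../`); `re_safe` drops every character outside `[\w_-]` first. A quick, standalone check of that behaviour:

```python
import re

re_safe = re.compile(r'[^\w_-]')

def safe_template_name(oembed_type):
    # Anything that isn't a word character, underscore or dash is removed,
    # so dots and path separators cannot survive into the template path.
    safe = re_safe.sub('', oembed_type or 'default')
    return "fluent_contents/plugins/oembed/{type}.html".format(type=safe)

assert safe_template_name('video') == 'fluent_contents/plugins/oembed/video.html'
assert safe_template_name('../../etc/passwd') == 'fluent_contents/plugins/oembed/etcpasswd.html'
```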
13f9a48166aed2f6d09e1a27c60568d2318ceee2
|
src/ocspdash/custom_columns.py
|
src/ocspdash/custom_columns.py
|
"""Implements custom SQLAlchemy TypeDecorators."""
import uuid
import sqlalchemy.dialects.postgresql
from sqlalchemy.types import BINARY, TypeDecorator
__all__ = [
'UUID',
]
class UUID(TypeDecorator):
"""Platform-independent UUID type.
Uses Postgresql's UUID type, otherwise uses
BINARY(16).
Based on http://docs.sqlalchemy.org/en/rel_0_9/core/custom_types.html?highlight=guid#backend-agnostic-guid-type
"""
impl = BINARY
def load_dialect_impl(self, dialect):
if dialect.name == 'postgresql':
return dialect.type_descriptor(sqlalchemy.dialects.postgresql.UUID())
return dialect.type_descriptor(BINARY)
def process_bind_param(self, value, dialect):
if value is None:
return
if dialect.name == 'postgresql':
return str(value)
if isinstance(value, uuid.UUID):
# hex string
return value.bytes
value_uuid = uuid.UUID(value)
return value_uuid.bytes
def process_result_value(self, value, dialect):
if value is None:
return
return uuid.UUID(bytes=value)
|
"""Implements custom SQLAlchemy TypeDecorators."""
import uuid
import sqlalchemy.dialects.postgresql
from sqlalchemy.types import BINARY, TypeDecorator
__all__ = [
'UUID',
]
class UUID(TypeDecorator):
"""Platform-independent UUID type.
Uses Postgresql's UUID type, otherwise uses
BINARY(16).
Based on http://docs.sqlalchemy.org/en/rel_0_9/core/custom_types.html?highlight=guid#backend-agnostic-guid-type
"""
impl = BINARY
def load_dialect_impl(self, dialect):
if dialect.name == 'postgresql':
return dialect.type_descriptor(sqlalchemy.dialects.postgresql.UUID())
return dialect.type_descriptor(BINARY)
def process_bind_param(self, value, dialect):
if value is None:
return
if dialect.name == 'postgresql':
return str(value)
if isinstance(value, uuid.UUID):
# raw UUID bytes
return value.bytes
value_uuid = uuid.UUID(value)
return value_uuid.bytes
def process_result_value(self, value, dialect):
if value is None:
return
if dialect.name == 'postgresql':
return uuid.UUID(value)
return uuid.UUID(bytes=value)
|
Change the custom UUID column to work right
|
Change the custom UUID column to work right
|
Python
|
mit
|
scolby33/OCSPdash,scolby33/OCSPdash,scolby33/OCSPdash
|
"""Implements custom SQLAlchemy TypeDecorators."""
import uuid
import sqlalchemy.dialects.postgresql
from sqlalchemy.types import BINARY, TypeDecorator
__all__ = [
'UUID',
]
class UUID(TypeDecorator):
"""Platform-independent UUID type.
Uses Postgresql's UUID type, otherwise uses
BINARY(16).
Based on http://docs.sqlalchemy.org/en/rel_0_9/core/custom_types.html?highlight=guid#backend-agnostic-guid-type
"""
impl = BINARY
def load_dialect_impl(self, dialect):
if dialect.name == 'postgresql':
return dialect.type_descriptor(sqlalchemy.dialects.postgresql.UUID())
return dialect.type_descriptor(BINARY)
def process_bind_param(self, value, dialect):
if value is None:
return
if dialect.name == 'postgresql':
return str(value)
if isinstance(value, uuid.UUID):
- # hex string
+ # raw UUID bytes
return value.bytes
value_uuid = uuid.UUID(value)
return value_uuid.bytes
def process_result_value(self, value, dialect):
if value is None:
return
+ if dialect.name == 'postgresql':
+ return uuid.UUID(value)
+
return uuid.UUID(bytes=value)
|
Change the custom UUID column to work right
|
## Code Before:
"""Implements custom SQLAlchemy TypeDecorators."""
import uuid
import sqlalchemy.dialects.postgresql
from sqlalchemy.types import BINARY, TypeDecorator
__all__ = [
'UUID',
]
class UUID(TypeDecorator):
"""Platform-independent UUID type.
Uses Postgresql's UUID type, otherwise uses
BINARY(16).
Based on http://docs.sqlalchemy.org/en/rel_0_9/core/custom_types.html?highlight=guid#backend-agnostic-guid-type
"""
impl = BINARY
def load_dialect_impl(self, dialect):
if dialect.name == 'postgresql':
return dialect.type_descriptor(sqlalchemy.dialects.postgresql.UUID())
return dialect.type_descriptor(BINARY)
def process_bind_param(self, value, dialect):
if value is None:
return
if dialect.name == 'postgresql':
return str(value)
if isinstance(value, uuid.UUID):
# hex string
return value.bytes
value_uuid = uuid.UUID(value)
return value_uuid.bytes
def process_result_value(self, value, dialect):
if value is None:
return
return uuid.UUID(bytes=value)
## Instruction:
Change the custom UUID column to work right
## Code After:
"""Implements custom SQLAlchemy TypeDecorators."""
import uuid
import sqlalchemy.dialects.postgresql
from sqlalchemy.types import BINARY, TypeDecorator
__all__ = [
'UUID',
]
class UUID(TypeDecorator):
"""Platform-independent UUID type.
Uses Postgresql's UUID type, otherwise uses
BINARY(16).
Based on http://docs.sqlalchemy.org/en/rel_0_9/core/custom_types.html?highlight=guid#backend-agnostic-guid-type
"""
impl = BINARY
def load_dialect_impl(self, dialect):
if dialect.name == 'postgresql':
return dialect.type_descriptor(sqlalchemy.dialects.postgresql.UUID())
return dialect.type_descriptor(BINARY)
def process_bind_param(self, value, dialect):
if value is None:
return
if dialect.name == 'postgresql':
return str(value)
if isinstance(value, uuid.UUID):
# raw UUID bytes
return value.bytes
value_uuid = uuid.UUID(value)
return value_uuid.bytes
def process_result_value(self, value, dialect):
if value is None:
return
if dialect.name == 'postgresql':
return uuid.UUID(value)
return uuid.UUID(bytes=value)
|
"""Implements custom SQLAlchemy TypeDecorators."""
import uuid
import sqlalchemy.dialects.postgresql
from sqlalchemy.types import BINARY, TypeDecorator
__all__ = [
'UUID',
]
class UUID(TypeDecorator):
"""Platform-independent UUID type.
Uses Postgresql's UUID type, otherwise uses
BINARY(16).
Based on http://docs.sqlalchemy.org/en/rel_0_9/core/custom_types.html?highlight=guid#backend-agnostic-guid-type
"""
impl = BINARY
def load_dialect_impl(self, dialect):
if dialect.name == 'postgresql':
return dialect.type_descriptor(sqlalchemy.dialects.postgresql.UUID())
return dialect.type_descriptor(BINARY)
def process_bind_param(self, value, dialect):
if value is None:
return
if dialect.name == 'postgresql':
return str(value)
if isinstance(value, uuid.UUID):
- # hex string
+ # raw UUID bytes
return value.bytes
value_uuid = uuid.UUID(value)
return value_uuid.bytes
def process_result_value(self, value, dialect):
if value is None:
return
+ if dialect.name == 'postgresql':
+ return uuid.UUID(value)
+
return uuid.UUID(bytes=value)
|
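The two changes make the conversions symmetric per dialect: PostgreSQL binds and returns UUIDs as strings, every other backend stores the raw 16 bytes. A database-free sketch of the round trip implemented by the two methods above (standalone functions standing in for the TypeDecorator hooks):

```python
import uuid

def bind_param(value, dialect_name):
    # Mirrors process_bind_param: str for postgresql, raw bytes elsewhere.
    if dialect_name == 'postgresql':
        return str(value)
    if not isinstance(value, uuid.UUID):
        value = uuid.UUID(value)
    return value.bytes

def result_value(value, dialect_name):
    # Mirrors the fixed process_result_value: parse back per dialect.
    if dialect_name == 'postgresql':
        return uuid.UUID(value)
    return uuid.UUID(bytes=value)

original = uuid.uuid4()
for name in ('postgresql', 'sqlite'):
    assert result_value(bind_param(original, name), name) == original
```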
f9aae4320522af94dde78bac0c30e909ef4ef4e2
|
blockbuster/bb_dbconnector_factory.py
|
blockbuster/bb_dbconnector_factory.py
|
import logging

import bb_dbconnector_pg

log = logging.getLogger('bb_log.' + __name__)


class DBConnectorInterfaceFactory:
    def __init__(self):
        pass

    @staticmethod
    def create():
        return bb_dbconnector_pg.PostgresConnector()
|
import bb_dbconnector_pg


class DBConnectorInterfaceFactory:
    def __init__(self):
        pass

    @staticmethod
    def create():
        return bb_dbconnector_pg.PostgresConnector()
|
Remove logger as not used in module
|
Remove logger as not used in module
|
Python
|
mit
|
mattstibbs/blockbuster-server,mattstibbs/blockbuster-server
|
- import logging
import bb_dbconnector_pg
-
- log = logging.getLogger('bb_log.' + __name__)
-
class DBConnectorInterfaceFactory:
def __init__(self):
pass
@staticmethod
def create():
return bb_dbconnector_pg.PostgresConnector()
|
Remove logger as not used in module
|
## Code Before:
import logging

import bb_dbconnector_pg

log = logging.getLogger('bb_log.' + __name__)


class DBConnectorInterfaceFactory:
    def __init__(self):
        pass

    @staticmethod
    def create():
        return bb_dbconnector_pg.PostgresConnector()
## Instruction:
Remove logger as not used in module
## Code After:
import bb_dbconnector_pg


class DBConnectorInterfaceFactory:
    def __init__(self):
        pass

    @staticmethod
    def create():
        return bb_dbconnector_pg.PostgresConnector()
|
- import logging
import bb_dbconnector_pg
-
- log = logging.getLogger('bb_log.' + __name__)
-
class DBConnectorInterfaceFactory:
def __init__(self):
pass
@staticmethod
def create():
return bb_dbconnector_pg.PostgresConnector()
|
ef75047fa9bd0d4bc5dd6c263f399f446827daab
|
radar/lib/models/__init__.py
|
radar/lib/models/__init__.py
|
from radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *
|
from radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
from radar.lib.models.family_history import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *
|
Add family history and pathology client-side
|
Add family history and pathology client-side
|
Python
|
agpl-3.0
|
renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar
|
from radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
+ from radar.lib.models.family_history import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *
|
Add family history and pathology client-side
|
## Code Before:
from radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *
## Instruction:
Add family history and pathology client-side
## Code After:
from radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
from radar.lib.models.family_history import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *
|
from radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
+ from radar.lib.models.family_history import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *
|
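The record above registers one more module in a star-import aggregator. What `from module import *` exposes is controlled by that module's `__all__` list, so the module side of the pattern looks roughly like the sketch below (hypothetical class names, not the actual radar models):

__all__ = ['FamilyHistory']       # `from ... import *` re-exports only these names

class FamilyHistory(object):
    relative = None
    condition = None

class _ImportHelper(object):      # absent from __all__, so not re-exported
    pass

With a module shaped like this, the single added line in the package `__init__` makes `FamilyHistory` importable from `radar.lib.models` while keeping private helpers out of the shared namespace.
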
47f1d3bf2ef53fa9fef9eff46497ca02f366e3fb
|
nap/auth.py
|
nap/auth.py
|
from __future__ import unicode_literals
# Authentication and Authorisation
from functools import wraps
from . import http
def permit(test_func):
'''Decorate a handler to control access'''
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(self, *args, **kwargs):
if test_func(self, *args, **kwargs):
return view_func(self, *args, **kwargs)
return http.Forbidden()
return _wrapped_view
return decorator
permit_logged_in = permit(
lambda self, *args, **kwargs: self.request.user.is_authenticated()
)
permit_staff = permit(
lambda self, *args, **kwargs: self.request.user.is_staff
)
def permit_groups(*groups):
def in_groups(self, *args, **kwargs):
return self.request.user.groups.filter(name__in=groups).exists()
return permit(in_groups)
|
from __future__ import unicode_literals
# Authentication and Authorisation
from functools import wraps
from . import http
def permit(test_func, response_class=http.Forbidden):
'''Decorate a handler to control access'''
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(self, *args, **kwargs):
if test_func(self, *args, **kwargs):
return view_func(self, *args, **kwargs)
return response_class()
return _wrapped_view
return decorator
permit_logged_in = permit(
lambda self, *args, **kwargs: self.request.user.is_authenticated()
)
permit_staff = permit(
lambda self, *args, **kwargs: self.request.user.is_staff
)
def permit_groups(*groups):
def in_groups(self, *args, **kwargs):
return self.request.user.groups.filter(name__in=groups).exists()
return permit(in_groups)
|
Allow control of response type for failing permit check
|
Allow control of response type for failing permit check
|
Python
|
bsd-3-clause
|
limbera/django-nap
|
from __future__ import unicode_literals
# Authentication and Authorisation
from functools import wraps
from . import http
- def permit(test_func):
+ def permit(test_func, response_class=http.Forbidden):
'''Decorate a handler to control access'''
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(self, *args, **kwargs):
if test_func(self, *args, **kwargs):
return view_func(self, *args, **kwargs)
- return http.Forbidden()
+ return response_class()
return _wrapped_view
return decorator
permit_logged_in = permit(
lambda self, *args, **kwargs: self.request.user.is_authenticated()
)
permit_staff = permit(
lambda self, *args, **kwargs: self.request.user.is_staff
)
def permit_groups(*groups):
def in_groups(self, *args, **kwargs):
return self.request.user.groups.filter(name__in=groups).exists()
return permit(in_groups)
|
Allow control of response type for failing permit check
|
## Code Before:
from __future__ import unicode_literals
# Authentication and Authorisation
from functools import wraps
from . import http
def permit(test_func):
'''Decorate a handler to control access'''
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(self, *args, **kwargs):
if test_func(self, *args, **kwargs):
return view_func(self, *args, **kwargs)
return http.Forbidden()
return _wrapped_view
return decorator
permit_logged_in = permit(
lambda self, *args, **kwargs: self.request.user.is_authenticated()
)
permit_staff = permit(
lambda self, *args, **kwargs: self.request.user.is_staff
)
def permit_groups(*groups):
def in_groups(self, *args, **kwargs):
return self.request.user.groups.filter(name__in=groups).exists()
return permit(in_groups)
## Instruction:
Allow control of response type for failing permit check
## Code After:
from __future__ import unicode_literals
# Authentication and Authorisation
from functools import wraps
from . import http
def permit(test_func, response_class=http.Forbidden):
'''Decorate a handler to control access'''
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(self, *args, **kwargs):
if test_func(self, *args, **kwargs):
return view_func(self, *args, **kwargs)
return response_class()
return _wrapped_view
return decorator
permit_logged_in = permit(
lambda self, *args, **kwargs: self.request.user.is_authenticated()
)
permit_staff = permit(
lambda self, *args, **kwargs: self.request.user.is_staff
)
def permit_groups(*groups):
def in_groups(self, *args, **kwargs):
return self.request.user.groups.filter(name__in=groups).exists()
return permit(in_groups)
|
from __future__ import unicode_literals
# Authentication and Authorisation
from functools import wraps
from . import http
- def permit(test_func):
+ def permit(test_func, response_class=http.Forbidden):
'''Decorate a handler to control access'''
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(self, *args, **kwargs):
if test_func(self, *args, **kwargs):
return view_func(self, *args, **kwargs)
- return http.Forbidden()
+ return response_class()
return _wrapped_view
return decorator
permit_logged_in = permit(
lambda self, *args, **kwargs: self.request.user.is_authenticated()
)
permit_staff = permit(
lambda self, *args, **kwargs: self.request.user.is_staff
)
def permit_groups(*groups):
def in_groups(self, *args, **kwargs):
return self.request.user.groups.filter(name__in=groups).exists()
return permit(in_groups)
|
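The diff above turns the failure response into a parameter of the decorator factory instead of a hard-coded `http.Forbidden`. Below is a self-contained sketch of the same pattern; the response classes are stand-ins, since nap's real `http` module is not shown in this record:

from functools import wraps

class Forbidden(object):                 # stand-in for nap.http.Forbidden
    status_code = 403

class Unauthorized(object):              # hypothetical 401 response class
    status_code = 401

def permit(test_func, response_class=Forbidden):
    def decorator(view_func):
        @wraps(view_func)
        def _wrapped_view(self, *args, **kwargs):
            if test_func(self, *args, **kwargs):
                return view_func(self, *args, **kwargs)
            return response_class()      # configurable failure response
        return _wrapped_view
    return decorator

class Handler(object):
    @permit(lambda self: False, response_class=Unauthorized)
    def get(self):
        return 'payload'

print(Handler().get().status_code)       # 401 instead of the default 403

Keeping `Forbidden` as the default preserves the behaviour of every existing call site, so the change is backwards compatible.
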
ad7d331868706c97caa0bf0abff88d6ab5537d8d
|
pyramid_skosprovider/__init__.py
|
pyramid_skosprovider/__init__.py
|
from zope.interface import Interface
from skosprovider.registry import Registry
class ISkosRegistry(Interface):
pass
def _build_skos_registry(registry):
skos_registry = registry.queryUtility(ISkosRegistry)
if skos_registry is not None:
return skos_registry
skos_registry = Registry()
registry.registerUtility(skos_registry, ISkosRegistry)
return registry.queryUtility(ISkosRegistry)
def get_skos_registry(registry):
#Argument might be a config or request
regis = getattr(registry, 'registry', None)
if regis is None:
regis = registry
return regis.queryUtility(ISkosRegistry)
def includeme(config):
_build_skos_registry(config.registry)
config.add_directive('get_skos_registry', get_skos_registry)
config.add_route('skosprovider.conceptschemes', '/conceptschemes')
config.add_route('skosprovider.conceptscheme', '/conceptschemes/{scheme_id}')
config.add_route('skosprovider.conceptscheme.concepts', '/conceptschemes/{scheme_id}/concepts')
config.add_route('skosprovider.concept', '/conceptschemes/{scheme_id}/concepts/{concept_id}')
config.scan()
|
from zope.interface import Interface
from skosprovider.registry import Registry
class ISkosRegistry(Interface):
pass
def _build_skos_registry(registry):
skos_registry = registry.queryUtility(ISkosRegistry)
if skos_registry is not None:
return skos_registry
skos_registry = Registry()
registry.registerUtility(skos_registry, ISkosRegistry)
return registry.queryUtility(ISkosRegistry)
def get_skos_registry(registry):
#Argument might be a config or request
regis = getattr(registry, 'registry', None)
if regis is None:
regis = registry
return regis.queryUtility(ISkosRegistry)
def includeme(config):
_build_skos_registry(config.registry)
config.add_directive('get_skos_registry', get_skos_registry)
config.add_request_method(get_skos_registry, 'skos_registry', reify=True)
config.add_route('skosprovider.conceptschemes', '/conceptschemes')
config.add_route('skosprovider.conceptscheme', '/conceptschemes/{scheme_id}')
config.add_route('skosprovider.conceptscheme.concepts', '/conceptschemes/{scheme_id}/concepts')
config.add_route('skosprovider.concept', '/conceptschemes/{scheme_id}/concepts/{concept_id}')
config.scan()
|
Add skos_registry to the request.
|
Add skos_registry to the request.
Add the skos_registry to the request through the add_request_method
directive.
|
Python
|
mit
|
koenedaele/pyramid_skosprovider
|
from zope.interface import Interface
from skosprovider.registry import Registry
class ISkosRegistry(Interface):
pass
def _build_skos_registry(registry):
skos_registry = registry.queryUtility(ISkosRegistry)
if skos_registry is not None:
return skos_registry
skos_registry = Registry()
registry.registerUtility(skos_registry, ISkosRegistry)
return registry.queryUtility(ISkosRegistry)
def get_skos_registry(registry):
#Argument might be a config or request
regis = getattr(registry, 'registry', None)
if regis is None:
regis = registry
return regis.queryUtility(ISkosRegistry)
def includeme(config):
_build_skos_registry(config.registry)
config.add_directive('get_skos_registry', get_skos_registry)
+ config.add_request_method(get_skos_registry, 'skos_registry', reify=True)
config.add_route('skosprovider.conceptschemes', '/conceptschemes')
config.add_route('skosprovider.conceptscheme', '/conceptschemes/{scheme_id}')
config.add_route('skosprovider.conceptscheme.concepts', '/conceptschemes/{scheme_id}/concepts')
config.add_route('skosprovider.concept', '/conceptschemes/{scheme_id}/concepts/{concept_id}')
config.scan()
|
Add skos_registry to the request.
|
## Code Before:
from zope.interface import Interface
from skosprovider.registry import Registry
class ISkosRegistry(Interface):
pass
def _build_skos_registry(registry):
skos_registry = registry.queryUtility(ISkosRegistry)
if skos_registry is not None:
return skos_registry
skos_registry = Registry()
registry.registerUtility(skos_registry, ISkosRegistry)
return registry.queryUtility(ISkosRegistry)
def get_skos_registry(registry):
#Argument might be a config or request
regis = getattr(registry, 'registry', None)
if regis is None:
regis = registry
return regis.queryUtility(ISkosRegistry)
def includeme(config):
_build_skos_registry(config.registry)
config.add_directive('get_skos_registry', get_skos_registry)
config.add_route('skosprovider.conceptschemes', '/conceptschemes')
config.add_route('skosprovider.conceptscheme', '/conceptschemes/{scheme_id}')
config.add_route('skosprovider.conceptscheme.concepts', '/conceptschemes/{scheme_id}/concepts')
config.add_route('skosprovider.concept', '/conceptschemes/{scheme_id}/concepts/{concept_id}')
config.scan()
## Instruction:
Add skos_registry to the request.
## Code After:
from zope.interface import Interface
from skosprovider.registry import Registry
class ISkosRegistry(Interface):
pass
def _build_skos_registry(registry):
skos_registry = registry.queryUtility(ISkosRegistry)
if skos_registry is not None:
return skos_registry
skos_registry = Registry()
registry.registerUtility(skos_registry, ISkosRegistry)
return registry.queryUtility(ISkosRegistry)
def get_skos_registry(registry):
#Argument might be a config or request
regis = getattr(registry, 'registry', None)
if regis is None:
regis = registry
return regis.queryUtility(ISkosRegistry)
def includeme(config):
_build_skos_registry(config.registry)
config.add_directive('get_skos_registry', get_skos_registry)
config.add_request_method(get_skos_registry, 'skos_registry', reify=True)
config.add_route('skosprovider.conceptschemes', '/conceptschemes')
config.add_route('skosprovider.conceptscheme', '/conceptschemes/{scheme_id}')
config.add_route('skosprovider.conceptscheme.concepts', '/conceptschemes/{scheme_id}/concepts')
config.add_route('skosprovider.concept', '/conceptschemes/{scheme_id}/concepts/{concept_id}')
config.scan()
|
from zope.interface import Interface
from skosprovider.registry import Registry
class ISkosRegistry(Interface):
pass
def _build_skos_registry(registry):
skos_registry = registry.queryUtility(ISkosRegistry)
if skos_registry is not None:
return skos_registry
skos_registry = Registry()
registry.registerUtility(skos_registry, ISkosRegistry)
return registry.queryUtility(ISkosRegistry)
def get_skos_registry(registry):
#Argument might be a config or request
regis = getattr(registry, 'registry', None)
if regis is None:
regis = registry
return regis.queryUtility(ISkosRegistry)
def includeme(config):
_build_skos_registry(config.registry)
config.add_directive('get_skos_registry', get_skos_registry)
+ config.add_request_method(get_skos_registry, 'skos_registry', reify=True)
config.add_route('skosprovider.conceptschemes', '/conceptschemes')
config.add_route('skosprovider.conceptscheme', '/conceptschemes/{scheme_id}')
config.add_route('skosprovider.conceptscheme.concepts', '/conceptschemes/{scheme_id}/concepts')
config.add_route('skosprovider.concept', '/conceptschemes/{scheme_id}/concepts/{concept_id}')
config.scan()
|
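The new `add_request_method(..., reify=True)` line means the registry lookup runs once per request and the result is cached on the request object. A hypothetical view sketch of what that enables (the handler body is illustrative and not from pyramid_skosprovider; `get_providers()` and `get_vocabulary_id()` are skosprovider's documented registry and provider methods):

def conceptschemes_view(request):
    registry = request.skos_registry      # same as get_skos_registry(request), but cached
    providers = registry.get_providers()
    return {'conceptschemes': [p.get_vocabulary_id() for p in providers]}
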
1e082f8c39dd1a1d41064f522db10478b0c820e1
|
icekit/page_types/layout_page/page_type_plugins.py
|
icekit/page_types/layout_page/page_type_plugins.py
|
from django.conf.urls import patterns, url
from fluent_pages.extensions import page_type_pool
from fluent_pages.integration.fluent_contents.page_type_plugins import FluentContentsPagePlugin
from fluent_pages.models import UrlNode
from . import admin, models
# Register this plugin to the page plugin pool.
@page_type_pool.register
class LayoutPagePlugin(FluentContentsPagePlugin):
model = models.LayoutPage
model_admin = admin.LayoutPageAdmin
|
from django.conf.urls import patterns, url
from fluent_pages.extensions import page_type_pool
from fluent_pages.integration.fluent_contents.page_type_plugins import FluentContentsPagePlugin
from fluent_pages.models import UrlNode
from . import admin, models
# Register this plugin to the page plugin pool.
@page_type_pool.register
class LayoutPagePlugin(FluentContentsPagePlugin):
model = models.LayoutPage
model_admin = admin.LayoutPageAdmin
def get_render_template(self, request, fluentpage, **kwargs):
# Allow subclasses to easily override it by specifying `render_template` after all.
        # The default is to use the template_name from the layout object.
return self.render_template or fluentpage.layout.template_name
|
Add smart template render method to LayoutPage
|
Add smart template render method to LayoutPage
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
from django.conf.urls import patterns, url
from fluent_pages.extensions import page_type_pool
from fluent_pages.integration.fluent_contents.page_type_plugins import FluentContentsPagePlugin
from fluent_pages.models import UrlNode
from . import admin, models
# Register this plugin to the page plugin pool.
@page_type_pool.register
class LayoutPagePlugin(FluentContentsPagePlugin):
model = models.LayoutPage
model_admin = admin.LayoutPageAdmin
+ def get_render_template(self, request, fluentpage, **kwargs):
+ # Allow subclasses to easily override it by specifying `render_template` after all.
+ # The default is to use the template_name from the layout object.
+ return self.render_template or fluentpage.layout.template_name
+
|
Add smart template render method to LayoutPage
|
## Code Before:
from django.conf.urls import patterns, url
from fluent_pages.extensions import page_type_pool
from fluent_pages.integration.fluent_contents.page_type_plugins import FluentContentsPagePlugin
from fluent_pages.models import UrlNode
from . import admin, models
# Register this plugin to the page plugin pool.
@page_type_pool.register
class LayoutPagePlugin(FluentContentsPagePlugin):
model = models.LayoutPage
model_admin = admin.LayoutPageAdmin
## Instruction:
Add smart template render method to LayoutPage
## Code After:
from django.conf.urls import patterns, url
from fluent_pages.extensions import page_type_pool
from fluent_pages.integration.fluent_contents.page_type_plugins import FluentContentsPagePlugin
from fluent_pages.models import UrlNode
from . import admin, models
# Register this plugin to the page plugin pool.
@page_type_pool.register
class LayoutPagePlugin(FluentContentsPagePlugin):
model = models.LayoutPage
model_admin = admin.LayoutPageAdmin
def get_render_template(self, request, fluentpage, **kwargs):
# Allow subclasses to easily override it by specifying `render_template` after all.
        # The default is to use the template_name from the layout object.
return self.render_template or fluentpage.layout.template_name
|
from django.conf.urls import patterns, url
from fluent_pages.extensions import page_type_pool
from fluent_pages.integration.fluent_contents.page_type_plugins import FluentContentsPagePlugin
from fluent_pages.models import UrlNode
from . import admin, models
# Register this plugin to the page plugin pool.
@page_type_pool.register
class LayoutPagePlugin(FluentContentsPagePlugin):
model = models.LayoutPage
model_admin = admin.LayoutPageAdmin
+ def get_render_template(self, request, fluentpage, **kwargs):
+ # Allow subclasses to easily override it by specifying `render_template` after all.
+ # The default is to use the template_name from the layout object.
+ return self.render_template or fluentpage.layout.template_name
+
|
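The added method is a two-step template lookup: an explicit `render_template` on the plugin wins, otherwise the template comes from the layout chosen for the page. The toy classes below (hypothetical stand-ins, not fluent_pages) reproduce that resolution order so it can be run standalone:

class Layout(object):
    template_name = 'layouts/default.html'

class Page(object):
    layout = Layout()

class PluginWithOverride(object):
    render_template = 'layouts/custom.html'

    def get_render_template(self, request, fluentpage, **kwargs):
        return self.render_template or fluentpage.layout.template_name

class PluginWithFallback(PluginWithOverride):
    render_template = None

print(PluginWithOverride().get_render_template(None, Page()))   # layouts/custom.html
print(PluginWithFallback().get_render_template(None, Page()))   # layouts/default.html
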
f415a411f748ce5a8eb142d862970e00d0267004
|
tests/test_environment.py
|
tests/test_environment.py
|
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
|
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
def test_env_should_come_with_jinja2_time_extension():
env = StrictEnvironment(keep_trailing_newline=True)
assert 'jinja2_time.jinja2_time.TimeExtension' in env.extensions
|
Add a simple test to make sure cookiecutter comes with jinja2_time
|
Add a simple test to make sure cookiecutter comes with jinja2_time
|
Python
|
bsd-3-clause
|
Springerle/cookiecutter,dajose/cookiecutter,luzfcb/cookiecutter,michaeljoseph/cookiecutter,willingc/cookiecutter,luzfcb/cookiecutter,michaeljoseph/cookiecutter,Springerle/cookiecutter,dajose/cookiecutter,pjbull/cookiecutter,audreyr/cookiecutter,audreyr/cookiecutter,willingc/cookiecutter,pjbull/cookiecutter,hackebrot/cookiecutter,terryjbates/cookiecutter,hackebrot/cookiecutter,terryjbates/cookiecutter,stevepiercy/cookiecutter,stevepiercy/cookiecutter
|
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
+
+ def test_env_should_come_with_jinja2_time_extension():
+ env = StrictEnvironment(keep_trailing_newline=True)
+ assert 'jinja2_time.jinja2_time.TimeExtension' in env.extensions
+
|
Add a simple test to make sure cookiecutter comes with jinja2_time
|
## Code Before:
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
## Instruction:
Add a simple test to make sure cookiecutter comes with jinja2_time
## Code After:
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
def test_env_should_come_with_jinja2_time_extension():
env = StrictEnvironment(keep_trailing_newline=True)
assert 'jinja2_time.jinja2_time.TimeExtension' in env.extensions
|
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
+
+
+ def test_env_should_come_with_jinja2_time_extension():
+ env = StrictEnvironment(keep_trailing_newline=True)
+ assert 'jinja2_time.jinja2_time.TimeExtension' in env.extensions
|
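The assertion checks that the time extension is loaded by default, which is what gives cookiecutter templates a `{% now %}` tag. A minimal sketch of the tag in use, assuming the jinja2 and jinja2_time packages are installed:

from jinja2 import Environment

env = Environment(extensions=['jinja2_time.TimeExtension'])
template = env.from_string(u"{% now 'utc', '%Y' %}")
print(template.render())    # the current UTC year, e.g. '2016'
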
5fbf410e0042c82e524b3b08276b2d628d00b3c6
|
stickytape/prelude.py
|
stickytape/prelude.py
|
import contextlib as __stickytape_contextlib
@__stickytape_contextlib.contextmanager
def __stickytape_temporary_dir():
import tempfile
import shutil
dir_path = tempfile.mkdtemp()
try:
yield dir_path
finally:
shutil.rmtree(dir_path)
with __stickytape_temporary_dir() as __stickytape_working_dir:
def __stickytape_write_module(path, contents):
import os, os.path, errno
def make_package(path):
parts = path.split("/")
partial_path = __stickytape_working_dir
for part in parts:
partial_path = os.path.join(partial_path, part)
if not os.path.exists(partial_path):
os.mkdir(partial_path)
open(os.path.join(partial_path, "__init__.py"), "w").write("\n")
make_package(os.path.dirname(path))
full_path = os.path.join(__stickytape_working_dir, path)
with open(full_path, "w") as module_file:
module_file.write(contents)
import sys
sys.path.insert(0, __stickytape_working_dir)
|
import contextlib as __stickytape_contextlib
@__stickytape_contextlib.contextmanager
def __stickytape_temporary_dir():
import tempfile
import shutil
dir_path = tempfile.mkdtemp()
try:
yield dir_path
finally:
shutil.rmtree(dir_path)
with __stickytape_temporary_dir() as __stickytape_working_dir:
def __stickytape_write_module(path, contents):
import os, os.path, errno
def make_package(path):
parts = path.split("/")
partial_path = __stickytape_working_dir
for part in parts:
partial_path = os.path.join(partial_path, part)
if not os.path.exists(partial_path):
os.mkdir(partial_path)
open(os.path.join(partial_path, "__init__.py"), "w").write("\n")
make_package(os.path.dirname(path))
full_path = os.path.join(__stickytape_working_dir, path)
with open(full_path, "w") as module_file:
module_file.write(contents)
import sys as __stickytape_sys
__stickytape_sys.path.insert(0, __stickytape_working_dir)
|
Undo accidental global leakage of sys
|
Undo accidental global leakage of sys
|
Python
|
bsd-2-clause
|
mwilliamson/stickytape
|
import contextlib as __stickytape_contextlib
@__stickytape_contextlib.contextmanager
def __stickytape_temporary_dir():
import tempfile
import shutil
dir_path = tempfile.mkdtemp()
try:
yield dir_path
finally:
shutil.rmtree(dir_path)
with __stickytape_temporary_dir() as __stickytape_working_dir:
def __stickytape_write_module(path, contents):
import os, os.path, errno
def make_package(path):
parts = path.split("/")
partial_path = __stickytape_working_dir
for part in parts:
partial_path = os.path.join(partial_path, part)
if not os.path.exists(partial_path):
os.mkdir(partial_path)
open(os.path.join(partial_path, "__init__.py"), "w").write("\n")
make_package(os.path.dirname(path))
full_path = os.path.join(__stickytape_working_dir, path)
with open(full_path, "w") as module_file:
module_file.write(contents)
- import sys
+ import sys as __stickytape_sys
- sys.path.insert(0, __stickytape_working_dir)
+ __stickytape_sys.path.insert(0, __stickytape_working_dir)
|
Undo accidental global leakage of sys
|
## Code Before:
import contextlib as __stickytape_contextlib
@__stickytape_contextlib.contextmanager
def __stickytape_temporary_dir():
import tempfile
import shutil
dir_path = tempfile.mkdtemp()
try:
yield dir_path
finally:
shutil.rmtree(dir_path)
with __stickytape_temporary_dir() as __stickytape_working_dir:
def __stickytape_write_module(path, contents):
import os, os.path, errno
def make_package(path):
parts = path.split("/")
partial_path = __stickytape_working_dir
for part in parts:
partial_path = os.path.join(partial_path, part)
if not os.path.exists(partial_path):
os.mkdir(partial_path)
open(os.path.join(partial_path, "__init__.py"), "w").write("\n")
make_package(os.path.dirname(path))
full_path = os.path.join(__stickytape_working_dir, path)
with open(full_path, "w") as module_file:
module_file.write(contents)
import sys
sys.path.insert(0, __stickytape_working_dir)
## Instruction:
Undo accidental global leakage of sys
## Code After:
import contextlib as __stickytape_contextlib
@__stickytape_contextlib.contextmanager
def __stickytape_temporary_dir():
import tempfile
import shutil
dir_path = tempfile.mkdtemp()
try:
yield dir_path
finally:
shutil.rmtree(dir_path)
with __stickytape_temporary_dir() as __stickytape_working_dir:
def __stickytape_write_module(path, contents):
import os, os.path, errno
def make_package(path):
parts = path.split("/")
partial_path = __stickytape_working_dir
for part in parts:
partial_path = os.path.join(partial_path, part)
if not os.path.exists(partial_path):
os.mkdir(partial_path)
open(os.path.join(partial_path, "__init__.py"), "w").write("\n")
make_package(os.path.dirname(path))
full_path = os.path.join(__stickytape_working_dir, path)
with open(full_path, "w") as module_file:
module_file.write(contents)
import sys as __stickytape_sys
__stickytape_sys.path.insert(0, __stickytape_working_dir)
|
import contextlib as __stickytape_contextlib
@__stickytape_contextlib.contextmanager
def __stickytape_temporary_dir():
import tempfile
import shutil
dir_path = tempfile.mkdtemp()
try:
yield dir_path
finally:
shutil.rmtree(dir_path)
with __stickytape_temporary_dir() as __stickytape_working_dir:
def __stickytape_write_module(path, contents):
import os, os.path, errno
def make_package(path):
parts = path.split("/")
partial_path = __stickytape_working_dir
for part in parts:
partial_path = os.path.join(partial_path, part)
if not os.path.exists(partial_path):
os.mkdir(partial_path)
open(os.path.join(partial_path, "__init__.py"), "w").write("\n")
make_package(os.path.dirname(path))
full_path = os.path.join(__stickytape_working_dir, path)
with open(full_path, "w") as module_file:
module_file.write(contents)
- import sys
+ import sys as __stickytape_sys
- sys.path.insert(0, __stickytape_working_dir)
+ __stickytape_sys.path.insert(0, __stickytape_working_dir)
? +++++++++++++
|
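The rename matters because the prelude is textually prepended to the user's script and therefore runs in the script's own global namespace: a bare `import sys` would bind `sys` for user code that never imported it. A runnable demonstration of the difference (the inserted path is illustrative):

prelude_bad = "import sys\nsys.path.insert(0, '/tmp/bundle')\n"
prelude_good = ("import sys as __stickytape_sys\n"
                "__stickytape_sys.path.insert(0, '/tmp/bundle')\n")

ns = {}
exec(prelude_bad, ns)
print('sys' in ns)     # True: the bundled script now sees a `sys` it never imported

ns = {}
exec(prelude_good, ns)
print('sys' in ns)     # False: only the mangled __stickytape_sys name is bound
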
1fd43c6b87db9599c73b7cb26856e99404b2e0f7
|
corehq/apps/data_interfaces/tests/test_xform_management.py
|
corehq/apps/data_interfaces/tests/test_xform_management.py
|
from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
|
from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
from corehq.pillows.mappings.xform_mapping import XFORM_INDEX_INFO
from corehq.util.elastic import reset_es_index
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
reset_es_index(XFORM_INDEX_INFO)
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
|
Fix ES index setup in XFormManagementTest
|
Fix ES index setup in XFormManagementTest
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
+ from corehq.pillows.mappings.xform_mapping import XFORM_INDEX_INFO
+ from corehq.util.elastic import reset_es_index
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
+ reset_es_index(XFORM_INDEX_INFO)
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
|
Fix ES index setup in XFormManagementTest
|
## Code Before:
from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
## Instruction:
Fix ES index setup in XFormManagementTest
## Code After:
from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
from corehq.pillows.mappings.xform_mapping import XFORM_INDEX_INFO
from corehq.util.elastic import reset_es_index
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
reset_es_index(XFORM_INDEX_INFO)
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
|
from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
+ from corehq.pillows.mappings.xform_mapping import XFORM_INDEX_INFO
+ from corehq.util.elastic import reset_es_index
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
+ reset_es_index(XFORM_INDEX_INFO)
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
|
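The fix guarantees the form index exists and is empty before any class-level fixtures are written. The corehq helper itself is not shown here, but a generic equivalent with the elasticsearch-py client looks roughly like this (hypothetical index name and mapping; the mapping body shape also varies between Elasticsearch versions):

from elasticsearch import Elasticsearch

def reset_es_index(es, index, mappings):
    if es.indices.exists(index=index):    # drop any stale documents and mappings
        es.indices.delete(index=index)
    es.indices.create(index=index, body={'mappings': mappings})

es = Elasticsearch(['localhost:9200'])
reset_es_index(es, 'xforms-test',
               {'properties': {'domain': {'type': 'keyword'}}})
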
58316a5823e2e136b2b5687d4aef323ad8a86cee
|
senlin/tests/functional/drivers/openstack/sdk.py
|
senlin/tests/functional/drivers/openstack/sdk.py
|
class FakeResourceObject(object):
'''Generate a fake SDK resource object based on given dictionary'''
def __init__(self, params):
for key in params:
setattr(self, key, params[key])
|
class FakeResourceObject(object):
'''Generate a fake SDK resource object based on given dictionary'''
def __init__(self, params):
for key in params:
setattr(self, key, params[key])
def to_dict(self):
data = {
'image': {
'id': 'FAKE_IMAGE'
},
'flavor': {
'id': 'FAKE_FLAVOR'
},
'addresses': {
'private': [
{
'OS-EXT-IPS:type': 'fixed',
'addr': '10.0.0.5',
'version': 4
}
]
},
'security_groups': [
{
'name': 'default'
}
]
}
return data
|
Add to_dict() method for faked resource
|
Add to_dict() method for faked resource
When testing node_get with details, we expect the profile to return a
resource that has a to_dict() method. The existing faked resource
doesn't support this yet. This patch fixes it.
Change-Id: I52e0dad74a1140f8233280ff10a9c14ff1760f72
|
Python
|
apache-2.0
|
stackforge/senlin,openstack/senlin,openstack/senlin,openstack/senlin,tengqm/senlin-container,stackforge/senlin,tengqm/senlin-container
|
class FakeResourceObject(object):
'''Generate a fake SDK resource object based on given dictionary'''
def __init__(self, params):
for key in params:
setattr(self, key, params[key])
+ def to_dict(self):
+ data = {
+ 'image': {
+ 'id': 'FAKE_IMAGE'
+ },
+ 'flavor': {
+ 'id': 'FAKE_FLAVOR'
+ },
+ 'addresses': {
+ 'private': [
+ {
+ 'OS-EXT-IPS:type': 'fixed',
+ 'addr': '10.0.0.5',
+ 'version': 4
+ }
+ ]
+ },
+ 'security_groups': [
+ {
+ 'name': 'default'
+ }
+ ]
+ }
+
+ return data
+
|
Add to_dict() method for faked resource
|
## Code Before:
class FakeResourceObject(object):
'''Generate a fake SDK resource object based on given dictionary'''
def __init__(self, params):
for key in params:
setattr(self, key, params[key])
## Instruction:
Add to_dict() method for faked resource
## Code After:
class FakeResourceObject(object):
'''Generate a fake SDK resource object based on given dictionary'''
def __init__(self, params):
for key in params:
setattr(self, key, params[key])
def to_dict(self):
data = {
'image': {
'id': 'FAKE_IMAGE'
},
'flavor': {
'id': 'FAKE_FLAVOR'
},
'addresses': {
'private': [
{
'OS-EXT-IPS:type': 'fixed',
'addr': '10.0.0.5',
'version': 4
}
]
},
'security_groups': [
{
'name': 'default'
}
]
}
return data
|
class FakeResourceObject(object):
'''Generate a fake SDK resource object based on given dictionary'''
def __init__(self, params):
for key in params:
setattr(self, key, params[key])
+
+ def to_dict(self):
+ data = {
+ 'image': {
+ 'id': 'FAKE_IMAGE'
+ },
+ 'flavor': {
+ 'id': 'FAKE_FLAVOR'
+ },
+ 'addresses': {
+ 'private': [
+ {
+ 'OS-EXT-IPS:type': 'fixed',
+ 'addr': '10.0.0.5',
+ 'version': 4
+ }
+ ]
+ },
+ 'security_groups': [
+ {
+ 'name': 'default'
+ }
+ ]
+ }
+
+ return data
|
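The fake now serves both access styles the code under test relies on: plain attribute access for values passed to `__init__`, and a canned detail payload from `to_dict()`. A condensed, runnable restatement of the pattern (the payload is parameterised here purely for illustration):

class FakeResource(object):
    def __init__(self, params, details):
        for key, value in params.items():   # attribute access, as in the record
            setattr(self, key, value)
        self._details = details

    def to_dict(self):                      # canned payload for detail checks
        return self._details

fake = FakeResource({'name': 'node-1'},
                    {'image': {'id': 'FAKE_IMAGE'}})
assert fake.name == 'node-1'
assert fake.to_dict()['image']['id'] == 'FAKE_IMAGE'
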
698a3fe81a15b40b95836426f9292365f9f57c9c
|
cartoframes/core/cartodataframe.py
|
cartoframes/core/cartodataframe.py
|
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
|
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
gdf = GeoDataFrame.from_file(filename, **kwargs)
return cls(gdf)
@classmethod
def from_features(cls, features, **kwargs):
gdf = GeoDataFrame.from_features(features, **kwargs)
return cls(gdf)
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
|
Add a wrapper for from_file/from_features methods
|
Add a wrapper for from_file/from_features methods
|
Python
|
bsd-3-clause
|
CartoDB/cartoframes,CartoDB/cartoframes
|
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
+ @classmethod
+ def from_file(cls, filename, **kwargs):
+ gdf = GeoDataFrame.from_file(filename, **kwargs)
+ return cls(gdf)
+
+ @classmethod
+ def from_features(cls, features, **kwargs):
+ gdf = GeoDataFrame.from_features(features, **kwargs)
+ return cls(gdf)
+
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
|
Add a wrapper for from_file/from_features methods
|
## Code Before:
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
## Instruction:
Add a wrapper for from_file/from_features methods
## Code After:
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
gdf = GeoDataFrame.from_file(filename, **kwargs)
return cls(gdf)
@classmethod
def from_features(cls, features, **kwargs):
gdf = GeoDataFrame.from_features(features, **kwargs)
return cls(gdf)
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
|
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
+ @classmethod
+ def from_file(cls, filename, **kwargs):
+ gdf = GeoDataFrame.from_file(filename, **kwargs)
+ return cls(gdf)
+
+ @classmethod
+ def from_features(cls, features, **kwargs):
+ gdf = GeoDataFrame.from_features(features, **kwargs)
+ return cls(gdf)
+
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
|
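The wrappers are needed because `GeoDataFrame.from_file` and `from_features` return a plain `GeoDataFrame`, which would silently discard the subclass. Reduced to a toy example (not geopandas), the problem and the fix look like this:

class Base(object):
    def __init__(self, data):
        self.data = data

    @classmethod
    def from_file(cls, filename):
        return Base(filename)          # always returns a Base, even via Sub.from_file

class Sub(Base):
    @classmethod
    def from_file(cls, filename):      # wrapper: rebuild the result as cls
        base = Base.from_file(filename)
        return cls(base.data)

print(type(Sub.from_file('points.geojson')).__name__)   # Sub, not Base
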
b1890ccd9946054cde25bbd511e317ec0b844b9a
|
webserver/hermes/models.py
|
webserver/hermes/models.py
|
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
|
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
def __str__(self):
return self.team.name
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
|
Add str method to TeamStats
|
Add str method to TeamStats
|
Python
|
bsd-3-clause
|
siggame/webserver,siggame/webserver,siggame/webserver
|
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
+ def __str__(self):
+ return self.team.name
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
|
Add str method to TeamStats
|
## Code Before:
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
## Instruction:
Add str method to TeamStats
## Code After:
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
def __str__(self):
return self.team.name
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
|
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
+ def __str__(self):
+ return self.team.name
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
|
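`__str__` is what the Django admin and shell show for a row; without it the object renders as an unhelpful "TeamStats object". One hedge worth stating: under Python 2, Django looks for `__unicode__` instead, which the `python_2_unicode_compatible` decorator derives from `__str__`. The sketch below reuses the record's `models` and `Team` imports:

from django.utils.encoding import python_2_unicode_compatible

@python_2_unicode_compatible        # generates __unicode__ from __str__ on Python 2
class TeamStats(models.Model):
    team = models.OneToOneField(Team)
    data_field = models.TextField(null=True, default="null")

    def __str__(self):
        return self.team.name
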
cf5fb07651099e38e6487eae641da07feda40b05
|
numba/tests/test_api.py
|
numba/tests/test_api.py
|
import numba
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
if __name__ == '__main__':
unittest.main()
|
import numba
from numba import jit, njit
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
class TestJitDecorator(TestCase):
"""
Test the jit and njit decorators
"""
def test_jit_nopython_forceobj(self):
with self.assertRaises(ValueError):
jit(nopython=True, forceobj=True)
def py_func(x):
return(x)
jit_func = jit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = jit(forceobj=True)(py_func)
jit_func(1)
self.assertFalse(jit_func.nopython_signatures)
def test_njit_nopython_forceobj(self):
with self.assertWarns(RuntimeWarning):
njit(forceobj=True)
with self.assertWarns(RuntimeWarning):
njit(nopython=True)
def py_func(x):
return(x)
jit_func = njit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = njit(forceobj=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
if __name__ == '__main__':
unittest.main()
|
Add testcases for jit and njit with forceobj and nopython
|
Add testcases for jit and njit with forceobj and nopython
|
Python
|
bsd-2-clause
|
numba/numba,cpcloud/numba,seibert/numba,stuartarchibald/numba,sklam/numba,seibert/numba,stonebig/numba,IntelLabs/numba,cpcloud/numba,stuartarchibald/numba,IntelLabs/numba,IntelLabs/numba,numba/numba,stonebig/numba,stonebig/numba,IntelLabs/numba,seibert/numba,stonebig/numba,gmarkall/numba,numba/numba,sklam/numba,IntelLabs/numba,stuartarchibald/numba,seibert/numba,seibert/numba,sklam/numba,stonebig/numba,sklam/numba,stuartarchibald/numba,numba/numba,cpcloud/numba,stuartarchibald/numba,cpcloud/numba,gmarkall/numba,gmarkall/numba,gmarkall/numba,cpcloud/numba,gmarkall/numba,sklam/numba,numba/numba
|
import numba
+ from numba import jit, njit
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
+ class TestJitDecorator(TestCase):
+ """
+ Test the jit and njit decorators
+ """
+ def test_jit_nopython_forceobj(self):
+ with self.assertRaises(ValueError):
+ jit(nopython=True, forceobj=True)
+
+ def py_func(x):
+ return(x)
+
+ jit_func = jit(nopython=True)(py_func)
+ jit_func(1)
+ self.assertTrue(jit_func.nopython_signatures)
+
+ jit_func = jit(forceobj=True)(py_func)
+ jit_func(1)
+ self.assertFalse(jit_func.nopython_signatures)
+
+
+
+ def test_njit_nopython_forceobj(self):
+ with self.assertWarns(RuntimeWarning):
+ njit(forceobj=True)
+
+ with self.assertWarns(RuntimeWarning):
+ njit(nopython=True)
+
+ def py_func(x):
+ return(x)
+
+ jit_func = njit(nopython=True)(py_func)
+ jit_func(1)
+ self.assertTrue(jit_func.nopython_signatures)
+
+ jit_func = njit(forceobj=True)(py_func)
+ jit_func(1)
+ self.assertTrue(jit_func.nopython_signatures)
+
+
if __name__ == '__main__':
unittest.main()
|
Add testcases for jit and njit with forceobj and nopython
|
## Code Before:
import numba
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
if __name__ == '__main__':
unittest.main()
## Instruction:
Add testcases for jit and njit with forceobj and nopython
## Code After:
import numba
from numba import jit, njit
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
class TestJitDecorator(TestCase):
"""
Test the jit and njit decorators
"""
def test_jit_nopython_forceobj(self):
with self.assertRaises(ValueError):
jit(nopython=True, forceobj=True)
def py_func(x):
return(x)
jit_func = jit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = jit(forceobj=True)(py_func)
jit_func(1)
self.assertFalse(jit_func.nopython_signatures)
def test_njit_nopython_forceobj(self):
with self.assertWarns(RuntimeWarning):
njit(forceobj=True)
with self.assertWarns(RuntimeWarning):
njit(nopython=True)
def py_func(x):
return(x)
jit_func = njit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = njit(forceobj=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
if __name__ == '__main__':
unittest.main()
|
import numba
+ from numba import jit, njit
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
+ class TestJitDecorator(TestCase):
+ """
+ Test the jit and njit decorators
+ """
+ def test_jit_nopython_forceobj(self):
+ with self.assertRaises(ValueError):
+ jit(nopython=True, forceobj=True)
+
+ def py_func(x):
+ return(x)
+
+ jit_func = jit(nopython=True)(py_func)
+ jit_func(1)
+ self.assertTrue(jit_func.nopython_signatures)
+
+ jit_func = jit(forceobj=True)(py_func)
+ jit_func(1)
+ self.assertFalse(jit_func.nopython_signatures)
+
+
+
+ def test_njit_nopython_forceobj(self):
+ with self.assertWarns(RuntimeWarning):
+ njit(forceobj=True)
+
+ with self.assertWarns(RuntimeWarning):
+ njit(nopython=True)
+
+ def py_func(x):
+ return(x)
+
+ jit_func = njit(nopython=True)(py_func)
+ jit_func(1)
+ self.assertTrue(jit_func.nopython_signatures)
+
+ jit_func = njit(forceobj=True)(py_func)
+ jit_func(1)
+ self.assertTrue(jit_func.nopython_signatures)
+
+
if __name__ == '__main__':
unittest.main()
|
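A minimal standalone sketch of the flag semantics the record above tests (assumes a reasonably recent numba is installed; the function name is illustrative):

from numba import jit, njit

def double(x):
    return 2 * x

# jit(nopython=True, forceobj=True) is contradictory and raises ValueError
# at decoration time, exactly as the test asserts.
try:
    jit(nopython=True, forceobj=True)
except ValueError:
    print("nopython and forceobj cannot be combined")

# njit already implies nopython=True; re-passing either flag only emits a
# RuntimeWarning, and compilation still happens in nopython mode.
fast_double = njit(double)
assert fast_double(3) == 6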
70323d2cc7c568fecda66adb0e8ace1922b15b8f
|
recipes/graphviz/run_test.py
|
recipes/graphviz/run_test.py
|
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
|
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tpdf", "-o", "sample.pdf", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tsvg", "-o", "sample.svg", "sample.dot"], shell=True)
|
Add tests for svg and pdf on Windows
|
Add tests for svg and pdf on Windows
|
Python
|
bsd-3-clause
|
cpaulik/staged-recipes,jerowe/staged-recipes,cpaulik/staged-recipes,asmeurer/staged-recipes,hajapy/staged-recipes,guillochon/staged-recipes,richardotis/staged-recipes,glemaitre/staged-recipes,kwilcox/staged-recipes,patricksnape/staged-recipes,pstjohn/staged-recipes,johannesring/staged-recipes,caspervdw/staged-recipes,petrushy/staged-recipes,sannykr/staged-recipes,isuruf/staged-recipes,birdsarah/staged-recipes,vamega/staged-recipes,sodre/staged-recipes,Cashalow/staged-recipes,khallock/staged-recipes,rmcgibbo/staged-recipes,Juanlu001/staged-recipes,tylere/staged-recipes,scopatz/staged-recipes,vamega/staged-recipes,Savvysherpa/staged-recipes,tylere/staged-recipes,dharhas/staged-recipes,dfroger/staged-recipes,nicoddemus/staged-recipes,JohnGreeley/staged-recipes,shadowwalkersb/staged-recipes,jerowe/staged-recipes,OpenPIV/staged-recipes,jjhelmus/staged-recipes,NOAA-ORR-ERD/staged-recipes,jakirkham/staged-recipes,JohnGreeley/staged-recipes,richardotis/staged-recipes,jakirkham/staged-recipes,data-exp-lab/staged-recipes,ceholden/staged-recipes,sodre/staged-recipes,NOAA-ORR-ERD/staged-recipes,igortg/staged-recipes,hadim/staged-recipes,grlee77/staged-recipes,nicoddemus/staged-recipes,dschreij/staged-recipes,chohner/staged-recipes,rvalieris/staged-recipes,jcb91/staged-recipes,valgur/staged-recipes,larray-project/staged-recipes,birdsarah/staged-recipes,rvalieris/staged-recipes,benvandyke/staged-recipes,goanpeca/staged-recipes,isuruf/staged-recipes,mcernak/staged-recipes,jochym/staged-recipes,sannykr/staged-recipes,data-exp-lab/staged-recipes,petrushy/staged-recipes,sodre/staged-recipes,Juanlu001/staged-recipes,stuertz/staged-recipes,planetarypy/staged-recipes,chrisburr/staged-recipes,mariusvniekerk/staged-recipes,jochym/staged-recipes,chrisburr/staged-recipes,stuertz/staged-recipes,gqmelo/staged-recipes,ocefpaf/staged-recipes,mcernak/staged-recipes,atedstone/staged-recipes,johannesring/staged-recipes,basnijholt/staged-recipes,mariusvniekerk/staged-recipes,patricksnape/staged-recipes,ReimarBauer/staged-recipes,bmabey/staged-recipes,dschreij/staged-recipes,mcs07/staged-recipes,kwilcox/staged-recipes,basnijholt/staged-recipes,koverholt/staged-recipes,atedstone/staged-recipes,SylvainCorlay/staged-recipes,dharhas/staged-recipes,pmlandwehr/staged-recipes,shadowwalkersb/staged-recipes,valgur/staged-recipes,khallock/staged-recipes,barkls/staged-recipes,glemaitre/staged-recipes,dfroger/staged-recipes,hbredin/staged-recipes,benvandyke/staged-recipes,gqmelo/staged-recipes,synapticarbors/staged-recipes,ReimarBauer/staged-recipes,koverholt/staged-recipes,conda-forge/staged-recipes,Cashalow/staged-recipes,caspervdw/staged-recipes,johanneskoester/staged-recipes,ceholden/staged-recipes,rolando-contrib/staged-recipes,hbredin/staged-recipes,rmcgibbo/staged-recipes,conda-forge/staged-recipes,bmabey/staged-recipes,scopatz/staged-recipes,SylvainCorlay/staged-recipes,blowekamp/staged-recipes,ocefpaf/staged-recipes,guillochon/staged-recipes,chohner/staged-recipes,planetarypy/staged-recipes,blowekamp/staged-recipes,asmeurer/staged-recipes,pmlandwehr/staged-recipes,jcb91/staged-recipes,hadim/staged-recipes,barkls/staged-recipes,pstjohn/staged-recipes,igortg/staged-recipes,mcs07/staged-recipes,Savvysherpa/staged-recipes,larray-project/staged-recipes,rolando-contrib/staged-recipes,hajapy/staged-recipes,jjhelmus/staged-recipes,OpenPIV/staged-recipes,johanneskoester/staged-recipes,grlee77/staged-recipes,synapticarbors/staged-recipes,goanpeca/staged-recipes
|
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
+ subprocess.call(["dot", "-Tpdf", "-o", "sample.pdf", "sample.dot"], shell=True)
+ subprocess.call(["dot", "-Tsvg", "-o", "sample.svg", "sample.dot"], shell=True)
|
Add tests for svg and pdf on Windows
|
## Code Before:
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
## Instruction:
Add tests for svg and pdf on Windows
## Code After:
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tpdf", "-o", "sample.pdf", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tsvg", "-o", "sample.svg", "sample.dot"], shell=True)
|
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
+ subprocess.call(["dot", "-Tpdf", "-o", "sample.pdf", "sample.dot"], shell=True)
+ subprocess.call(["dot", "-Tsvg", "-o", "sample.svg", "sample.dot"], shell=True)
|
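One detail worth noting in the record above: the Windows branch passes an argument list together with shell=True, a classic subprocess pitfall. On POSIX that form hands only "dot" to the shell and turns the remaining items into shell positional parameters; it happens to work on Windows, but dropping shell=True is the portable form. A sketch (assumes Graphviz's dot is on PATH and sample.dot exists):

import subprocess

for fmt in ("png", "pdf", "svg"):
    subprocess.call(["dot", "-T" + fmt, "-o", "sample." + fmt, "sample.dot"])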
ccf9e48cf874e7970c5b2e587e797a0501483139
|
spec/data/anagram_index_spec.py
|
spec/data/anagram_index_spec.py
|
from data import anagram_index, warehouse
from spec.mamba import *
with description('anagram_index'):
with before.all:
self.subject = anagram_index.AnagramIndex(warehouse.get('/words/unigram'))
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
|
import collections
from data import anagram_index
from spec.data.fixtures import tries
from spec.mamba import *
with description('anagram_index'):
with before.all:
words = collections.OrderedDict(tries.kitchen_sink_data())
self.subject = anagram_index.AnagramIndex(words)
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
|
Update anagram index spec data source.
|
Update anagram index spec data source.
|
Python
|
mit
|
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
|
+ import collections
+
- from data import anagram_index, warehouse
+ from data import anagram_index
+ from spec.data.fixtures import tries
from spec.mamba import *
with description('anagram_index'):
with before.all:
+ words = collections.OrderedDict(tries.kitchen_sink_data())
- self.subject = anagram_index.AnagramIndex(warehouse.get('/words/unigram'))
+ self.subject = anagram_index.AnagramIndex(words)
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
|
Update anagram index spec data source.
|
## Code Before:
from data import anagram_index, warehouse
from spec.mamba import *
with description('anagram_index'):
with before.all:
self.subject = anagram_index.AnagramIndex(warehouse.get('/words/unigram'))
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
## Instruction:
Update anagram index spec data source.
## Code After:
import collections
from data import anagram_index
from spec.data.fixtures import tries
from spec.mamba import *
with description('anagram_index'):
with before.all:
words = collections.OrderedDict(tries.kitchen_sink_data())
self.subject = anagram_index.AnagramIndex(words)
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
|
+ import collections
+
- from data import anagram_index, warehouse
? -----------
+ from data import anagram_index
+ from spec.data.fixtures import tries
from spec.mamba import *
with description('anagram_index'):
with before.all:
+ words = collections.OrderedDict(tries.kitchen_sink_data())
- self.subject = anagram_index.AnagramIndex(warehouse.get('/words/unigram'))
? ---------------- --------- -
+ self.subject = anagram_index.AnagramIndex(words)
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
|
5bc0226fe1ad03495e97dc2933fa17d18cd38bb9
|
meetup_facebook_bot/models/speaker.py
|
meetup_facebook_bot/models/speaker.py
|
from sqlalchemy import Column, BIGINT, String, Integer
from meetup_facebook_bot.models.base import Base
class Speaker(Base):
__tablename__ = 'speakers'
id = Column(Integer, primary_key=True, autoincrement=True)
page_scoped_id = Column(BIGINT, unique=True)
name = Column(String(128), nullable=False)
token = Column(String(128), unique=True, nullable=False)
def __repr__(self):
return '<Speaker %r>' % self.id
|
from sqlalchemy import Column, BIGINT, String, Integer
from meetup_facebook_bot.models.base import Base
class Speaker(Base):
__tablename__ = 'speakers'
id = Column(Integer, primary_key=True, autoincrement=True)
page_scoped_id = Column(BIGINT)
name = Column(String(128), nullable=False)
token = Column(String(128), unique=True, nullable=False)
def __repr__(self):
return '<Speaker %r>' % self.id
|
Remove uniqueness constraint from page_scoped_id
|
Remove uniqueness constraint from page_scoped_id
|
Python
|
mit
|
Stark-Mountain/meetup-facebook-bot,Stark-Mountain/meetup-facebook-bot
|
from sqlalchemy import Column, BIGINT, String, Integer
from meetup_facebook_bot.models.base import Base
class Speaker(Base):
__tablename__ = 'speakers'
id = Column(Integer, primary_key=True, autoincrement=True)
- page_scoped_id = Column(BIGINT, unique=True)
+ page_scoped_id = Column(BIGINT)
name = Column(String(128), nullable=False)
token = Column(String(128), unique=True, nullable=False)
def __repr__(self):
return '<Speaker %r>' % self.id
|
Remove uniqueness constraint from page_scoped_id
|
## Code Before:
from sqlalchemy import Column, BIGINT, String, Integer
from meetup_facebook_bot.models.base import Base
class Speaker(Base):
__tablename__ = 'speakers'
id = Column(Integer, primary_key=True, autoincrement=True)
page_scoped_id = Column(BIGINT, unique=True)
name = Column(String(128), nullable=False)
token = Column(String(128), unique=True, nullable=False)
def __repr__(self):
return '<Speaker %r>' % self.id
## Instruction:
Remove uniqueness constraint from page_scoped_id
## Code After:
from sqlalchemy import Column, BIGINT, String, Integer
from meetup_facebook_bot.models.base import Base
class Speaker(Base):
__tablename__ = 'speakers'
id = Column(Integer, primary_key=True, autoincrement=True)
page_scoped_id = Column(BIGINT)
name = Column(String(128), nullable=False)
token = Column(String(128), unique=True, nullable=False)
def __repr__(self):
return '<Speaker %r>' % self.id
|
from sqlalchemy import Column, BIGINT, String, Integer
from meetup_facebook_bot.models.base import Base
class Speaker(Base):
__tablename__ = 'speakers'
id = Column(Integer, primary_key=True, autoincrement=True)
- page_scoped_id = Column(BIGINT, unique=True)
? -------------
+ page_scoped_id = Column(BIGINT)
name = Column(String(128), nullable=False)
token = Column(String(128), unique=True, nullable=False)
def __repr__(self):
return '<Speaker %r>' % self.id
|
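Removing unique=True from the model only changes the Python-side metadata; a UNIQUE constraint already created in the database survives until a schema migration drops it. A hypothetical Alembic step for projects that use it (the constraint name is illustrative and depends on how the table was originally created):

from alembic import op

def upgrade():
    op.drop_constraint("speakers_page_scoped_id_key", "speakers", type_="unique")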
4de23cffa16c71e287efba7d32ba375feeb9bc13
|
format_json.py
|
format_json.py
|
import sys
import json
import argparse
def format_json(fp):
try:
data = json.load(fp)
except ValueError:
sys.stderr.write("In file: {}\n".format(fp.name))
raise
# Jump back to the beginning of the file before overwriting it.
fp.seek(0)
json.dump(data, fp, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
fp.write('\n') # add a trailing newline.
fp.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Format JSON files in place.'
)
parser.add_argument(
'files',
type=argparse.FileType('r+'),
help='JSON filepaths',
nargs='+'
)
args = parser.parse_args()
for json_file in args.files:
format_json(json_file)
|
import sys
import json
import argparse
def format_json(fp):
try:
data = json.load(fp)
except ValueError:
sys.stderr.write("In file: {}\n".format(fp.name))
raise
# Jump back to the beginning of the file before overwriting it.
fp.seek(0)
fp.truncate(0)
json.dump(data, fp, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
fp.write('\n') # add a trailing newline.
fp.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Format JSON files in place.'
)
parser.add_argument(
'files',
type=argparse.FileType('r+'),
help='JSON filepaths',
nargs='+'
)
args = parser.parse_args()
for json_file in args.files:
format_json(json_file)
|
Truncate the file before writing more data.
|
Truncate the file before writing more data.
|
Python
|
mit
|
nbeaver/cmd-oysters,nbeaver/cmd-oysters
|
import sys
import json
import argparse
def format_json(fp):
try:
data = json.load(fp)
except ValueError:
sys.stderr.write("In file: {}\n".format(fp.name))
raise
# Jump back to the beginning of the file before overwriting it.
fp.seek(0)
+ fp.truncate(0)
json.dump(data, fp, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
fp.write('\n') # add a trailing newline.
fp.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Format JSON files in place.'
)
parser.add_argument(
'files',
type=argparse.FileType('r+'),
help='JSON filepaths',
nargs='+'
)
args = parser.parse_args()
for json_file in args.files:
format_json(json_file)
|
Truncate the file before writing more data.
|
## Code Before:
import sys
import json
import argparse
def format_json(fp):
try:
data = json.load(fp)
except ValueError:
sys.stderr.write("In file: {}\n".format(fp.name))
raise
# Jump back to the beginning of the file before overwriting it.
fp.seek(0)
json.dump(data, fp, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
fp.write('\n') # add a trailing newline.
fp.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Format JSON files in place.'
)
parser.add_argument(
'files',
type=argparse.FileType('r+'),
help='JSON filepaths',
nargs='+'
)
args = parser.parse_args()
for json_file in args.files:
format_json(json_file)
## Instruction:
Truncate the file before writing more data.
## Code After:
import sys
import json
import argparse
def format_json(fp):
try:
data = json.load(fp)
except ValueError:
sys.stderr.write("In file: {}\n".format(fp.name))
raise
# Jump back to the beginning of the file before overwriting it.
fp.seek(0)
fp.truncate(0)
json.dump(data, fp, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
fp.write('\n') # add a trailing newline.
fp.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Format JSON files in place.'
)
parser.add_argument(
'files',
type=argparse.FileType('r+'),
help='JSON filepaths',
nargs='+'
)
args = parser.parse_args()
for json_file in args.files:
format_json(json_file)
|
import sys
import json
import argparse
def format_json(fp):
try:
data = json.load(fp)
except ValueError:
sys.stderr.write("In file: {}\n".format(fp.name))
raise
# Jump back to the beginning of the file before overwriting it.
fp.seek(0)
+ fp.truncate(0)
json.dump(data, fp, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
fp.write('\n') # add a trailing newline.
fp.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Format JSON files in place.'
)
parser.add_argument(
'files',
type=argparse.FileType('r+'),
help='JSON filepaths',
nargs='+'
)
args = parser.parse_args()
for json_file in args.files:
format_json(json_file)
|
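Why the added truncate matters: fp.seek(0) only repositions the write cursor, so when the reformatted JSON is shorter than the original, the tail of the old file would survive and corrupt the output. A minimal demonstration with a throwaway temp file (names are illustrative):

import tempfile

with tempfile.TemporaryFile(mode="w+") as fp:
    fp.write('{"key": "a much longer original value"}')
    fp.seek(0)
    fp.write('{"key": "short"}')  # no truncate: stale bytes remain after this
    fp.seek(0)
    print(fp.read())  # new text followed by leftover old bytes - invalid JSON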
b5ca3dd7b5c743987223b42e302a4044367d4dc9
|
opps/core/admin/article.py
|
opps/core/admin/article.py
|
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
(None, {'fields': ('main_image', 'credit', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
|
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage, PostSource
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostSourceInline(admin.TabularInline):
model = PostSource
fk_name = 'post'
raw_id_fields = ['source']
actions = None
extra = 1
fieldsets = [(None, {
'classes': ('collapse',),
'fields': ('source', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline, PostSourceInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
(None, {'fields': ('main_image', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
|
Create post source inline (admin Tabular Inline) on core post
|
Create post source inline (admin Tabular Inline) on core post
|
Python
|
mit
|
YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,YACOWS/opps,opps/opps,williamroot/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,opps/opps,opps/opps,williamroot/opps,jeanmask/opps,jeanmask/opps
|
from django.contrib import admin
from django import forms
- from opps.core.models import Post, PostImage
+ from opps.core.models import Post, PostImage, PostSource
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
+ class PostSourceInline(admin.TabularInline):
+ model = PostSource
+ fk_name = 'post'
+ raw_id_fields = ['source']
+ actions = None
+ extra = 1
+ fieldsets = [(None, {
+ 'classes': ('collapse',),
+ 'fields': ('source', 'order')})]
+
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
- inlines = [PostImageInline]
+ inlines = [PostImageInline, PostSourceInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
- (None, {'fields': ('main_image', 'credit', 'slug',)})
+ (None, {'fields': ('main_image', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
|
Create post source inline (admin Tabular Inline) on core post
|
## Code Before:
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
(None, {'fields': ('main_image', 'credit', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
## Instruction:
Create post source inline (admin Tabular Inline) on core post
## Code After:
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage, PostSource
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostSourceInline(admin.TabularInline):
model = PostSource
fk_name = 'post'
raw_id_fields = ['source']
actions = None
extra = 1
fieldsets = [(None, {
'classes': ('collapse',),
'fields': ('source', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline, PostSourceInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
(None, {'fields': ('main_image', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
|
from django.contrib import admin
from django import forms
- from opps.core.models import Post, PostImage
+ from opps.core.models import Post, PostImage, PostSource
? ++++++++++++
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
+ class PostSourceInline(admin.TabularInline):
+ model = PostSource
+ fk_name = 'post'
+ raw_id_fields = ['source']
+ actions = None
+ extra = 1
+ fieldsets = [(None, {
+ 'classes': ('collapse',),
+ 'fields': ('source', 'order')})]
+
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
- inlines = [PostImageInline]
+ inlines = [PostImageInline, PostSourceInline]
? ++++++++++++++++++
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
- (None, {'fields': ('main_image', 'credit', 'slug',)})
? ----------
+ (None, {'fields': ('main_image', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
|
de841f77f6c3eaf60e563fd5cac0d9cb73dac240
|
cairis/core/PasswordManager.py
|
cairis/core/PasswordManager.py
|
from random import choice
from string import ascii_letters, digits
import secretstorage
from keyring import set_password, get_password
__author__ = 'Shamal Faily'
def setDatabasePassword(dbUser):
rp = ''.join(choice(ascii_letters + digits) for i in range(32))
set_password('cairisdb',dbUser,rp)
return rp
def getDatabasePassword(dbUser):
return get_password('cairisdb',dbUser)
|
from random import choice
from string import ascii_letters, digits
import secretstorage
from keyring import set_password, get_password
__author__ = 'Shamal Faily'
def setDatabasePassword(dbUser):
# rp = ''.join(choice(ascii_letters + digits) for i in range(32))
# set_password('cairisdb',dbUser,rp)
# return rp
return ''
def getDatabasePassword(dbUser):
# return get_password('cairisdb',dbUser)
return ''
|
Revert database password policy while problems with keyring are investigated
|
Revert database password policy while problems with keyring are investigated
|
Python
|
apache-2.0
|
failys/CAIRIS,failys/CAIRIS,failys/CAIRIS
|
from random import choice
from string import ascii_letters, digits
import secretstorage
from keyring import set_password, get_password
__author__ = 'Shamal Faily'
def setDatabasePassword(dbUser):
- rp = ''.join(choice(ascii_letters + digits) for i in range(32))
+ # rp = ''.join(choice(ascii_letters + digits) for i in range(32))
- set_password('cairisdb',dbUser,rp)
+ # set_password('cairisdb',dbUser,rp)
- return rp
+ # return rp
+ return ''
def getDatabasePassword(dbUser):
- return get_password('cairisdb',dbUser)
+ # return get_password('cairisdb',dbUser)
+ return ''
|
Revert database password policy while problems with keyring are investigated
|
## Code Before:
from random import choice
from string import ascii_letters, digits
import secretstorage
from keyring import set_password, get_password
__author__ = 'Shamal Faily'
def setDatabasePassword(dbUser):
rp = ''.join(choice(ascii_letters + digits) for i in range(32))
set_password('cairisdb',dbUser,rp)
return rp
def getDatabasePassword(dbUser):
return get_password('cairisdb',dbUser)
## Instruction:
Revert database password policy while problems with keyring investigated
## Code After:
from random import choice
from string import ascii_letters, digits
import secretstorage
from keyring import set_password, get_password
__author__ = 'Shamal Faily'
def setDatabasePassword(dbUser):
# rp = ''.join(choice(ascii_letters + digits) for i in range(32))
# set_password('cairisdb',dbUser,rp)
# return rp
return ''
def getDatabasePassword(dbUser):
# return get_password('cairisdb',dbUser)
return ''
|
from random import choice
from string import ascii_letters, digits
import secretstorage
from keyring import set_password, get_password
__author__ = 'Shamal Faily'
def setDatabasePassword(dbUser):
- rp = ''.join(choice(ascii_letters + digits) for i in range(32))
+ # rp = ''.join(choice(ascii_letters + digits) for i in range(32))
? +
- set_password('cairisdb',dbUser,rp)
+ # set_password('cairisdb',dbUser,rp)
? +
- return rp
+ # return rp
? +
+ return ''
def getDatabasePassword(dbUser):
- return get_password('cairisdb',dbUser)
+ # return get_password('cairisdb',dbUser)
? +
+ return ''
|
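For reference, the keyring calls being commented out above follow this pattern (a sketch only, assuming a keyring backend is available; the username and secret are illustrative):

import keyring

keyring.set_password("cairisdb", "some_db_user", "s3cret")
assert keyring.get_password("cairisdb", "some_db_user") == "s3cret"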
c5d656cff3e7ac218cc41805dfb8c19f63cd4250
|
run_server.py
|
run_server.py
|
from shorter.web import app
if __name__ == "__main__":
app.run()
|
from shorter.database import (
User,
db_session,
)
from shorter.web import app
if __name__ == "__main__":
# makes testing easier
test_user_created = db_session.query(User).filter_by(
username='jimmy').one_or_none()
if not test_user_created:
db_session.add(
User(username='jimmy', password='secret'))
db_session.commit()
app.run()
|
Create a testing user on starting the server
|
Create a testing user on starting the server
|
Python
|
agpl-3.0
|
mapleoin/shorter
|
+ from shorter.database import (
+ User,
+ db_session,
+ )
from shorter.web import app
if __name__ == "__main__":
+ # makes testing easier
+ test_user_created = db_session.query(User).filter_by(
+ username='jimmy').one_or_none()
+ if not test_user_created:
+ db_session.add(
+ User(username='jimmy', password='secret'))
+ db_session.commit()
+
app.run()
|
Create a testing user on starting the server
|
## Code Before:
from shorter.web import app
if __name__ == "__main__":
app.run()
## Instruction:
Create a testing user on starting the server
## Code After:
from shorter.database import (
User,
db_session,
)
from shorter.web import app
if __name__ == "__main__":
# makes testing easier
test_user_created = db_session.query(User).filter_by(
username='jimmy').one_or_none()
if not test_user_created:
db_session.add(
User(username='jimmy', password='secret'))
db_session.commit()
app.run()
|
+ from shorter.database import (
+ User,
+ db_session,
+ )
from shorter.web import app
if __name__ == "__main__":
+ # makes testing easier
+ test_user_created = db_session.query(User).filter_by(
+ username='jimmy').one_or_none()
+ if not test_user_created:
+ db_session.add(
+ User(username='jimmy', password='secret'))
+ db_session.commit()
+
app.run()
|
bfd34a7aaf903c823d41068173c09bc5b1a251bc
|
test/sasdataloader/test/utest_sesans.py
|
test/sasdataloader/test/utest_sesans.py
|
import unittest
from sas.sascalc.dataloader.loader import Loader
import os.path
class sesans_reader(unittest.TestCase):
def setUp(self):
self.loader = Loader()
def test_sesans_load(self):
"""
Test .SES file loading
"""
f =self.loader.load("sphere3micron.ses")
# self.assertEqual(f, 5)
self.assertEqual(len(f.x), 40)
self.assertEqual(f.x[0], 391.56)
self.assertEqual(f.x[-1], 46099)
self.assertEqual(f.y[-1], -0.19956)
self.assertEqual(f.x_unit, "A")
self.assertEqual(f.y_unit, "A-2 cm-1")
self.assertEqual(f.sample.name, "Polystyrene 2 um in 53% H2O, 47% D2O")
self.assertEqual(f.sample.thickness, 0.2)
self.assertEqual(f.sample.zacceptance, (0.0168, "radians"))
if __name__ == "__main__":
unittest.main()
|
import unittest
from sas.sascalc.dataloader.loader import Loader
import os.path
class sesans_reader(unittest.TestCase):
def setUp(self):
self.loader = Loader()
def test_sesans_load(self):
"""
Test .SES file loading
"""
f =self.loader.load("sphere3micron.ses")
# self.assertEqual(f, 5)
self.assertEqual(len(f.x), 40)
self.assertEqual(f.x[0], 391.56)
self.assertEqual(f.x[-1], 46099)
self.assertEqual(f.y[-1], -0.19956)
self.assertEqual(f.x_unit, "A")
self.assertEqual(f.y_unit, "A-2 cm-1")
self.assertEqual(f.sample.name, "Polystyrene 2 um in 53% H2O, 47% D2O")
self.assertEqual(f.sample.thickness, 0.2)
self.assertEqual(f.sample.zacceptance, (0.0168, "radians"))
self.assertEqual(f.isSesans, True)
if __name__ == "__main__":
unittest.main()
|
Test that .SES files are tagged as Sesans
|
Test that .SES files are tagged as Sesans
|
Python
|
bsd-3-clause
|
lewisodriscoll/sasview,lewisodriscoll/sasview,SasView/sasview,lewisodriscoll/sasview,SasView/sasview,SasView/sasview,SasView/sasview,lewisodriscoll/sasview,SasView/sasview,SasView/sasview,lewisodriscoll/sasview
|
import unittest
from sas.sascalc.dataloader.loader import Loader
import os.path
class sesans_reader(unittest.TestCase):
def setUp(self):
self.loader = Loader()
def test_sesans_load(self):
"""
Test .SES file loading
"""
f =self.loader.load("sphere3micron.ses")
# self.assertEqual(f, 5)
self.assertEqual(len(f.x), 40)
self.assertEqual(f.x[0], 391.56)
self.assertEqual(f.x[-1], 46099)
self.assertEqual(f.y[-1], -0.19956)
self.assertEqual(f.x_unit, "A")
self.assertEqual(f.y_unit, "A-2 cm-1")
self.assertEqual(f.sample.name, "Polystyrene 2 um in 53% H2O, 47% D2O")
self.assertEqual(f.sample.thickness, 0.2)
self.assertEqual(f.sample.zacceptance, (0.0168, "radians"))
+ self.assertEqual(f.isSesans, True)
if __name__ == "__main__":
unittest.main()
|
Test that .SES files are tagged as Sesans
|
## Code Before:
import unittest
from sas.sascalc.dataloader.loader import Loader
import os.path
class sesans_reader(unittest.TestCase):
def setUp(self):
self.loader = Loader()
def test_sesans_load(self):
"""
Test .SES file loading
"""
f =self.loader.load("sphere3micron.ses")
# self.assertEqual(f, 5)
self.assertEqual(len(f.x), 40)
self.assertEqual(f.x[0], 391.56)
self.assertEqual(f.x[-1], 46099)
self.assertEqual(f.y[-1], -0.19956)
self.assertEqual(f.x_unit, "A")
self.assertEqual(f.y_unit, "A-2 cm-1")
self.assertEqual(f.sample.name, "Polystyrene 2 um in 53% H2O, 47% D2O")
self.assertEqual(f.sample.thickness, 0.2)
self.assertEqual(f.sample.zacceptance, (0.0168, "radians"))
if __name__ == "__main__":
unittest.main()
## Instruction:
Test that .SES files are tagged as Sesans
## Code After:
import unittest
from sas.sascalc.dataloader.loader import Loader
import os.path
class sesans_reader(unittest.TestCase):
def setUp(self):
self.loader = Loader()
def test_sesans_load(self):
"""
Test .SES file loading
"""
f =self.loader.load("sphere3micron.ses")
# self.assertEqual(f, 5)
self.assertEqual(len(f.x), 40)
self.assertEqual(f.x[0], 391.56)
self.assertEqual(f.x[-1], 46099)
self.assertEqual(f.y[-1], -0.19956)
self.assertEqual(f.x_unit, "A")
self.assertEqual(f.y_unit, "A-2 cm-1")
self.assertEqual(f.sample.name, "Polystyrene 2 um in 53% H2O, 47% D2O")
self.assertEqual(f.sample.thickness, 0.2)
self.assertEqual(f.sample.zacceptance, (0.0168, "radians"))
self.assertEqual(f.isSesans, True)
if __name__ == "__main__":
unittest.main()
|
import unittest
from sas.sascalc.dataloader.loader import Loader
import os.path
class sesans_reader(unittest.TestCase):
def setUp(self):
self.loader = Loader()
def test_sesans_load(self):
"""
Test .SES file loading
"""
f =self.loader.load("sphere3micron.ses")
# self.assertEqual(f, 5)
self.assertEqual(len(f.x), 40)
self.assertEqual(f.x[0], 391.56)
self.assertEqual(f.x[-1], 46099)
self.assertEqual(f.y[-1], -0.19956)
self.assertEqual(f.x_unit, "A")
self.assertEqual(f.y_unit, "A-2 cm-1")
self.assertEqual(f.sample.name, "Polystyrene 2 um in 53% H2O, 47% D2O")
self.assertEqual(f.sample.thickness, 0.2)
self.assertEqual(f.sample.zacceptance, (0.0168, "radians"))
+ self.assertEqual(f.isSesans, True)
if __name__ == "__main__":
unittest.main()
|
4cad1d743f2c70c3ee046b59d98aecb6b5b301d6
|
src/event_manager/views/base.py
|
src/event_manager/views/base.py
|
from django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login
def home(request):
return render(request, 'login.html', {})
def login_user(request):
logout(request)
username = ""
password = ""
if request.POST:
username = request.POST.get('username')
password = request.POST.get('password')
user = authenticate(username=username, password=password)
if user is not None:
if user.is_active:
login(request, user)
return HttpResponseRedirect('/e/')
return render(request, 'login.html', {})
|
from django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login
def home(request):
return render(request, 'login.html', {})
def login_user(request):
logout(request)
username = ""
password = ""
if request.POST:
username = request.POST.get('username')
password = request.POST.get('password')
user = authenticate(username=username, password=password)
if user is not None:
if user.is_active:
login(request, user)
return HttpResponseRedirect('/e/')
return render(request, 'login.html', {})
def register_user(request):
pass
|
Create shell function for register_user
|
Create shell function for register_user
|
Python
|
agpl-3.0
|
DavidJFelix/hatchit,DavidJFelix/hatchit,DavidJFelix/hatchit
|
from django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login
def home(request):
return render(request, 'login.html', {})
def login_user(request):
logout(request)
username = ""
password = ""
if request.POST:
username = request.POST.get('username')
password = request.POST.get('password')
user = authenticate(username=username, password=password)
if user is not None:
if user.is_active:
login(request, user)
return HttpResponseRedirect('/e/')
return render(request, 'login.html', {})
+
+
+ def register_user(request):
+ pass
|
Create shell function for register_user
|
## Code Before:
from django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login
def home(request):
return render(request, 'login.html', {})
def login_user(request):
logout(request)
username = ""
password = ""
if request.POST:
username = request.POST.get('username')
password = request.POST.get('password')
user = authenticate(username=username, password=password)
if user is not None:
if user.is_active:
login(request, user)
return HttpResponseRedirect('/e/')
return render(request, 'login.html', {})
## Instruction:
Create shell function for register_user
## Code After:
from django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login
def home(request):
return render(request, 'login.html', {})
def login_user(request):
logout(request)
username = ""
password = ""
if request.POST:
username = request.POST.get('username')
password = request.POST.get('password')
user = authenticate(username=username, password=password)
if user is not None:
if user.is_active:
login(request, user)
return HttpResponseRedirect('/e/')
return render(request, 'login.html', {})
def register_user(request):
pass
|
from django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login
def home(request):
return render(request, 'login.html', {})
def login_user(request):
logout(request)
username = ""
password = ""
if request.POST:
username = request.POST.get('username')
password = request.POST.get('password')
user = authenticate(username=username, password=password)
if user is not None:
if user.is_active:
login(request, user)
return HttpResponseRedirect('/e/')
return render(request, 'login.html', {})
+
+
+ def register_user(request):
+ pass
|
d6a8b995f2a1b069729f07ef43b966b2f15fd3b3
|
linter.py
|
linter.py
|
from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '@')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
|
from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '${file}')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
r' \((?P<code>.+)\)$'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
|
Support the new SublimeLinter `code` property
|
Support the new SublimeLinter `code` property
|
Python
|
mit
|
sindresorhus/SublimeLinter-contrib-xo,sindresorhus/SublimeLinter-contrib-xo
|
from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
- cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '@')
+ cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '${file}')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
+ r' \((?P<code>.+)\)$'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
|
Support the new SublimeLinter `code` property
|
## Code Before:
from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '@')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
## Instruction:
Support the new SublimeLinter `code` property
## Code After:
from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '${file}')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
r' \((?P<code>.+)\)$'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
|
from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
- cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '@')
? ^
+ cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '${file}')
? ^^^^^^^
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
+ r' \((?P<code>.+)\)$'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
|
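A quick sanity check of the extended regex against a typical eslint/xo compact-reporter line (the sample line is illustrative, not captured from a real run):

import re

pattern = re.compile(
    r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
    r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
    r'(?P<message>.+)'
    r' \((?P<code>.+)\)$'
)
m = pattern.match("app.js: line 3, col 5, Error - Unexpected var. (no-var)")
assert m and m.group("code") == "no-var" and m.group("line") == "3"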
ba2913658e3770ef73d0e7972435def32199cc08
|
test.py
|
test.py
|
import random
from keras.models import Sequential
from keras.layers import Dense, Activation
import numpy as np
def generate_data(size):
""""
x_train = []
y_train = []
for i in range(size):
x = random.randint(0, 100)
y = 2*x
x_train.append(x)
y_train.append(y)
return np.array(x_train), np.array(y_train)
"""
import numpy as np
#data = np.random.random((10000, 100))
#labels = np.random.randint(2, size=(10000, 1))
data = np.random.random((10000, 1))
labels = data*2
return data, labels
model = Sequential()
model.add(Dense(1, input_dim=1))
model.compile(optimizer='rmsprop',
loss='binary_crossentropy',
metrics=['accuracy'])
x_train, y_train = generate_data(1000)
x_test, y_test = generate_data(10)
model.fit(x_train, y_train, epochs=1000, batch_size=32)
loss_and_metrics = model.evaluate(x_test, y_test, batch_size=128)
print(loss_and_metrics)
|
import random
from keras.models import Sequential
from keras.layers import Dense, Activation
import numpy as np
def generate_data(size):
""""
x_train = []
y_train = []
for i in range(size):
x = random.randint(0, 100)
y = 2*x
x_train.append(x)
y_train.append(y)
return np.array(x_train), np.array(y_train)
"""
import numpy as np
#data = np.random.random((10000, 100))
#labels = np.random.randint(2, size=(10000, 1))
#data = np.random.random((10000, 2))
#labels = np.sum(data, (1,))
data = np.random.random((10000, 1))
labels = data*2
return data, labels
model = Sequential()
model.add(Dense(1, input_dim=1))
model.compile(optimizer='rmsprop',
loss='mse',
metrics=['accuracy'])
x_train, y_train = generate_data(10000)
x_test, y_test = generate_data(100)
model.fit(x_train, y_train, epochs=30, batch_size=32)
loss_and_metrics = model.evaluate(x_test, y_test, batch_size=16)
#print(loss_and_metrics)
pred = model.predict(x_test, batch_size=32, verbose=0)
print("expected:")
print(y_test)
print("actual:")
print(pred)
|
Fix linear regression model loss calculation
|
Fix linear regression model loss calculation
|
Python
|
apache-2.0
|
alexkorep/dogs-vs-cats
|
import random
from keras.models import Sequential
from keras.layers import Dense, Activation
import numpy as np
def generate_data(size):
""""
x_train = []
y_train = []
for i in range(size):
x = random.randint(0, 100)
y = 2*x
x_train.append(x)
y_train.append(y)
return np.array(x_train), np.array(y_train)
"""
import numpy as np
#data = np.random.random((10000, 100))
#labels = np.random.randint(2, size=(10000, 1))
+ #data = np.random.random((10000, 2))
+ #labels = np.sum(data, (1,))
data = np.random.random((10000, 1))
labels = data*2
return data, labels
model = Sequential()
model.add(Dense(1, input_dim=1))
model.compile(optimizer='rmsprop',
- loss='binary_crossentropy',
+ loss='mse',
metrics=['accuracy'])
- x_train, y_train = generate_data(1000)
+ x_train, y_train = generate_data(10000)
- x_test, y_test = generate_data(10)
+ x_test, y_test = generate_data(100)
- model.fit(x_train, y_train, epochs=1000, batch_size=32)
+ model.fit(x_train, y_train, epochs=30, batch_size=32)
- loss_and_metrics = model.evaluate(x_test, y_test, batch_size=128)
+ loss_and_metrics = model.evaluate(x_test, y_test, batch_size=16)
+ #print(loss_and_metrics)
- print(loss_and_metrics)
+ pred = model.predict(x_test, batch_size=32, verbose=0)
+ print("expected:")
+ print(y_test)
+ print("actual:")
+ print(pred)
+
+
|
Fix linear regression model loss calculation
|
## Code Before:
import random
from keras.models import Sequential
from keras.layers import Dense, Activation
import numpy as np
def generate_data(size):
""""
x_train = []
y_train = []
for i in range(size):
x = random.randint(0, 100)
y = 2*x
x_train.append(x)
y_train.append(y)
return np.array(x_train), np.array(y_train)
"""
import numpy as np
#data = np.random.random((10000, 100))
#labels = np.random.randint(2, size=(10000, 1))
data = np.random.random((10000, 1))
labels = data*2
return data, labels
model = Sequential()
model.add(Dense(1, input_dim=1))
model.compile(optimizer='rmsprop',
loss='binary_crossentropy',
metrics=['accuracy'])
x_train, y_train = generate_data(1000)
x_test, y_test = generate_data(10)
model.fit(x_train, y_train, epochs=1000, batch_size=32)
loss_and_metrics = model.evaluate(x_test, y_test, batch_size=128)
print(loss_and_metrics)
## Instruction:
Fix linear regression model loss calculation
## Code After:
import random
from keras.models import Sequential
from keras.layers import Dense, Activation
import numpy as np
def generate_data(size):
""""
x_train = []
y_train = []
for i in range(size):
x = random.randint(0, 100)
y = 2*x
x_train.append(x)
y_train.append(y)
return np.array(x_train), np.array(y_train)
"""
import numpy as np
#data = np.random.random((10000, 100))
#labels = np.random.randint(2, size=(10000, 1))
#data = np.random.random((10000, 2))
#labels = np.sum(data, (1,))
data = np.random.random((10000, 1))
labels = data*2
return data, labels
model = Sequential()
model.add(Dense(1, input_dim=1))
model.compile(optimizer='rmsprop',
loss='mse',
metrics=['accuracy'])
x_train, y_train = generate_data(10000)
x_test, y_test = generate_data(100)
model.fit(x_train, y_train, epochs=30, batch_size=32)
loss_and_metrics = model.evaluate(x_test, y_test, batch_size=16)
#print(loss_and_metrics)
pred = model.predict(x_test, batch_size=32, verbose=0)
print("expected:")
print(y_test)
print("actual:")
print(pred)
|
import random
from keras.models import Sequential
from keras.layers import Dense, Activation
import numpy as np
def generate_data(size):
""""
x_train = []
y_train = []
for i in range(size):
x = random.randint(0, 100)
y = 2*x
x_train.append(x)
y_train.append(y)
return np.array(x_train), np.array(y_train)
"""
import numpy as np
#data = np.random.random((10000, 100))
#labels = np.random.randint(2, size=(10000, 1))
+ #data = np.random.random((10000, 2))
+ #labels = np.sum(data, (1,))
data = np.random.random((10000, 1))
labels = data*2
return data, labels
model = Sequential()
model.add(Dense(1, input_dim=1))
model.compile(optimizer='rmsprop',
- loss='binary_crossentropy',
+ loss='mse',
metrics=['accuracy'])
- x_train, y_train = generate_data(1000)
+ x_train, y_train = generate_data(10000)
? +
- x_test, y_test = generate_data(10)
+ x_test, y_test = generate_data(100)
? +
- model.fit(x_train, y_train, epochs=1000, batch_size=32)
? ^^^
+ model.fit(x_train, y_train, epochs=30, batch_size=32)
? ^
- loss_and_metrics = model.evaluate(x_test, y_test, batch_size=128)
? ^^
+ loss_and_metrics = model.evaluate(x_test, y_test, batch_size=16)
? ^
+ #print(loss_and_metrics)
- print(loss_and_metrics)
+ pred = model.predict(x_test, batch_size=32, verbose=0)
+
+ print("expected:")
+ print(y_test)
+ print("actual:")
+ print(pred)
+
|
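The heart of the fix above: binary_crossentropy assumes sigmoid outputs and targets in [0, 1], so it cannot drive the y = 2x regression, while mean squared error can (the accuracy metric kept in the record is likewise not meaningful for regression). A minimal sketch of the corrected setup, mirroring the record's single-unit model (assumes Keras is installed):

from keras.models import Sequential
from keras.layers import Dense

model = Sequential()
model.add(Dense(1, input_dim=1))  # one linear unit learns y = w*x + b
model.compile(optimizer="rmsprop", loss="mse")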
a521c4a4a55437452a4a7d006ec8faea0521ea05
|
capstone/rl/learners/sarsa.py
|
capstone/rl/learners/sarsa.py
|
from ..learner import Learner
from ..policies import RandomPolicy
from ..value_functions import TabularQ
from ...utils import check_random_state
class Sarsa(Learner):
def __init__(self, env, policy=None, learning_rate=0.1, discount_factor=0.99,
n_episodes=1000, verbose=True, random_state=None):
super(Sarsa, self).__init__(env, n_episodes=n_episodes, verbose=verbose)
self.policy = policy
self.learning_rate = learning_rate
self.discount_factor = discount_factor
self.random_state = check_random_state(random_state)
self.policy = policy or RandomPolicy(env.actions, self.random_state)
self.qf = TabularQ(self.random_state)
###########
# Learner #
###########
def episode(self):
state = self.env.cur_state()
action = self.policy.action(state)
while not self.env.is_terminal():
reward, next_state = self.env.do_action(action)
next_action = self.policy.action(next_state)
target = reward + (self.discount_factor * self.qf[next_state, next_action])
td_error = target - self.qf[state, action]
self.qf[state, action] += self.learning_rate * td_error
state, action = next_state, next_action
|
from ..learner import Learner
from ..policies import RandomPolicy
from ..value_functions import TabularQ
from ...utils import check_random_state
class Sarsa(Learner):
def __init__(self, env, policy=None, learning_rate=0.1, discount_factor=0.99,
n_episodes=1000, verbose=True, random_state=None):
super(Sarsa, self).__init__(env, n_episodes=n_episodes, verbose=verbose)
self.policy = policy
self.learning_rate = learning_rate
self.discount_factor = discount_factor
self.random_state = check_random_state(random_state)
self.policy = policy or RandomPolicy(env.actions, random_state=self.random_state)
self.qf = TabularQ(random_state=self.random_state)
###########
# Learner #
###########
def episode(self):
state = self.env.cur_state()
action = self.policy.action(state)
while not self.env.is_terminal():
reward, next_state = self.env.do_action(action)
next_action = self.policy.action(next_state)
target = reward + (self.discount_factor * self.qf[next_state, next_action])
td_error = target - self.qf[state, action]
self.qf[state, action] += self.learning_rate * td_error
state, action = next_state, next_action
|
Create tabular q-function with kwarg random_state
|
Create tabular q-function with kwarg random_state
|
Python
|
mit
|
davidrobles/mlnd-capstone-code
|
from ..learner import Learner
from ..policies import RandomPolicy
from ..value_functions import TabularQ
from ...utils import check_random_state
class Sarsa(Learner):
def __init__(self, env, policy=None, learning_rate=0.1, discount_factor=0.99,
n_episodes=1000, verbose=True, random_state=None):
super(Sarsa, self).__init__(env, n_episodes=n_episodes, verbose=verbose)
self.policy = policy
self.learning_rate = learning_rate
self.discount_factor = discount_factor
self.random_state = check_random_state(random_state)
- self.policy = policy or RandomPolicy(env.actions, self.random_state)
+ self.policy = policy or RandomPolicy(env.actions, random_state=self.random_state)
- self.qf = TabularQ(self.random_state)
+ self.qf = TabularQ(random_state=self.random_state)
###########
# Learner #
###########
def episode(self):
state = self.env.cur_state()
action = self.policy.action(state)
while not self.env.is_terminal():
reward, next_state = self.env.do_action(action)
next_action = self.policy.action(next_state)
target = reward + (self.discount_factor * self.qf[next_state, next_action])
td_error = target - self.qf[state, action]
self.qf[state, action] += self.learning_rate * td_error
state, action = next_state, next_action
|
Create tabular q-function with kwarg random_state
|
## Code Before:
from ..learner import Learner
from ..policies import RandomPolicy
from ..value_functions import TabularQ
from ...utils import check_random_state
class Sarsa(Learner):
def __init__(self, env, policy=None, learning_rate=0.1, discount_factor=0.99,
n_episodes=1000, verbose=True, random_state=None):
super(Sarsa, self).__init__(env, n_episodes=n_episodes, verbose=verbose)
self.policy = policy
self.learning_rate = learning_rate
self.discount_factor = discount_factor
self.random_state = check_random_state(random_state)
self.policy = policy or RandomPolicy(env.actions, self.random_state)
self.qf = TabularQ(self.random_state)
###########
# Learner #
###########
def episode(self):
state = self.env.cur_state()
action = self.policy.action(state)
while not self.env.is_terminal():
reward, next_state = self.env.do_action(action)
next_action = self.policy.action(next_state)
target = reward + (self.discount_factor * self.qf[next_state, next_action])
td_error = target - self.qf[state, action]
self.qf[state, action] += self.learning_rate * td_error
state, action = next_state, next_action
## Instruction:
Create tabular q-function with kwarg random_state
## Code After:
from ..learner import Learner
from ..policies import RandomPolicy
from ..value_functions import TabularQ
from ...utils import check_random_state
class Sarsa(Learner):
def __init__(self, env, policy=None, learning_rate=0.1, discount_factor=0.99,
n_episodes=1000, verbose=True, random_state=None):
super(Sarsa, self).__init__(env, n_episodes=n_episodes, verbose=verbose)
self.policy = policy
self.learning_rate = learning_rate
self.discount_factor = discount_factor
self.random_state = check_random_state(random_state)
self.policy = policy or RandomPolicy(env.actions, random_state=self.random_state)
self.qf = TabularQ(random_state=self.random_state)
###########
# Learner #
###########
def episode(self):
state = self.env.cur_state()
action = self.policy.action(state)
while not self.env.is_terminal():
reward, next_state = self.env.do_action(action)
next_action = self.policy.action(next_state)
target = reward + (self.discount_factor * self.qf[next_state, next_action])
td_error = target - self.qf[state, action]
self.qf[state, action] += self.learning_rate * td_error
state, action = next_state, next_action
|
from ..learner import Learner
from ..policies import RandomPolicy
from ..value_functions import TabularQ
from ...utils import check_random_state
class Sarsa(Learner):
def __init__(self, env, policy=None, learning_rate=0.1, discount_factor=0.99,
n_episodes=1000, verbose=True, random_state=None):
super(Sarsa, self).__init__(env, n_episodes=n_episodes, verbose=verbose)
self.policy = policy
self.learning_rate = learning_rate
self.discount_factor = discount_factor
self.random_state = check_random_state(random_state)
- self.policy = policy or RandomPolicy(env.actions, self.random_state)
+ self.policy = policy or RandomPolicy(env.actions, random_state=self.random_state)
? +++++++++++++
- self.qf = TabularQ(self.random_state)
+ self.qf = TabularQ(random_state=self.random_state)
? +++++++++++++
###########
# Learner #
###########
def episode(self):
state = self.env.cur_state()
action = self.policy.action(state)
while not self.env.is_terminal():
reward, next_state = self.env.do_action(action)
next_action = self.policy.action(next_state)
target = reward + (self.discount_factor * self.qf[next_state, next_action])
td_error = target - self.qf[state, action]
self.qf[state, action] += self.learning_rate * td_error
state, action = next_state, next_action
|
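A minimal sketch of the seeding helper this change relies on, mirroring scikit-learn's check_random_state (the actual implementation behind the ...utils import is assumed, not shown above):

import numpy as np

def check_random_state(seed):
    # None -> fresh RandomState; int -> seeded; RandomState -> passed through
    if seed is None:
        return np.random.RandomState()
    if isinstance(seed, (int, np.integer)):
        return np.random.RandomState(seed)
    if isinstance(seed, np.random.RandomState):
        return seed
    raise ValueError('%r cannot be used to seed a RandomState' % seed)

learner = Sarsa(env, random_state=42)  # env: hypothetical environment object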
f17d9c3b45758c02f1f67cbec6709e42149369f5
|
packs/asserts/actions/object_equals.py
|
packs/asserts/actions/object_equals.py
|
import pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
|
import pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
def cmp(x, y):
return (x > y) - (x < y)
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
|
Make action python 3 compatible
|
Make action python 3 compatible
|
Python
|
apache-2.0
|
StackStorm/st2tests,StackStorm/st2tests,StackStorm/st2tests
|
import pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
+
+
+ def cmp(x, y):
+ return (x > y) - (x < y)
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
|
Make action python 3 compatible
|
## Code Before:
import pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
## Instruction:
Make action python 3 compatible
## Code After:
import pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
def cmp(x, y):
return (x > y) - (x < y)
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
|
import pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
+
+
+ def cmp(x, y):
+ return (x > y) - (x < y)
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
|
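Python 3 removed the cmp() builtin, and since bools are ints the (x > y) - (x < y) idiom reproduces its -1/0/1 contract:

assert cmp(1, 2) == -1
assert cmp('a', 'a') == 0
assert cmp([3], [1]) == 1
# note: unlike Python 2's cmp, this raises TypeError for unorderable
# operands (e.g. two dicts) on Python 3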
36a2051e8f0c36923d93e172d453ce0e6fe18512
|
src/tarsnapper/test.py
|
src/tarsnapper/test.py
|
from datetime import datetime
from expire import expire as default_expire_func
__all__ = ('BackupSimulator',)
try:
from collections import OrderedDict # Python 2.7
except ImportError:
# Install from: http://pypi.python.org/pypi/ordereddict
from ordereddict import OrderedDict
class BackupSimulator(object):
"""Helper to simulate making backups, and expire old ones, at
various points in time.
"""
def __init__(self, deltas, expire_func=default_expire_func):
self.deltas = deltas
self.expire_func = expire_func
self.now = datetime.now()
self.backups = OrderedDict()
def go_to(self, dt):
self.now = dt
def go_by(self, td):
self.now += td
def backup(self, expire=True):
self.backups[str(self.now)] = self.now
if expire:
return self.expire()
def expire(self):
keep = self.expire_func(self.backups, self.deltas)
deleted = []
for key in self.backups.keys():
if not key in keep:
deleted.append(key)
del self.backups[key]
return deleted, keep
|
from datetime import datetime
from expire import expire as default_expire_func
from config import parse_deltas
__all__ = ('BackupSimulator',)
try:
from collections import OrderedDict # Python 2.7
except ImportError:
# Install from: http://pypi.python.org/pypi/ordereddict
from ordereddict import OrderedDict
class BackupSimulator(object):
"""Helper to simulate making backups, and expire old ones, at
various points in time.
"""
def __init__(self, deltas, expire_func=default_expire_func):
if isinstance(deltas, basestring):
deltas = parse_deltas(deltas)
self.deltas = deltas
self.expire_func = expire_func
self.now = datetime.now()
self.backups = OrderedDict()
def add(self, backups):
for dt in backups:
if isinstance(dt, basestring):
dt = datetime.strptime(dt, "%Y%m%d-%H%M%S")
self.backups[str(dt)] = dt
def go_to(self, dt):
self.now = dt
def go_by(self, td):
self.now += td
def backup(self, expire=True):
self.add([self.now])
if expire:
return self.expire()
def expire(self):
keep = self.expire_func(self.backups, self.deltas)
deleted = []
for key in self.backups.keys():
if not key in keep:
deleted.append(key)
del self.backups[key]
return deleted, keep
|
Allow using strings for deltas and dates.
|
Allow using strings for deltas and dates.
|
Python
|
bsd-2-clause
|
jyrkij/tarsnapper
|
from datetime import datetime
from expire import expire as default_expire_func
+ from config import parse_deltas
__all__ = ('BackupSimulator',)
try:
from collections import OrderedDict # Python 2.7
except ImportError:
# Install from: http://pypi.python.org/pypi/ordereddict
from ordereddict import OrderedDict
class BackupSimulator(object):
"""Helper to simulate making backups, and expire old ones, at
various points in time.
"""
def __init__(self, deltas, expire_func=default_expire_func):
+ if isinstance(deltas, basestring):
+ deltas = parse_deltas(deltas)
self.deltas = deltas
self.expire_func = expire_func
self.now = datetime.now()
self.backups = OrderedDict()
+
+ def add(self, backups):
+ for dt in backups:
+ if isinstance(dt, basestring):
+ dt = datetime.strptime(dt, "%Y%m%d-%H%M%S")
+ self.backups[str(dt)] = dt
def go_to(self, dt):
self.now = dt
def go_by(self, td):
self.now += td
def backup(self, expire=True):
- self.backups[str(self.now)] = self.now
+ self.add([self.now])
if expire:
return self.expire()
def expire(self):
keep = self.expire_func(self.backups, self.deltas)
deleted = []
for key in self.backups.keys():
if not key in keep:
deleted.append(key)
del self.backups[key]
return deleted, keep
|
Allow using strings for deltas and dates.
|
## Code Before:
from datetime import datetime
from expire import expire as default_expire_func
__all__ = ('BackupSimulator',)
try:
from collections import OrderedDict # Python 2.7
except ImportError:
# Install from: http://pypi.python.org/pypi/ordereddict
from ordereddict import OrderedDict
class BackupSimulator(object):
"""Helper to simulate making backups, and expire old ones, at
various points in time.
"""
def __init__(self, deltas, expire_func=default_expire_func):
self.deltas = deltas
self.expire_func = expire_func
self.now = datetime.now()
self.backups = OrderedDict()
def go_to(self, dt):
self.now = dt
def go_by(self, td):
self.now += td
def backup(self, expire=True):
self.backups[str(self.now)] = self.now
if expire:
return self.expire()
def expire(self):
keep = self.expire_func(self.backups, self.deltas)
deleted = []
for key in self.backups.keys():
if not key in keep:
deleted.append(key)
del self.backups[key]
return deleted, keep
## Instruction:
Allow using strings for deltas and dates.
## Code After:
from datetime import datetime
from expire import expire as default_expire_func
from config import parse_deltas
__all__ = ('BackupSimulator',)
try:
from collections import OrderedDict # Python 2.7
except ImportError:
# Install from: http://pypi.python.org/pypi/ordereddict
from ordereddict import OrderedDict
class BackupSimulator(object):
"""Helper to simulate making backups, and expire old ones, at
various points in time.
"""
def __init__(self, deltas, expire_func=default_expire_func):
if isinstance(deltas, basestring):
deltas = parse_deltas(deltas)
self.deltas = deltas
self.expire_func = expire_func
self.now = datetime.now()
self.backups = OrderedDict()
def add(self, backups):
for dt in backups:
if isinstance(dt, basestring):
dt = datetime.strptime(dt, "%Y%m%d-%H%M%S")
self.backups[str(dt)] = dt
def go_to(self, dt):
self.now = dt
def go_by(self, td):
self.now += td
def backup(self, expire=True):
self.add([self.now])
if expire:
return self.expire()
def expire(self):
keep = self.expire_func(self.backups, self.deltas)
deleted = []
for key in self.backups.keys():
if not key in keep:
deleted.append(key)
del self.backups[key]
return deleted, keep
|
from datetime import datetime
from expire import expire as default_expire_func
+ from config import parse_deltas
__all__ = ('BackupSimulator',)
try:
from collections import OrderedDict # Python 2.7
except ImportError:
# Install from: http://pypi.python.org/pypi/ordereddict
from ordereddict import OrderedDict
class BackupSimulator(object):
"""Helper to simulate making backups, and expire old ones, at
various points in time.
"""
def __init__(self, deltas, expire_func=default_expire_func):
+ if isinstance(deltas, basestring):
+ deltas = parse_deltas(deltas)
self.deltas = deltas
self.expire_func = expire_func
self.now = datetime.now()
self.backups = OrderedDict()
+
+ def add(self, backups):
+ for dt in backups:
+ if isinstance(dt, basestring):
+ dt = datetime.strptime(dt, "%Y%m%d-%H%M%S")
+ self.backups[str(dt)] = dt
def go_to(self, dt):
self.now = dt
def go_by(self, td):
self.now += td
def backup(self, expire=True):
- self.backups[str(self.now)] = self.now
+ self.add([self.now])
if expire:
return self.expire()
def expire(self):
keep = self.expire_func(self.backups, self.deltas)
deleted = []
for key in self.backups.keys():
if not key in keep:
deleted.append(key)
del self.backups[key]
return deleted, keep
|
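A hedged usage sketch of the new string conveniences; the '1d 7d 30d' delta spelling is an assumption about parse_deltas, while the date format is the one hard-coded in add():

sim = BackupSimulator('1d 7d 30d')               # parsed via parse_deltas
sim.add(['20240101-120000', '20240102-120000'])  # strptime('%Y%m%d-%H%M%S')
deleted, kept = sim.expire()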
d000a2e3991c54b319bc7166d9d178b739170a46
|
polling_stations/apps/data_collection/management/commands/import_sheffield.py
|
polling_stations/apps/data_collection/management/commands/import_sheffield.py
|
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
"""
Imports the Polling Station data from Sheffield
"""
council_id = 'E08000019'
districts_name = 'SCCPollingDistricts2015'
stations_name = 'SCCPollingStations2015.shp'
def district_record_to_dict(self, record):
return {
'internal_council_id': record[0],
'name': record[1],
}
def station_record_to_dict(self, record):
address = record[1]
# remove postcode from end of address if present
postcode_offset = -len(record[2])
if address[postcode_offset:] == record[2]:
address = address[:postcode_offset].strip()
# remove trailing comma if present
if address[-1:] == ',':
address = address[:-1]
# replace commas with \n
address = "\n".join(map(lambda x: x.strip(), address.split(',')))
return {
'internal_council_id': record[0],
'postcode' : record[2],
'address' : address
}
|
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
"""
Imports the Polling Station data from Sheffield
"""
council_id = 'E08000019'
districts_name = 'SCCPollingDistricts2015'
stations_name = 'SCCPollingStations2015.shp'
def district_record_to_dict(self, record):
return {
'internal_council_id': record[1],
'extra_id': record[0],
'name': record[1],
}
def station_record_to_dict(self, record):
address = record[1]
# remove postcode from end of address if present
postcode_offset = -len(record[2])
if address[postcode_offset:] == record[2]:
address = address[:postcode_offset].strip()
# remove trailing comma if present
if address[-1:] == ',':
address = address[:-1]
# replace commas with \n
address = "\n".join(map(lambda x: x.strip(), address.split(',')))
return {
'internal_council_id': record[0],
'postcode' : record[2],
'address' : address,
'polling_district_id': record[-1]
}
|
Add polling_district_id in Sheffield import script
|
Add polling_district_id in Sheffield import script
|
Python
|
bsd-3-clause
|
DemocracyClub/UK-Polling-Stations,andylolz/UK-Polling-Stations,andylolz/UK-Polling-Stations,andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations
|
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
"""
Imports the Polling Station data from Sheffield
"""
council_id = 'E08000019'
districts_name = 'SCCPollingDistricts2015'
stations_name = 'SCCPollingStations2015.shp'
def district_record_to_dict(self, record):
return {
- 'internal_council_id': record[0],
+ 'internal_council_id': record[1],
+ 'extra_id': record[0],
- 'name': record[1],
+ 'name': record[1],
}
def station_record_to_dict(self, record):
address = record[1]
# remove postcode from end of address if present
postcode_offset = -len(record[2])
if address[postcode_offset:] == record[2]:
address = address[:postcode_offset].strip()
# remove trailing comma if present
if address[-1:] == ',':
address = address[:-1]
# replace commas with \n
address = "\n".join(map(lambda x: x.strip(), address.split(',')))
return {
'internal_council_id': record[0],
'postcode' : record[2],
- 'address' : address
+ 'address' : address,
+ 'polling_district_id': record[-1]
}
|
Add polling_district_id in Sheffield import script
|
## Code Before:
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
"""
Imports the Polling Station data from Sheffield
"""
council_id = 'E08000019'
districts_name = 'SCCPollingDistricts2015'
stations_name = 'SCCPollingStations2015.shp'
def district_record_to_dict(self, record):
return {
'internal_council_id': record[0],
'name': record[1],
}
def station_record_to_dict(self, record):
address = record[1]
# remove postcode from end of address if present
postcode_offset = -len(record[2])
if address[postcode_offset:] == record[2]:
address = address[:postcode_offset].strip()
# remove trailing comma if present
if address[-1:] == ',':
address = address[:-1]
# replace commas with \n
address = "\n".join(map(lambda x: x.strip(), address.split(',')))
return {
'internal_council_id': record[0],
'postcode' : record[2],
'address' : address
}
## Instruction:
Add polling_district_id in Sheffield import script
## Code After:
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
"""
Imports the Polling Station data from Sheffield
"""
council_id = 'E08000019'
districts_name = 'SCCPollingDistricts2015'
stations_name = 'SCCPollingStations2015.shp'
def district_record_to_dict(self, record):
return {
'internal_council_id': record[1],
'extra_id': record[0],
'name': record[1],
}
def station_record_to_dict(self, record):
address = record[1]
# remove postcode from end of address if present
postcode_offset = -len(record[2])
if address[postcode_offset:] == record[2]:
address = address[:postcode_offset].strip()
# remove trailing comma if present
if address[-1:] == ',':
address = address[:-1]
# replace commas with \n
address = "\n".join(map(lambda x: x.strip(), address.split(',')))
return {
'internal_council_id': record[0],
'postcode' : record[2],
'address' : address,
'polling_district_id': record[-1]
}
|
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
"""
Imports the Polling Station data from Sheffield
"""
council_id = 'E08000019'
districts_name = 'SCCPollingDistricts2015'
stations_name = 'SCCPollingStations2015.shp'
def district_record_to_dict(self, record):
return {
- 'internal_council_id': record[0],
? ^
+ 'internal_council_id': record[1],
? ^
+ 'extra_id': record[0],
- 'name': record[1],
+ 'name': record[1],
? +++++++++++++++
}
def station_record_to_dict(self, record):
address = record[1]
# remove postcode from end of address if present
postcode_offset = -len(record[2])
if address[postcode_offset:] == record[2]:
address = address[:postcode_offset].strip()
# remove trailing comma if present
if address[-1:] == ',':
address = address[:-1]
# replace commas with \n
address = "\n".join(map(lambda x: x.strip(), address.split(',')))
return {
'internal_council_id': record[0],
'postcode' : record[2],
- 'address' : address
+ 'address' : address,
? +
+ 'polling_district_id': record[-1]
}
|
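The address clean-up in station_record_to_dict is order-sensitive; traced on a made-up record:

address, postcode = '18 Foo Street, Sheffield, S1 2AB', 'S1 2AB'
if address[-len(postcode):] == postcode:
    address = address[:-len(postcode)].strip()   # '18 Foo Street, Sheffield,'
if address[-1:] == ',':
    address = address[:-1]                       # '18 Foo Street, Sheffield'
address = "\n".join(s.strip() for s in address.split(','))
# -> '18 Foo Street\nSheffield'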
4ea6a11341c2bbd978d5e0e416c398a442158da6
|
whip/web.py
|
whip/web.py
|
# pylint: disable=missing-docstring
from socket import inet_aton
from flask import Flask, abort, make_response, request
from .db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS', silent=True)
db = None
@app.before_first_request
def _open_db():
global db # pylint: disable=global-statement
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
try:
key = inet_aton(ip)
except OSError:
abort(400)
datetime = request.args.get('datetime')
info_as_json = db.lookup(key, datetime)
if info_as_json is None:
info_as_json = b'{}' # empty dict, JSON-encoded
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
|
# pylint: disable=missing-docstring
from flask import Flask, make_response, request
from .db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS', silent=True)
db = None
@app.before_first_request
def _open_db():
global db # pylint: disable=global-statement
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
datetime = request.args.get('datetime')
info_as_json = db.lookup(ip, datetime)
if info_as_json is None:
info_as_json = b'{}' # empty dict, JSON-encoded
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
|
Handle IPv6 in REST API
|
Handle IPv6 in REST API
|
Python
|
bsd-3-clause
|
wbolster/whip
|
# pylint: disable=missing-docstring
- from socket import inet_aton
-
- from flask import Flask, abort, make_response, request
+ from flask import Flask, make_response, request
from .db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS', silent=True)
db = None
@app.before_first_request
def _open_db():
global db # pylint: disable=global-statement
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
- try:
- key = inet_aton(ip)
- except OSError:
- abort(400)
-
datetime = request.args.get('datetime')
- info_as_json = db.lookup(key, datetime)
+ info_as_json = db.lookup(ip, datetime)
if info_as_json is None:
info_as_json = b'{}' # empty dict, JSON-encoded
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
|
Handle IPv6 in REST API
|
## Code Before:
# pylint: disable=missing-docstring
from socket import inet_aton
from flask import Flask, abort, make_response, request
from .db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS', silent=True)
db = None
@app.before_first_request
def _open_db():
global db # pylint: disable=global-statement
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
try:
key = inet_aton(ip)
except OSError:
abort(400)
datetime = request.args.get('datetime')
info_as_json = db.lookup(key, datetime)
if info_as_json is None:
info_as_json = b'{}' # empty dict, JSON-encoded
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
## Instruction:
Handle IPv6 in REST API
## Code After:
# pylint: disable=missing-docstring
from flask import Flask, make_response, request
from .db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS', silent=True)
db = None
@app.before_first_request
def _open_db():
global db # pylint: disable=global-statement
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
datetime = request.args.get('datetime')
info_as_json = db.lookup(ip, datetime)
if info_as_json is None:
info_as_json = b'{}' # empty dict, JSON-encoded
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
|
# pylint: disable=missing-docstring
- from socket import inet_aton
-
- from flask import Flask, abort, make_response, request
? -------
+ from flask import Flask, make_response, request
from .db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS', silent=True)
db = None
@app.before_first_request
def _open_db():
global db # pylint: disable=global-statement
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
- try:
- key = inet_aton(ip)
- except OSError:
- abort(400)
-
datetime = request.args.get('datetime')
- info_as_json = db.lookup(key, datetime)
? ^^^
+ info_as_json = db.lookup(ip, datetime)
? ^^
if info_as_json is None:
info_as_json = b'{}' # empty dict, JSON-encoded
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
|
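With the inet_aton guard gone, address validation presumably lives in Database.lookup, so a single route serves both address families; a test-client sketch (assumes WHIP_SETTINGS points at a populated database):

with app.test_client() as client:
    r4 = client.get('/ip/192.0.2.1')
    r6 = client.get('/ip/2001:db8::1')   # previously rejected by inet_aton
    assert r4.headers['Content-type'] == 'application/json'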
c286722965ce7f5ea9acc201aa9cf289cfe16105
|
openstackclient/tests/functional/common/test_availability_zone.py
|
openstackclient/tests/functional/common/test_availability_zone.py
|
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
HEADERS = ["'Zone Name'"]
# So far, all components have the same default availability zone name.
DEFAULT_AZ_NAME = 'nova'
def test_availability_zone_list(self):
opts = self.get_opts(self.HEADERS)
raw_output = self.openstack('availability zone list' + opts)
self.assertIn(self.DEFAULT_AZ_NAME, raw_output)
|
import json
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
def test_availability_zone_list(self):
cmd_output = json.loads(self.openstack(
'availability zone list -f json'))
zones = [x['Zone Name'] for x in cmd_output]
self.assertIn(
'internal',
zones
)
self.assertIn(
'nova',
zones
)
|
Refactor availability zone functional test
|
Refactor availability zone functional test
Using json format output in availability zone list functional test
Change-Id: I7098b1c3bee680e47e414dcb4fa272628cdec1eb
|
Python
|
apache-2.0
|
dtroyer/python-openstackclient,openstack/python-openstackclient,dtroyer/python-openstackclient,openstack/python-openstackclient
|
+
+ import json
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
- HEADERS = ["'Zone Name'"]
- # So far, all components have the same default availability zone name.
- DEFAULT_AZ_NAME = 'nova'
def test_availability_zone_list(self):
- opts = self.get_opts(self.HEADERS)
- raw_output = self.openstack('availability zone list' + opts)
- self.assertIn(self.DEFAULT_AZ_NAME, raw_output)
+ cmd_output = json.loads(self.openstack(
+ 'availability zone list -f json'))
+ zones = [x['Zone Name'] for x in cmd_output]
+ self.assertIn(
+ 'internal',
+ zones
+ )
+ self.assertIn(
+ 'nova',
+ zones
+ )
|
Refactor availability zone functional test
|
## Code Before:
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
HEADERS = ["'Zone Name'"]
# So far, all components have the same default availability zone name.
DEFAULT_AZ_NAME = 'nova'
def test_availability_zone_list(self):
opts = self.get_opts(self.HEADERS)
raw_output = self.openstack('availability zone list' + opts)
self.assertIn(self.DEFAULT_AZ_NAME, raw_output)
## Instruction:
Refactor availability zone functional test
## Code After:
import json
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
def test_availability_zone_list(self):
cmd_output = json.loads(self.openstack(
'availability zone list -f json'))
zones = [x['Zone Name'] for x in cmd_output]
self.assertIn(
'internal',
zones
)
self.assertIn(
'nova',
zones
)
|
+
+ import json
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
- HEADERS = ["'Zone Name'"]
- # So far, all components have the same default availability zone name.
- DEFAULT_AZ_NAME = 'nova'
def test_availability_zone_list(self):
- opts = self.get_opts(self.HEADERS)
- raw_output = self.openstack('availability zone list' + opts)
- self.assertIn(self.DEFAULT_AZ_NAME, raw_output)
+ cmd_output = json.loads(self.openstack(
+ 'availability zone list -f json'))
+ zones = [x['Zone Name'] for x in cmd_output]
+ self.assertIn(
+ 'internal',
+ zones
+ )
+ self.assertIn(
+ 'nova',
+ zones
+ )
|
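Using -f json makes the assertion independent of column order and table borders; the same extraction on a canned sample:

import json
sample = '[{"Zone Name": "internal"}, {"Zone Name": "nova"}]'
zones = [row['Zone Name'] for row in json.loads(sample)]
assert 'internal' in zones and 'nova' in zones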
446680c789ad970316209eeecc947d8e5afddeb7
|
jenny/__init__.py
|
jenny/__init__.py
|
import pandoc
import subprocess
def compile(content, input_format, output_format, *args):
subprocess_arguments = ['pandoc',
'--from=%s' % input_format,
'--to=%s' % output_format]
subprocess_arguments.extend(args)
p = subprocess.Popen(
subprocess_arguments,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE
)
return p.communicate(content)[0]
|
import six
import pandoc
import subprocess
def compile(content, input_format, output_format, *args):
if six.PY2 and isinstance(content, unicode):
content = content.encode("utf8")
subprocess_arguments = ['pandoc',
'--from=%s' % input_format,
'--to=%s' % output_format]
subprocess_arguments.extend(args)
p = subprocess.Popen(
subprocess_arguments,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE
)
return p.communicate(content)[0]
|
Fix a bug on encoding.
|
Fix a bug on encoding.
|
Python
|
mit
|
docloud/jenny
|
+ import six
import pandoc
import subprocess
def compile(content, input_format, output_format, *args):
+ if six.PY2 and isinstance(content, unicode):
+ content = content.encode("utf8")
subprocess_arguments = ['pandoc',
'--from=%s' % input_format,
'--to=%s' % output_format]
subprocess_arguments.extend(args)
p = subprocess.Popen(
subprocess_arguments,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE
)
return p.communicate(content)[0]
|
Fix a bug on encoding.
|
## Code Before:
import pandoc
import subprocess
def compile(content, input_format, output_format, *args):
subprocess_arguments = ['pandoc',
'--from=%s' % input_format,
'--to=%s' % output_format]
subprocess_arguments.extend(args)
p = subprocess.Popen(
subprocess_arguments,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE
)
return p.communicate(content)[0]
## Instruction:
Fix a bug on encoding.
## Code After:
import six
import pandoc
import subprocess
def compile(content, input_format, output_format, *args):
if six.PY2 and isinstance(content, unicode):
content = content.encode("utf8")
subprocess_arguments = ['pandoc',
'--from=%s' % input_format,
'--to=%s' % output_format]
subprocess_arguments.extend(args)
p = subprocess.Popen(
subprocess_arguments,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE
)
return p.communicate(content)[0]
|
+ import six
import pandoc
import subprocess
def compile(content, input_format, output_format, *args):
+ if six.PY2 and isinstance(content, unicode):
+ content = content.encode("utf8")
subprocess_arguments = ['pandoc',
'--from=%s' % input_format,
'--to=%s' % output_format]
subprocess_arguments.extend(args)
p = subprocess.Popen(
subprocess_arguments,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE
)
return p.communicate(content)[0]
|
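The guard matters because Popen.communicate() expects bytes on stdin: on Python 2, a unicode payload containing non-ASCII text would be coerced through the ASCII codec and raise UnicodeEncodeError. Hedged example:

html = compile(u'caf\xe9 au lait', 'markdown', 'html')  # safe after the fix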
2b0a11a1adf4167fb55f9b90fc87a8b8518a24a7
|
atmo/apps.py
|
atmo/apps.py
|
from django.apps import AppConfig
from django.conf import settings
import session_csrf
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under its own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Under some circumstances (e.g. when calling collectstatic)
# REDIS_URL is not available and we can skip the job schedule registration.
if getattr(settings, 'REDIS_URL'):
# This module contains references to some orm models, so it's
# safer to import it here.
from .schedule import register_job_schedule
# Register rq scheduled jobs
register_job_schedule()
|
from django.apps import AppConfig
from django.conf import settings
import session_csrf
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under its own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Under some circumstances (e.g. when calling collectstatic)
# REDIS_URL is not available and we can skip the job schedule registration.
if settings.REDIS_URL.hostname:
# This module contains references to some orm models, so it's
# safer to import it here.
from .schedule import register_job_schedule
# Register rq scheduled jobs
register_job_schedule()
|
Fix rq jobs registration check
|
Fix rq jobs registration check
|
Python
|
mpl-2.0
|
mozilla/telemetry-analysis-service,mozilla/telemetry-analysis-service,mozilla/telemetry-analysis-service,mozilla/telemetry-analysis-service
|
from django.apps import AppConfig
from django.conf import settings
import session_csrf
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under its own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Under some circumstances (e.g. when calling collectstatic)
# REDIS_URL is not available and we can skip the job schedule registration.
- if getattr(settings, 'REDIS_URL'):
+ if settings.REDIS_URL.hostname:
# This module contains references to some orm models, so it's
# safer to import it here.
from .schedule import register_job_schedule
# Register rq scheduled jobs
register_job_schedule()
|
Fix rq jobs registration check
|
## Code Before:
from django.apps import AppConfig
from django.conf import settings
import session_csrf
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under its own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Under some circumstances (e.g. when calling collectstatic)
# REDIS_URL is not available and we can skip the job schedule registration.
if getattr(settings, 'REDIS_URL'):
# This module contains references to some orm models, so it's
# safer to import it here.
from .schedule import register_job_schedule
# Register rq scheduled jobs
register_job_schedule()
## Instruction:
Fix rq jobs registration check
## Code After:
from django.apps import AppConfig
from django.conf import settings
import session_csrf
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under its own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Under some circumstances (e.g. when calling collectstatic)
# REDIS_URL is not available and we can skip the job schedule registration.
if settings.REDIS_URL.hostname:
# This module contains references to some orm models, so it's
# safer to import it here.
from .schedule import register_job_schedule
# Register rq scheduled jobs
register_job_schedule()
|
from django.apps import AppConfig
from django.conf import settings
import session_csrf
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under its own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Under some circumstances (e.g. when calling collectstatic)
# REDIS_URL is not available and we can skip the job schedule registration.
- if getattr(settings, 'REDIS_URL'):
+ if settings.REDIS_URL.hostname:
# This module contains references to some orm models, so it's
# safer to import it here.
from .schedule import register_job_schedule
# Register rq scheduled jobs
register_job_schedule()
|
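Why the hostname test: getattr(settings, 'REDIS_URL') with no default either returns the value or raises, and a parsed URL object is truthy even when empty, so the old branch always ran. Assuming REDIS_URL is a urlparse result:

from urllib.parse import urlparse   # six.moves.urllib.parse on Python 2
empty = urlparse('')
assert bool(empty) is True          # a 6-field namedtuple is always truthy
assert empty.hostname is None       # hostname is the reliable signal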
f12bf6096e607e090da7c4e80be2bed3afb5ff5a
|
crmapp/contacts/urls.py
|
crmapp/contacts/urls.py
|
from django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
)
|
from django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
url(r'^edit/$',
'crmapp.contacts.views.contact_cru', name='contact_update'
),
)
|
Create the Contacts App - Part II > Edit Contact - Create URL
|
Create the Contacts App - Part II > Edit Contact - Create URL
|
Python
|
mit
|
tabdon/crmeasyapp,tabdon/crmeasyapp,deenaariff/Django
|
from django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
+ url(r'^edit/$',
+ 'crmapp.contacts.views.contact_cru', name='contact_update'
+ ),
)
|
Create the Contacts App - Part II > Edit Contact - Create URL
|
## Code Before:
from django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
)
## Instruction:
Create the Contacts App - Part II > Edit Contact - Create URL
## Code After:
from django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
url(r'^edit/$',
'crmapp.contacts.views.contact_cru', name='contact_update'
),
)
|
from django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
+ url(r'^edit/$',
+ 'crmapp.contacts.views.contact_cru', name='contact_update'
+ ),
)
|
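A reverse() sketch for the new route (django.core.urlresolvers matches the patterns()-era Django used here); add args/kwargs if the including URLconf captures any:

from django.core.urlresolvers import reverse
edit_url = reverse('contact_update')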
b6a2ba81c9ddd642cfa271cab809a5c2511f7204
|
app/auth/forms.py
|
app/auth/forms.py
|
from flask_wtf import Form
from wtforms import (
StringField, PasswordField, BooleanField, SubmitField,
ValidationError,
)
from wtforms.validators import (
InputRequired, Length, Email, Regexp, EqualTo,
)
from app.models import User
class LoginForm(Form):
email = StringField('Email', validators=[
InputRequired(), Length(1, 64), Email()])
password = PasswordField('Senha', validators=[InputRequired()])
remember_me = BooleanField('Lembrar')
submit = SubmitField('Log In')
class RegistrationForm(Form):
email = StringField('Email', validators=[
InputRequired(), Length(1, 64), Email()])
username = StringField('Username', validators=[
InputRequired(), Length(1, 64)])
password = PasswordField('Senha', validators=[
InputRequired(), EqualTo('password2',
message='Senhas devem ser iguais')])
password2 = PasswordField('Confirmar senha', validators=[InputRequired()])
submit = SubmitField('Registrar')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise ValidationError('Esse email já está em uso!')
def validate_username(self, field):
if User.query.filter_by(username=field.data).first():
raise ValidationError('Esse usuário já está em uso!')
|
from flask_wtf import FlaskForm
from wtforms import (
StringField, PasswordField, BooleanField, SubmitField,
ValidationError,
)
from wtforms.validators import (
InputRequired, Length, Email, Regexp, EqualTo,
)
from app.models import User
class LoginForm(FlaskForm):
email = StringField('Email', validators=[
InputRequired(), Length(1, 64), Email()])
password = PasswordField('Senha', validators=[InputRequired()])
remember_me = BooleanField('Lembrar')
submit = SubmitField('Log In')
class RegistrationForm(FlaskForm):
email = StringField('Email', validators=[
InputRequired(), Length(1, 64), Email()])
username = StringField('Username', validators=[
InputRequired(), Length(1, 64)])
password = PasswordField('Senha', validators=[
InputRequired(), EqualTo('password2',
message='Senhas devem ser iguais')])
password2 = PasswordField('Confirmar senha', validators=[InputRequired()])
submit = SubmitField('Registrar')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise ValidationError('Esse email já está em uso!')
def validate_username(self, field):
if User.query.filter_by(username=field.data).first():
raise ValidationError('Esse usuário já está em uso!')
|
Change Form to FlaskForm (previous is deprecated)
|
:art: Change Form to FlaskForm (previous is deprecated)
|
Python
|
mit
|
gems-uff/labsys,gems-uff/labsys,gems-uff/labsys
|
- from flask_wtf import Form
+ from flask_wtf import FlaskForm
from wtforms import (
StringField, PasswordField, BooleanField, SubmitField,
ValidationError,
)
from wtforms.validators import (
InputRequired, Length, Email, Regexp, EqualTo,
)
from app.models import User
- class LoginForm(Form):
+ class LoginForm(FlaskForm):
email = StringField('Email', validators=[
InputRequired(), Length(1, 64), Email()])
password = PasswordField('Senha', validators=[InputRequired()])
remember_me = BooleanField('Lembrar')
submit = SubmitField('Log In')
- class RegistrationForm(Form):
+ class RegistrationForm(FlaskForm):
email = StringField('Email', validators=[
InputRequired(), Length(1, 64), Email()])
username = StringField('Username', validators=[
InputRequired(), Length(1, 64)])
password = PasswordField('Senha', validators=[
InputRequired(), EqualTo('password2',
message='Senhas devem ser iguais')])
password2 = PasswordField('Confirmar senha', validators=[InputRequired()])
submit = SubmitField('Registrar')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise ValidationError('Esse email já está em uso!')
def validate_username(self, field):
if User.query.filter_by(username=field.data).first():
raise ValidationError('Esse usuário já está em uso!')
|
Change Form to FlaskForm (previous is deprecated)
|
## Code Before:
from flask_wtf import Form
from wtforms import (
StringField, PasswordField, BooleanField, SubmitField,
ValidationError,
)
from wtforms.validators import (
InputRequired, Length, Email, Regexp, EqualTo,
)
from app.models import User
class LoginForm(Form):
email = StringField('Email', validators=[
InputRequired(), Length(1, 64), Email()])
password = PasswordField('Senha', validators=[InputRequired()])
remember_me = BooleanField('Lembrar')
submit = SubmitField('Log In')
class RegistrationForm(Form):
email = StringField('Email', validators=[
InputRequired(), Length(1, 64), Email()])
username = StringField('Username', validators=[
InputRequired(), Length(1, 64)])
password = PasswordField('Senha', validators=[
InputRequired(), EqualTo('password2',
message='Senhas devem ser iguais')])
password2 = PasswordField('Confirmar senha', validators=[InputRequired()])
submit = SubmitField('Registrar')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise ValidationError('Esse email já está em uso!')
def validate_username(self, field):
if User.query.filter_by(username=field.data).first():
raise ValidationError('Esse usuário já está em uso!')
## Instruction:
Change Form to FlaskForm (previous is deprecated)
## Code After:
from flask_wtf import FlaskForm
from wtforms import (
StringField, PasswordField, BooleanField, SubmitField,
ValidationError,
)
from wtforms.validators import (
InputRequired, Length, Email, Regexp, EqualTo,
)
from app.models import User
class LoginForm(FlaskForm):
email = StringField('Email', validators=[
InputRequired(), Length(1, 64), Email()])
password = PasswordField('Senha', validators=[InputRequired()])
remember_me = BooleanField('Lembrar')
submit = SubmitField('Log In')
class RegistrationForm(FlaskForm):
email = StringField('Email', validators=[
InputRequired(), Length(1, 64), Email()])
username = StringField('Username', validators=[
InputRequired(), Length(1, 64)])
password = PasswordField('Senha', validators=[
InputRequired(), EqualTo('password2',
message='Senhas devem ser iguais')])
password2 = PasswordField('Confirmar senha', validators=[InputRequired()])
submit = SubmitField('Registrar')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise ValidationError('Esse email já está em uso!')
def validate_username(self, field):
if User.query.filter_by(username=field.data).first():
raise ValidationError('Esse usuário já está em uso!')
|
- from flask_wtf import Form
+ from flask_wtf import FlaskForm
? +++++
from wtforms import (
StringField, PasswordField, BooleanField, SubmitField,
ValidationError,
)
from wtforms.validators import (
InputRequired, Length, Email, Regexp, EqualTo,
)
from app.models import User
- class LoginForm(Form):
+ class LoginForm(FlaskForm):
? +++++
email = StringField('Email', validators=[
InputRequired(), Length(1, 64), Email()])
password = PasswordField('Senha', validators=[InputRequired()])
remember_me = BooleanField('Lembrar')
submit = SubmitField('Log In')
- class RegistrationForm(Form):
+ class RegistrationForm(FlaskForm):
? +++++
email = StringField('Email', validators=[
InputRequired(), Length(1, 64), Email()])
username = StringField('Username', validators=[
InputRequired(), Length(1, 64)])
password = PasswordField('Senha', validators=[
InputRequired(), EqualTo('password2',
message='Senhas devem ser iguais')])
password2 = PasswordField('Confirmar senha', validators=[InputRequired()])
submit = SubmitField('Registrar')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise ValidationError('Esse email já está em uso!')
def validate_username(self, field):
if User.query.filter_by(username=field.data).first():
raise ValidationError('Esse usuário já está em uso!')
|
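Flask-WTF renamed Form to FlaskForm in 0.13; the old name emitted a deprecation warning before being dropped in a later release. New forms follow the same pattern:

from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms.validators import InputRequired

class SearchForm(FlaskForm):   # hypothetical extra form, not part of the commit
    query = StringField('Busca', validators=[InputRequired()])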
eae216cc2d1bbe6e1c1aab1c4cf53d57b29b057c
|
froide/helper/csv_utils.py
|
froide/helper/csv_utils.py
|
from django.utils import six
from django.http import StreamingHttpResponse
def export_csv_response(queryset, fields, name='export.csv'):
response = StreamingHttpResponse(export_csv(queryset, fields),
content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="%s"' % name
return response
class FakeFile(object):
# unicodecsv doesn't return values
# so temp store them in here
def write(self, string):
self._last_string = string
if six.PY3:
self._last_string = self._last_string.encode('utf-8')
def export_csv(queryset, fields):
if six.PY3:
import csv
else:
import unicodecsv as csv
f = FakeFile()
writer = csv.DictWriter(f, fields)
writer.writeheader()
yield f._last_string
for obj in queryset:
if hasattr(obj, 'get_dict'):
d = obj.get_dict(fields)
else:
d = {}
for field in fields:
value = getattr(obj, field, '')
if value is None:
d[field] = ""
else:
d[field] = six.text_type(value)
writer.writerow(d)
yield f._last_string
def export_csv_bytes(generator):
return six.binary_type().join(generator)
|
from django.utils import six
from django.http import StreamingHttpResponse
def export_csv_response(generator, name='export.csv'):
response = StreamingHttpResponse(generator, content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="%s"' % name
return response
class FakeFile(object):
# unicodecsv doesn't return values
# so temp store them in here
def write(self, string):
self._last_string = string
if six.PY3:
self._last_string = self._last_string.encode('utf-8')
def export_csv(queryset, fields):
if six.PY3:
import csv
else:
import unicodecsv as csv
f = FakeFile()
writer = csv.DictWriter(f, fields)
writer.writeheader()
yield f._last_string
for obj in queryset:
if hasattr(obj, 'get_dict'):
d = obj.get_dict(fields)
else:
d = {}
for field in fields:
value = getattr(obj, field, '')
if value is None:
d[field] = ""
else:
d[field] = six.text_type(value)
writer.writerow(d)
yield f._last_string
def export_csv_bytes(generator):
return six.binary_type().join(generator)
|
Fix export_csv_response function to take generator
|
Fix export_csv_response function to take generator
|
Python
|
mit
|
LilithWittmann/froide,catcosmo/froide,fin/froide,stefanw/froide,catcosmo/froide,catcosmo/froide,ryankanno/froide,catcosmo/froide,catcosmo/froide,ryankanno/froide,okfse/froide,fin/froide,CodeforHawaii/froide,CodeforHawaii/froide,okfse/froide,stefanw/froide,stefanw/froide,LilithWittmann/froide,okfse/froide,ryankanno/froide,ryankanno/froide,CodeforHawaii/froide,LilithWittmann/froide,LilithWittmann/froide,okfse/froide,stefanw/froide,LilithWittmann/froide,CodeforHawaii/froide,ryankanno/froide,fin/froide,fin/froide,CodeforHawaii/froide,stefanw/froide,okfse/froide
|
from django.utils import six
from django.http import StreamingHttpResponse
- def export_csv_response(queryset, fields, name='export.csv'):
+ def export_csv_response(generator, name='export.csv'):
+ response = StreamingHttpResponse(generator, content_type='text/csv')
- response = StreamingHttpResponse(export_csv(queryset, fields),
- content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="%s"' % name
return response
class FakeFile(object):
# unicodecsv doesn't return values
# so temp store them in here
def write(self, string):
self._last_string = string
if six.PY3:
self._last_string = self._last_string.encode('utf-8')
def export_csv(queryset, fields):
if six.PY3:
import csv
else:
import unicodecsv as csv
f = FakeFile()
writer = csv.DictWriter(f, fields)
writer.writeheader()
yield f._last_string
for obj in queryset:
if hasattr(obj, 'get_dict'):
d = obj.get_dict(fields)
else:
d = {}
for field in fields:
value = getattr(obj, field, '')
if value is None:
d[field] = ""
else:
d[field] = six.text_type(value)
writer.writerow(d)
yield f._last_string
def export_csv_bytes(generator):
return six.binary_type().join(generator)
|
Fix export_csv_response function to take generator
|
## Code Before:
from django.utils import six
from django.http import StreamingHttpResponse
def export_csv_response(queryset, fields, name='export.csv'):
response = StreamingHttpResponse(export_csv(queryset, fields),
content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="%s"' % name
return response
class FakeFile(object):
# unicodecsv doesn't return values
# so temp store them in here
def write(self, string):
self._last_string = string
if six.PY3:
self._last_string = self._last_string.encode('utf-8')
def export_csv(queryset, fields):
if six.PY3:
import csv
else:
import unicodecsv as csv
f = FakeFile()
writer = csv.DictWriter(f, fields)
writer.writeheader()
yield f._last_string
for obj in queryset:
if hasattr(obj, 'get_dict'):
d = obj.get_dict(fields)
else:
d = {}
for field in fields:
value = getattr(obj, field, '')
if value is None:
d[field] = ""
else:
d[field] = six.text_type(value)
writer.writerow(d)
yield f._last_string
def export_csv_bytes(generator):
return six.binary_type().join(generator)
## Instruction:
Fix export_csv_response function to take generator
## Code After:
from django.utils import six
from django.http import StreamingHttpResponse
def export_csv_response(generator, name='export.csv'):
response = StreamingHttpResponse(generator, content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="%s"' % name
return response
class FakeFile(object):
# unicodecsv doesn't return values
# so temp store them in here
def write(self, string):
self._last_string = string
if six.PY3:
self._last_string = self._last_string.encode('utf-8')
def export_csv(queryset, fields):
if six.PY3:
import csv
else:
import unicodecsv as csv
f = FakeFile()
writer = csv.DictWriter(f, fields)
writer.writeheader()
yield f._last_string
for obj in queryset:
if hasattr(obj, 'get_dict'):
d = obj.get_dict(fields)
else:
d = {}
for field in fields:
value = getattr(obj, field, '')
if value is None:
d[field] = ""
else:
d[field] = six.text_type(value)
writer.writerow(d)
yield f._last_string
def export_csv_bytes(generator):
return six.binary_type().join(generator)
|
from django.utils import six
from django.http import StreamingHttpResponse
- def export_csv_response(queryset, fields, name='export.csv'):
? ^^ ^^^ ^^^^^^^^
+ def export_csv_response(generator, name='export.csv'):
? ^^^ ^ ^^
+ response = StreamingHttpResponse(generator, content_type='text/csv')
- response = StreamingHttpResponse(export_csv(queryset, fields),
- content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="%s"' % name
return response
class FakeFile(object):
# unicodecsv doesn't return values
# so temp store them in here
def write(self, string):
self._last_string = string
if six.PY3:
self._last_string = self._last_string.encode('utf-8')
def export_csv(queryset, fields):
if six.PY3:
import csv
else:
import unicodecsv as csv
f = FakeFile()
writer = csv.DictWriter(f, fields)
writer.writeheader()
yield f._last_string
for obj in queryset:
if hasattr(obj, 'get_dict'):
d = obj.get_dict(fields)
else:
d = {}
for field in fields:
value = getattr(obj, field, '')
if value is None:
d[field] = ""
else:
d[field] = six.text_type(value)
writer.writerow(d)
yield f._last_string
def export_csv_bytes(generator):
return six.binary_type().join(generator)
|
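After the refactor the caller composes the pieces, which also makes the generator reusable outside HTTP (queryset and fields below are illustrative):

gen = export_csv(queryset, ['id', 'title'])
response = export_csv_response(gen, name='requests.csv')
raw = export_csv_bytes(export_csv(queryset, ['id', 'title']))  # e.g. in tests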
c80a68b81e936435434931f0b5bf748bcbea54dc
|
statistics/webui.py
|
statistics/webui.py
|
from flask import render_template, g, redirect, request
from db import connect_db, get_all_sum
from statistics import app
@app.before_request
def before_request():
g.db = connect_db()
g.fields = ["CPU", "TOTAL", "SQL", "SOLR", "REDIS", "MEMCACHED"]
@app.route("/")
def main_page():
sort_by = request.args.get('sort_by', None)
data = get_all_sum(g.db)
if sort_by:
data = sorted(data, key=lambda row: row[sort_by])
return render_template("main_page.html", data=data)
@app.route("/add/")
def add_page():
key = request.args.get('KEY')
for field in g.fields:
new_val = int(request.args.get(field, '0'))
old_val = int(g.db.hget(key, field) or '0')
new_val += old_val
g.db.hset(key, field, new_val)
g.db.hincrby(key, "REQUESTS", "1")
return redirect("/")
|
from flask import render_template, g, redirect, request
from db import connect_db, get_all_sum
from statistics import app
@app.before_request
def before_request():
g.db = connect_db()
g.fields = ["CPU", "TOTAL", "SQL", "SOLR", "REDIS", "MEMCACHED"]
@app.route("/")
def main_page():
sort_by = request.args.get('sort_by', None)
data = get_all_sum(g.db)
if sort_by:
data = sorted(data, key=lambda row: row[sort_by])
return render_template("main_page.html", data=data)
@app.route("/average/")
def average():
data = get_all_sum(g.db)
for row in data:
req_count = row['REQUESTS']
for k in row:
if k != 'NAME' and k != 'REQUESTS':
row[k] = float(row[k])/req_count
return render_template("main_page.html", data=data)
@app.route("/add/")
def add_page():
key = request.args.get('KEY')
for field in g.fields:
new_val = int(request.args.get(field, '0'))
old_val = int(g.db.hget(key, field) or '0')
new_val += old_val
g.db.hset(key, field, new_val)
g.db.hincrby(key, "REQUESTS", "1")
return redirect("/")
|
Add proto of average page. Without sorting.
|
Add proto of average page. Without sorting.
|
Python
|
mit
|
uvNikita/appstats,uvNikita/appstats,uvNikita/appstats
|
from flask import render_template, g, redirect, request
from db import connect_db, get_all_sum
from statistics import app
@app.before_request
def before_request():
g.db = connect_db()
g.fields = ["CPU", "TOTAL", "SQL", "SOLR", "REDIS", "MEMCACHED"]
@app.route("/")
def main_page():
sort_by = request.args.get('sort_by', None)
data = get_all_sum(g.db)
if sort_by:
data = sorted(data, key=lambda row: row[sort_by])
return render_template("main_page.html", data=data)
+ @app.route("/average/")
+ def average():
+ data = get_all_sum(g.db)
+ for row in data:
+ req_count = row['REQUESTS']
+ for k in row:
+ if k != 'NAME' and k != 'REQUESTS':
+ row[k] = float(row[k])/req_count
+ return render_template("main_page.html", data=data)
+
@app.route("/add/")
def add_page():
key = request.args.get('KEY')
for field in g.fields:
new_val = int(request.args.get(field, '0'))
old_val = int(g.db.hget(key, field) or '0')
new_val += old_val
g.db.hset(key, field, new_val)
g.db.hincrby(key, "REQUESTS", "1")
return redirect("/")
|
Add proto of average page. Without sorting.
|
## Code Before:
from flask import render_template, g, redirect, request
from db import connect_db, get_all_sum
from statistics import app
@app.before_request
def before_request():
g.db = connect_db()
g.fields = ["CPU", "TOTAL", "SQL", "SOLR", "REDIS", "MEMCACHED"]
@app.route("/")
def main_page():
sort_by = request.args.get('sort_by', None)
data = get_all_sum(g.db)
if sort_by:
data = sorted(data, key=lambda row: row[sort_by])
return render_template("main_page.html", data=data)
@app.route("/add/")
def add_page():
key = request.args.get('KEY')
for field in g.fields:
new_val = int(request.args.get(field, '0'))
old_val = int(g.db.hget(key, field) or '0')
new_val += old_val
g.db.hset(key, field, new_val)
g.db.hincrby(key, "REQUESTS", "1")
return redirect("/")
## Instruction:
Add proto of average page. Without sorting.
## Code After:
from flask import render_template, g, redirect, request
from db import connect_db, get_all_sum
from statistics import app
@app.before_request
def before_request():
g.db = connect_db()
g.fields = ["CPU", "TOTAL", "SQL", "SOLR", "REDIS", "MEMCACHED"]
@app.route("/")
def main_page():
sort_by = request.args.get('sort_by', None)
data = get_all_sum(g.db)
if sort_by:
data = sorted(data, key=lambda row: row[sort_by])
return render_template("main_page.html", data=data)
@app.route("/average/")
def average():
data = get_all_sum(g.db)
for row in data:
req_count = row['REQUESTS']
for k in row:
if k != 'NAME' and k != 'REQUESTS':
row[k] = float(row[k])/req_count
return render_template("main_page.html", data=data)
@app.route("/add/")
def add_page():
key = request.args.get('KEY')
for field in g.fields:
new_val = int(request.args.get(field, '0'))
old_val = int(g.db.hget(key, field) or '0')
new_val += old_val
g.db.hset(key, field, new_val)
g.db.hincrby(key, "REQUESTS", "1")
return redirect("/")
|
from flask import render_template, g, redirect, request
from db import connect_db, get_all_sum
from statistics import app
@app.before_request
def before_request():
g.db = connect_db()
g.fields = ["CPU", "TOTAL", "SQL", "SOLR", "REDIS", "MEMCACHED"]
@app.route("/")
def main_page():
sort_by = request.args.get('sort_by', None)
data = get_all_sum(g.db)
if sort_by:
data = sorted(data, key=lambda row: row[sort_by])
return render_template("main_page.html", data=data)
+ @app.route("/average/")
+ def average():
+ data = get_all_sum(g.db)
+ for row in data:
+ req_count = row['REQUESTS']
+ for k in row:
+ if k != 'NAME' and k != 'REQUESTS':
+ row[k] = float(row[k])/req_count
+ return render_template("main_page.html", data=data)
+
@app.route("/add/")
def add_page():
key = request.args.get('KEY')
for field in g.fields:
new_val = int(request.args.get(field, '0'))
old_val = int(g.db.hget(key, field) or '0')
new_val += old_val
g.db.hset(key, field, new_val)
g.db.hincrby(key, "REQUESTS", "1")
return redirect("/")
|
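The change log records that sorting was deliberately left out of the new average view. A hypothetical follow-up (not part of the recorded commit) could reuse the sort_by query parameter already handled by main_page; a sketch against the same module context:

@app.route("/average/")
def average():
    sort_by = request.args.get('sort_by', None)
    data = get_all_sum(g.db)
    for row in data:
        req_count = row['REQUESTS']
        for k in row:
            if k != 'NAME' and k != 'REQUESTS':
                row[k] = float(row[k]) / req_count
    if sort_by:
        # Sort after averaging so the comparison uses per-request values.
        data = sorted(data, key=lambda row: row[sort_by])
    return render_template("main_page.html", data=data)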
df56478315c7b58526fbecf3fdfc4df5326d5ba0
|
custom_fixers/fix_alt_unicode.py
|
custom_fixers/fix_alt_unicode.py
|
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, BlankLine
class FixAltUnicode(fixer_base.BaseFix):
PATTERN = """
func=funcdef< 'def' name='__unicode__'
parameters< '(' NAME ')' > any+ >
"""
def transform(self, node, results):
name = results['name']
name.replace(Name('__str__', prefix=name.prefix))
|
from lib2to3 import fixer_base
class FixAltUnicode(fixer_base.BaseFix):
PATTERN = "'__unicode__'"
def transform(self, node, results):
new = node.clone()
new.value = '__str__'
return new
|
Simplify python3 unicode fixer and make it replace all occurrences of __unicode__ with __str__.
|
Simplify python3 unicode fixer and make it replace all occurrences of __unicode__ with __str__.
|
Python
|
mit
|
andreas-h/pybtex,andreas-h/pybtex,chbrown/pybtex,chbrown/pybtex
|
from lib2to3 import fixer_base
- from lib2to3.fixer_util import Name, BlankLine
class FixAltUnicode(fixer_base.BaseFix):
+ PATTERN = "'__unicode__'"
- PATTERN = """
- func=funcdef< 'def' name='__unicode__'
- parameters< '(' NAME ')' > any+ >
- """
def transform(self, node, results):
- name = results['name']
- name.replace(Name('__str__', prefix=name.prefix))
+ new = node.clone()
+ new.value = '__str__'
+ return new
|
Simplify python3 unicode fixer and make it replace all occurrences of __unicode__ with __str__.
|
## Code Before:
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, BlankLine
class FixAltUnicode(fixer_base.BaseFix):
PATTERN = """
func=funcdef< 'def' name='__unicode__'
parameters< '(' NAME ')' > any+ >
"""
def transform(self, node, results):
name = results['name']
name.replace(Name('__str__', prefix=name.prefix))
## Instruction:
Simplify python3 unicode fixer and make it replace all occurrences of __unicode__ with __str__.
## Code After:
from lib2to3 import fixer_base
class FixAltUnicode(fixer_base.BaseFix):
PATTERN = "'__unicode__'"
def transform(self, node, results):
new = node.clone()
new.value = '__str__'
return new
|
from lib2to3 import fixer_base
- from lib2to3.fixer_util import Name, BlankLine
class FixAltUnicode(fixer_base.BaseFix):
+ PATTERN = "'__unicode__'"
- PATTERN = """
- func=funcdef< 'def' name='__unicode__'
- parameters< '(' NAME ')' > any+ >
- """
def transform(self, node, results):
- name = results['name']
- name.replace(Name('__str__', prefix=name.prefix))
+ new = node.clone()
+ new.value = '__str__'
+ return new
|
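To see the simplified fixer in action, it can be driven through lib2to3's refactoring machinery. A minimal sketch, assuming the custom_fixers package is importable from the working directory:

from lib2to3.refactor import RefactoringTool

src = "class Foo(object):\n    def __unicode__(self):\n        return u'foo'\n"
tool = RefactoringTool(['custom_fixers.fix_alt_unicode'])
# Every NAME token whose value is __unicode__ is rewritten to __str__,
# which is exactly what the simplified PATTERN above matches.
print(tool.refactor_string(src, '<test>'))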
b202e1cc5e6c5aa65c3ed22ad1e78ec505fa36c4
|
cmsplugin_rst/forms.py
|
cmsplugin_rst/forms.py
|
from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel
|
from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel
fields = ["name", "body"]
|
Add "fields" attribute to ModelForm.
|
Add "fields" attribute to ModelForm.
|
Python
|
bsd-3-clause
|
pakal/cmsplugin-rst,ojii/cmsplugin-rst
|
from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel
+ fields = ["name", "body"]
|
Add "fields" attribute to ModelForm.
|
## Code Before:
from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel
## Instruction:
Add "fields" attribute to ModelForm.
## Code After:
from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel
fields = ["name", "body"]
|
from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel
+ fields = ["name", "body"]
|
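Declaring Meta.fields is one of two ways to satisfy Django's rule (enforced since 1.8) that a ModelForm state which model fields it exposes; omitting both fields and exclude raises ImproperlyConfigured. A sketch of the opt-in-everything alternative, had the form wanted every field of the model:

from cmsplugin_rst.models import RstPluginModel
from django import forms

class RstPluginForm(forms.ModelForm):
    class Meta:
        model = RstPluginModel
        fields = '__all__'  # expose every model field instead of listing them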
4d8ee930b772329b4c3ded17a5a04efb7dada977
|
tests/test__compat.py
|
tests/test__compat.py
|
import pytest
import numpy as np
import dask
import dask.array as da
import dask.array.utils as dau
import dask_distance._compat
@pytest.mark.parametrize("x", [
list(range(5)),
np.random.randint(10, size=(15, 16)),
da.random.randint(10, size=(15, 16), chunks=(5, 5)),
])
def test_asarray(x):
d = dask_distance._compat._asarray(x)
assert isinstance(d, da.Array)
if not isinstance(x, (np.ndarray, da.Array)):
x = np.asarray(x)
dau.assert_eq(d, x)
|
import pytest
import numpy as np
import dask.array as da
import dask.array.utils as dau
import dask_distance._compat
@pytest.mark.parametrize("x", [
list(range(5)),
np.random.randint(10, size=(15, 16)),
da.random.randint(10, size=(15, 16), chunks=(5, 5)),
])
def test_asarray(x):
d = dask_distance._compat._asarray(x)
assert isinstance(d, da.Array)
if not isinstance(x, (np.ndarray, da.Array)):
x = np.asarray(x)
dau.assert_eq(d, x)
|
Drop unused import from _compat tests
|
Drop unused import from _compat tests
|
Python
|
bsd-3-clause
|
jakirkham/dask-distance
|
import pytest
import numpy as np
- import dask
import dask.array as da
import dask.array.utils as dau
import dask_distance._compat
@pytest.mark.parametrize("x", [
list(range(5)),
np.random.randint(10, size=(15, 16)),
da.random.randint(10, size=(15, 16), chunks=(5, 5)),
])
def test_asarray(x):
d = dask_distance._compat._asarray(x)
assert isinstance(d, da.Array)
if not isinstance(x, (np.ndarray, da.Array)):
x = np.asarray(x)
dau.assert_eq(d, x)
|
Drop unused import from _compat tests
|
## Code Before:
import pytest
import numpy as np
import dask
import dask.array as da
import dask.array.utils as dau
import dask_distance._compat
@pytest.mark.parametrize("x", [
list(range(5)),
np.random.randint(10, size=(15, 16)),
da.random.randint(10, size=(15, 16), chunks=(5, 5)),
])
def test_asarray(x):
d = dask_distance._compat._asarray(x)
assert isinstance(d, da.Array)
if not isinstance(x, (np.ndarray, da.Array)):
x = np.asarray(x)
dau.assert_eq(d, x)
## Instruction:
Drop unused import from _compat tests
## Code After:
import pytest
import numpy as np
import dask.array as da
import dask.array.utils as dau
import dask_distance._compat
@pytest.mark.parametrize("x", [
list(range(5)),
np.random.randint(10, size=(15, 16)),
da.random.randint(10, size=(15, 16), chunks=(5, 5)),
])
def test_asarray(x):
d = dask_distance._compat._asarray(x)
assert isinstance(d, da.Array)
if not isinstance(x, (np.ndarray, da.Array)):
x = np.asarray(x)
dau.assert_eq(d, x)
|
import pytest
import numpy as np
- import dask
import dask.array as da
import dask.array.utils as dau
import dask_distance._compat
@pytest.mark.parametrize("x", [
list(range(5)),
np.random.randint(10, size=(15, 16)),
da.random.randint(10, size=(15, 16), chunks=(5, 5)),
])
def test_asarray(x):
d = dask_distance._compat._asarray(x)
assert isinstance(d, da.Array)
if not isinstance(x, (np.ndarray, da.Array)):
x = np.asarray(x)
dau.assert_eq(d, x)
|
47bb8e983dad168451d65c0032f5568357a8d359
|
battlesnake/plugins/imc2/triggers.py
|
battlesnake/plugins/imc2/triggers.py
|
import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>.*): (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
|
import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>[\w`$_\-.,\']+)[:] (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
|
Adjust IMC2 trigger regex to handle multiple colons correctly.
|
Adjust IMC2 trigger regex to handle multiple colons correctly.
|
Python
|
bsd-3-clause
|
gtaylor/btmux_battlesnake
|
import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
- line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>.*): (?P<message>.*)\r')
+ line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>[\w`$_\-.,\']+)[:] (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
|
Adjust IMC2 trigger regex to handle multiple colons correctly.
|
## Code Before:
import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>.*): (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
## Instruction:
Adjust IMC2 trigger regex to handle multiple colons correctly.
## Code After:
import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>[\w`$_\-.,\']+)[:] (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
|
import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
- line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>.*): (?P<message>.*)\r')
? ^
+ line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>[\w`$_\-.,\']+)[:] (?P<message>.*)\r')
? ++++++++ ^^^^^ + +
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
|
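The defect the new pattern fixes is greediness: with (?P<author>.*): the author group runs up to the last ': ' on the line, so a colon inside the message gets folded into the author. A minimal reproduction with an assumed sample line (not taken from real traffic):

import re

line = '[public] Alice: note: see the docs\r'
old = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>.*): (?P<message>.*)\r')
new = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>[\w`$_\-.,\']+)[:] (?P<message>.*)\r')

print(old.match(line).group('author'))  # 'Alice: note' -- greedy .* swallows the first colon
print(new.match(line).group('author'))  # 'Alice'       -- the character class stops at ':'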
a90411116617096c73ba6d188322613a1b529a62
|
books/CrackingCodesWithPython/Chapter20/vigenereDictionaryHacker.py
|
books/CrackingCodesWithPython/Chapter20/vigenereDictionaryHacker.py
|
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if not hackedMessage:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
Update vigenereDictionaryHacker: simplified comparison with None
|
Update vigenereDictionaryHacker: simplified comparison with None
|
Python
|
mit
|
JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials
|
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
- if hackedMessage != None:
+ if not hackedMessage:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
Update vigenereDictionaryHacker: simplified comparison with None
|
## Code Before:
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
## Instruction:
Update vigenereDictionaryHacker: simplified comparison with None
## Code After:
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if not hackedMessage:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
- if hackedMessage != None:
? --------
+ if not hackedMessage:
? ++++
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
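A small aside on the simplification named in this commit: 'x != None' can be tightened either to the PEP 8 identity test or to a plain truth test, and the two differ on empty values. A sketch of the distinction:

hackedMessage = ''
print(hackedMessage != None)      # True  -- an empty string is not None
print(hackedMessage is not None)  # True  -- identity test, the PEP 8 spelling
print(bool(hackedMessage))        # False -- a truth test also rejects empty values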
87f4bb8cdcb607cb4f15ecbda9a3cb50a3fd5319
|
src/webargs/__init__.py
|
src/webargs/__init__.py
|
from distutils.version import LooseVersion
from marshmallow.utils import missing
# Make marshmallow's validation functions importable from webargs
from marshmallow import validate
from webargs.core import ValidationError
from webargs.dict2schema import dict2schema
from webargs import fields
__version__ = "5.5.0"
__version_info__ = tuple(LooseVersion(__version__).version)
__author__ = "Steven Loria"
__license__ = "MIT"
__all__ = ("dict2schema", "ValidationError", "fields", "missing", "validate")
|
from distutils.version import LooseVersion
from marshmallow.utils import missing
# Make marshmallow's validation functions importable from webargs
from marshmallow import validate
from webargs.core import ValidationError
from webargs.dict2schema import dict2schema
from webargs import fields
__version__ = "5.5.0"
__version_info__ = tuple(LooseVersion(__version__).version)
__all__ = ("dict2schema", "ValidationError", "fields", "missing", "validate")
|
Remove unnecessary __author__ and __license__
|
Remove unnecessary __author__ and __license__
|
Python
|
mit
|
sloria/webargs
|
from distutils.version import LooseVersion
from marshmallow.utils import missing
# Make marshmallow's validation functions importable from webargs
from marshmallow import validate
from webargs.core import ValidationError
from webargs.dict2schema import dict2schema
from webargs import fields
__version__ = "5.5.0"
__version_info__ = tuple(LooseVersion(__version__).version)
- __author__ = "Steven Loria"
- __license__ = "MIT"
-
-
__all__ = ("dict2schema", "ValidationError", "fields", "missing", "validate")
|
Remove unnecessary __author__ and __license__
|
## Code Before:
from distutils.version import LooseVersion
from marshmallow.utils import missing
# Make marshmallow's validation functions importable from webargs
from marshmallow import validate
from webargs.core import ValidationError
from webargs.dict2schema import dict2schema
from webargs import fields
__version__ = "5.5.0"
__version_info__ = tuple(LooseVersion(__version__).version)
__author__ = "Steven Loria"
__license__ = "MIT"
__all__ = ("dict2schema", "ValidationError", "fields", "missing", "validate")
## Instruction:
Remove unnecessary __author__ and __license__
## Code After:
from distutils.version import LooseVersion
from marshmallow.utils import missing
# Make marshmallow's validation functions importable from webargs
from marshmallow import validate
from webargs.core import ValidationError
from webargs.dict2schema import dict2schema
from webargs import fields
__version__ = "5.5.0"
__version_info__ = tuple(LooseVersion(__version__).version)
__all__ = ("dict2schema", "ValidationError", "fields", "missing", "validate")
|
from distutils.version import LooseVersion
from marshmallow.utils import missing
# Make marshmallow's validation functions importable from webargs
from marshmallow import validate
from webargs.core import ValidationError
from webargs.dict2schema import dict2schema
from webargs import fields
__version__ = "5.5.0"
__version_info__ = tuple(LooseVersion(__version__).version)
- __author__ = "Steven Loria"
- __license__ = "MIT"
-
-
__all__ = ("dict2schema", "ValidationError", "fields", "missing", "validate")
|
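The surviving __version_info__ line turns the version string into a comparable tuple; a minimal sketch of what it computes:

from distutils.version import LooseVersion

print(tuple(LooseVersion('5.5.0').version))    # (5, 5, 0)
print(tuple(LooseVersion('5.5.0b1').version))  # (5, 5, 0, 'b', 1)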
7f1ddec9e170941e3a5159236ede817c2d569f38
|
graphical_tests/test_partition.py
|
graphical_tests/test_partition.py
|
from skimage import draw
import matplotlib.pyplot as plt
import numpy as np
import photomosaic as pm
img = np.zeros((1000, 1000))
rr, cc = draw.circle(300, 500, 150)
img[rr, cc] = 1
tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3)
plt.imshow(pm.draw_tiles(img, tiles, color=0.5))
plt.savefig('test-partition.png')
|
from skimage import draw
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import photomosaic as pm
img = np.zeros((1000, 1000))
rr, cc = draw.circle(300, 500, 150)
img[rr, cc] = 1
tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3)
plt.imshow(pm.draw_tile_layout(img, tiles, color=0.5))
plt.savefig('test-partition.png')
|
Update test to match API.
|
TST: Update test to match API.
|
Python
|
bsd-3-clause
|
danielballan/photomosaic
|
from skimage import draw
+ import matplotlib
+ matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import photomosaic as pm
img = np.zeros((1000, 1000))
rr, cc = draw.circle(300, 500, 150)
img[rr, cc] = 1
tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3)
- plt.imshow(pm.draw_tiles(img, tiles, color=0.5))
+ plt.imshow(pm.draw_tile_layout(img, tiles, color=0.5))
plt.savefig('test-partition.png')
|
Update test to match API.
|
## Code Before:
from skimage import draw
import matplotlib.pyplot as plt
import numpy as np
import photomosaic as pm
img = np.zeros((1000, 1000))
rr, cc = draw.circle(300, 500, 150)
img[rr, cc] = 1
tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3)
plt.imshow(pm.draw_tiles(img, tiles, color=0.5))
plt.savefig('test-partition.png')
## Instruction:
Update test to match API.
## Code After:
from skimage import draw
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import photomosaic as pm
img = np.zeros((1000, 1000))
rr, cc = draw.circle(300, 500, 150)
img[rr, cc] = 1
tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3)
plt.imshow(pm.draw_tile_layout(img, tiles, color=0.5))
plt.savefig('test-partition.png')
|
from skimage import draw
+ import matplotlib
+ matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import photomosaic as pm
img = np.zeros((1000, 1000))
rr, cc = draw.circle(300, 500, 150)
img[rr, cc] = 1
tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3)
- plt.imshow(pm.draw_tiles(img, tiles, color=0.5))
? ^
+ plt.imshow(pm.draw_tile_layout(img, tiles, color=0.5))
? ^^^^^^^
plt.savefig('test-partition.png')
|
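The two added lines pin matplotlib to a non-interactive backend, which is what lets this graphical test run on a headless CI box. The call has to come before pyplot is imported, since importing pyplot locks in whichever backend is active at that moment; a standalone sketch of the idiom:

import matplotlib
matplotlib.use('Agg')          # file-only backend, no display server needed
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])
fig.savefig('smoke-test.png')  # succeeds without an X server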
243cf3c18228b0c50b6b48a69c420922576ed723
|
grano/logic/plugins.py
|
grano/logic/plugins.py
|
import logging
from grano.model import Entity, Relation, Project, Schema
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
for schema in Schema.all():
_schema_changed(schema.project.slug, schema.name, 'delete')
_schema_changed(schema.project.slug, schema.name, 'create')
for i, entity in enumerate(Entity.all().filter_by(same_as=None)):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s entities", i)
_entity_changed(entity.id, 'delete')
_entity_changed(entity.id, 'create')
for i, relation in enumerate(Relation.all()):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s relation", i)
_relation_changed(relation.id, 'delete')
_relation_changed(relation.id, 'create')
|
import logging
from grano.model import Entity, Relation, Project
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
for schema in project.schemata:
_schema_changed(schema.project.slug, schema.name, 'delete')
_schema_changed(schema.project.slug, schema.name, 'create')
eq = Entity.all().filter_by(same_as=None)
eq = eq.filter_by(project=project)
for i, entity in enumerate(eq):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s entities", i)
_entity_changed(entity.id, 'delete')
_entity_changed(entity.id, 'create')
rq = Relation.all().filter_by(project=project)
for i, relation in enumerate(rq):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s relation", i)
_relation_changed(relation.id, 'delete')
_relation_changed(relation.id, 'create')
|
Rebuild by project, not by type.
|
Rebuild by project, not by type.
|
Python
|
mit
|
4bic-attic/grano,granoproject/grano,CodeForAfrica/grano,4bic/grano
|
import logging
- from grano.model import Entity, Relation, Project, Schema
+ from grano.model import Entity, Relation, Project
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
- for schema in Schema.all():
+ for schema in project.schemata:
- _schema_changed(schema.project.slug, schema.name, 'delete')
+ _schema_changed(schema.project.slug, schema.name, 'delete')
- _schema_changed(schema.project.slug, schema.name, 'create')
+ _schema_changed(schema.project.slug, schema.name, 'create')
- for i, entity in enumerate(Entity.all().filter_by(same_as=None)):
+ eq = Entity.all().filter_by(same_as=None)
+ eq = eq.filter_by(project=project)
+ for i, entity in enumerate(eq):
- if i > 0 and i % 1000 == 0:
+ if i > 0 and i % 1000 == 0:
- log.info("Rebuilt: %s entities", i)
+ log.info("Rebuilt: %s entities", i)
- _entity_changed(entity.id, 'delete')
+ _entity_changed(entity.id, 'delete')
- _entity_changed(entity.id, 'create')
+ _entity_changed(entity.id, 'create')
+ rq = Relation.all().filter_by(project=project)
- for i, relation in enumerate(Relation.all()):
+ for i, relation in enumerate(rq):
- if i > 0 and i % 1000 == 0:
+ if i > 0 and i % 1000 == 0:
- log.info("Rebuilt: %s relation", i)
+ log.info("Rebuilt: %s relation", i)
- _relation_changed(relation.id, 'delete')
+ _relation_changed(relation.id, 'delete')
- _relation_changed(relation.id, 'create')
+ _relation_changed(relation.id, 'create')
|
Rebuild by project, not by type.
|
## Code Before:
import logging
from grano.model import Entity, Relation, Project, Schema
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
for schema in Schema.all():
_schema_changed(schema.project.slug, schema.name, 'delete')
_schema_changed(schema.project.slug, schema.name, 'create')
for i, entity in enumerate(Entity.all().filter_by(same_as=None)):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s entities", i)
_entity_changed(entity.id, 'delete')
_entity_changed(entity.id, 'create')
for i, relation in enumerate(Relation.all()):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s relation", i)
_relation_changed(relation.id, 'delete')
_relation_changed(relation.id, 'create')
## Instruction:
Rebuild by project, not by type.
## Code After:
import logging
from grano.model import Entity, Relation, Project
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
for schema in project.schemata:
_schema_changed(schema.project.slug, schema.name, 'delete')
_schema_changed(schema.project.slug, schema.name, 'create')
eq = Entity.all().filter_by(same_as=None)
eq = eq.filter_by(project=project)
for i, entity in enumerate(eq):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s entities", i)
_entity_changed(entity.id, 'delete')
_entity_changed(entity.id, 'create')
rq = Relation.all().filter_by(project=project)
for i, relation in enumerate(rq):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s relation", i)
_relation_changed(relation.id, 'delete')
_relation_changed(relation.id, 'create')
|
import logging
- from grano.model import Entity, Relation, Project, Schema
? --------
+ from grano.model import Entity, Relation, Project
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
- for schema in Schema.all():
+ for schema in project.schemata:
- _schema_changed(schema.project.slug, schema.name, 'delete')
+ _schema_changed(schema.project.slug, schema.name, 'delete')
? ++++
- _schema_changed(schema.project.slug, schema.name, 'create')
+ _schema_changed(schema.project.slug, schema.name, 'create')
? ++++
- for i, entity in enumerate(Entity.all().filter_by(same_as=None)):
? --- ^^ ^^^^^ ^^ ---------- --
+ eq = Entity.all().filter_by(same_as=None)
? ^^ ^ ^
+ eq = eq.filter_by(project=project)
+ for i, entity in enumerate(eq):
- if i > 0 and i % 1000 == 0:
+ if i > 0 and i % 1000 == 0:
? ++++
- log.info("Rebuilt: %s entities", i)
+ log.info("Rebuilt: %s entities", i)
? ++++
- _entity_changed(entity.id, 'delete')
+ _entity_changed(entity.id, 'delete')
? ++++
- _entity_changed(entity.id, 'create')
+ _entity_changed(entity.id, 'create')
? ++++
+ rq = Relation.all().filter_by(project=project)
- for i, relation in enumerate(Relation.all()):
? ^^^^^^^^^^^^^^
+ for i, relation in enumerate(rq):
? ++++ ^^
- if i > 0 and i % 1000 == 0:
+ if i > 0 and i % 1000 == 0:
? ++++
- log.info("Rebuilt: %s relation", i)
+ log.info("Rebuilt: %s relation", i)
? ++++
- _relation_changed(relation.id, 'delete')
+ _relation_changed(relation.id, 'delete')
? ++++
- _relation_changed(relation.id, 'create')
+ _relation_changed(relation.id, 'create')
? ++++
|
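The per-project loops now repeat the same enumerate-and-log shape twice; a hypothetical refactor (not in the commit) that factors it out against the same module context, while keeping the project-scoped queries:

def _rebuild(query, changed, label):
    # Replay the delete/create handlers for every object, logging progress.
    for i, obj in enumerate(query):
        if i > 0 and i % 1000 == 0:
            log.info("Rebuilt: %s %s", i, label)
        changed(obj.id, 'delete')
        changed(obj.id, 'create')

_rebuild(Entity.all().filter_by(same_as=None).filter_by(project=project),
         _entity_changed, 'entities')
_rebuild(Relation.all().filter_by(project=project),
         _relation_changed, 'relations')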
4927a1c29d258b1ab7c70ffecff6904b808480eb
|
bokeh/validation/warnings.py
|
bokeh/validation/warnings.py
|
''' Define standard warning codes and messages for Bokeh validation checks.
1000 : *MISSING_RENDERERS*
A |Plot| object has no renderers configured (will result in a blank plot).
1001 : *NO_GLYPH_RENDERERS*
A |Plot| object has no glyph renderers (will result in an empty plot frame).
1002 : *EMPTY_LAYOUT*
A layout model has no children (will result in a blank layout).
9999 : *EXT*
Indicates that a custom warning check has failed.
'''
codes = {
1000: ("MISSING_RENDERERS", "Plot has no renderers"),
1001: ("NO_GLYPH_RENDERERS", "Plot has no glyph renderers"),
1002: ("EMPTY_LAYOUT", "Layout has no children"),
1003: ("COLON_IN_CATEGORY_LABEL", "Category label contains colons"),
9999: ("EXT", "Custom extension reports warning"),
}
for code in codes:
exec("%s = %d" % (codes[code][0], code))
|
''' Define standard warning codes and messages for Bokeh validation checks.
1000 : *MISSING_RENDERERS*
A |Plot| object has no renderers configured (will result in a blank plot).
1001 : *NO_GLYPH_RENDERERS*
A |Plot| object has no glyph renderers (will result in an empty plot frame).
1002 : *EMPTY_LAYOUT*
A layout model has no children (will result in a blank layout).
1003 : *COLON_IN_CATEGORY_LABEL*
Category label contains colons (will result in a blank layout).
9999 : *EXT*
Indicates that a custom warning check has failed.
'''
codes = {
1000: ("MISSING_RENDERERS", "Plot has no renderers"),
1001: ("NO_GLYPH_RENDERERS", "Plot has no glyph renderers"),
1002: ("EMPTY_LAYOUT", "Layout has no children"),
1003: ("COLON_IN_CATEGORY_LABEL", "Category label contains colons"),
9999: ("EXT", "Custom extension reports warning"),
}
for code in codes:
exec("%s = %d" % (codes[code][0], code))
|
Add module level documentation for colon warning
|
Add module level documentation for colon warning
|
Python
|
bsd-3-clause
|
ericmjl/bokeh,timsnyder/bokeh,draperjames/bokeh,philippjfr/bokeh,mindriot101/bokeh,aavanian/bokeh,aavanian/bokeh,phobson/bokeh,daodaoliang/bokeh,KasperPRasmussen/bokeh,rothnic/bokeh,dennisobrien/bokeh,phobson/bokeh,DuCorey/bokeh,timsnyder/bokeh,jplourenco/bokeh,ericdill/bokeh,srinathv/bokeh,bokeh/bokeh,srinathv/bokeh,justacec/bokeh,clairetang6/bokeh,DuCorey/bokeh,ChinaQuants/bokeh,paultcochrane/bokeh,deeplook/bokeh,mindriot101/bokeh,ericdill/bokeh,philippjfr/bokeh,dennisobrien/bokeh,KasperPRasmussen/bokeh,ericmjl/bokeh,htygithub/bokeh,khkaminska/bokeh,ptitjano/bokeh,saifrahmed/bokeh,ericmjl/bokeh,matbra/bokeh,schoolie/bokeh,justacec/bokeh,jakirkham/bokeh,saifrahmed/bokeh,rothnic/bokeh,aavanian/bokeh,aiguofer/bokeh,draperjames/bokeh,rs2/bokeh,clairetang6/bokeh,Karel-van-de-Plassche/bokeh,percyfal/bokeh,schoolie/bokeh,rothnic/bokeh,ericdill/bokeh,jakirkham/bokeh,azjps/bokeh,gpfreitas/bokeh,khkaminska/bokeh,srinathv/bokeh,evidation-health/bokeh,aavanian/bokeh,bokeh/bokeh,jakirkham/bokeh,jplourenco/bokeh,daodaoliang/bokeh,stonebig/bokeh,mindriot101/bokeh,deeplook/bokeh,tacaswell/bokeh,Karel-van-de-Plassche/bokeh,schoolie/bokeh,aiguofer/bokeh,timsnyder/bokeh,ChinaQuants/bokeh,ptitjano/bokeh,Karel-van-de-Plassche/bokeh,schoolie/bokeh,evidation-health/bokeh,percyfal/bokeh,khkaminska/bokeh,stonebig/bokeh,matbra/bokeh,quasiben/bokeh,muku42/bokeh,xguse/bokeh,percyfal/bokeh,phobson/bokeh,muku42/bokeh,mindriot101/bokeh,timsnyder/bokeh,quasiben/bokeh,ptitjano/bokeh,philippjfr/bokeh,DuCorey/bokeh,rs2/bokeh,ptitjano/bokeh,paultcochrane/bokeh,philippjfr/bokeh,schoolie/bokeh,azjps/bokeh,KasperPRasmussen/bokeh,ChinaQuants/bokeh,maxalbert/bokeh,xguse/bokeh,Karel-van-de-Plassche/bokeh,muku42/bokeh,dennisobrien/bokeh,muku42/bokeh,tacaswell/bokeh,matbra/bokeh,gpfreitas/bokeh,paultcochrane/bokeh,Karel-van-de-Plassche/bokeh,bokeh/bokeh,ericmjl/bokeh,jakirkham/bokeh,rs2/bokeh,timsnyder/bokeh,msarahan/bokeh,azjps/bokeh,evidation-health/bokeh,htygithub/bokeh,ChinaQuants/bokeh,htygithub/bokeh,ericdill/bokeh,draperjames/bokeh,maxalbert/bokeh,percyfal/bokeh,rs2/bokeh,phobson/bokeh,bokeh/bokeh,phobson/bokeh,xguse/bokeh,gpfreitas/bokeh,maxalbert/bokeh,daodaoliang/bokeh,quasiben/bokeh,evidation-health/bokeh,deeplook/bokeh,khkaminska/bokeh,jplourenco/bokeh,percyfal/bokeh,msarahan/bokeh,stonebig/bokeh,paultcochrane/bokeh,htygithub/bokeh,maxalbert/bokeh,matbra/bokeh,msarahan/bokeh,srinathv/bokeh,DuCorey/bokeh,saifrahmed/bokeh,ptitjano/bokeh,dennisobrien/bokeh,jplourenco/bokeh,daodaoliang/bokeh,DuCorey/bokeh,dennisobrien/bokeh,gpfreitas/bokeh,stonebig/bokeh,philippjfr/bokeh,tacaswell/bokeh,justacec/bokeh,jakirkham/bokeh,draperjames/bokeh,KasperPRasmussen/bokeh,msarahan/bokeh,justacec/bokeh,clairetang6/bokeh,tacaswell/bokeh,xguse/bokeh,azjps/bokeh,draperjames/bokeh,rs2/bokeh,KasperPRasmussen/bokeh,bokeh/bokeh,clairetang6/bokeh,aiguofer/bokeh,aiguofer/bokeh,azjps/bokeh,ericmjl/bokeh,deeplook/bokeh,aiguofer/bokeh,rothnic/bokeh,aavanian/bokeh,saifrahmed/bokeh
|
''' Define standard warning codes and messages for Bokeh validation checks.
1000 : *MISSING_RENDERERS*
A |Plot| object has no renderers configured (will result in a blank plot).
1001 : *NO_GLYPH_RENDERERS*
A |Plot| object has no glyph renderers (will result in an empty plot frame).
1002 : *EMPTY_LAYOUT*
A layout model has no children (will result in a blank layout).
+ 1003 : *COLON_IN_CATEGORY_LABEL*
+ Category label contains colons (will result in a blank layout).
+
9999 : *EXT*
Indicates that a custom warning check has failed.
'''
codes = {
- 1000: ("MISSING_RENDERERS", "Plot has no renderers"),
+ 1000: ("MISSING_RENDERERS", "Plot has no renderers"),
- 1001: ("NO_GLYPH_RENDERERS", "Plot has no glyph renderers"),
+ 1001: ("NO_GLYPH_RENDERERS", "Plot has no glyph renderers"),
- 1002: ("EMPTY_LAYOUT", "Layout has no children"),
+ 1002: ("EMPTY_LAYOUT", "Layout has no children"),
- 1003: ("COLON_IN_CATEGORY_LABEL", "Category label contains colons"),
+ 1003: ("COLON_IN_CATEGORY_LABEL", "Category label contains colons"),
- 9999: ("EXT", "Custom extension reports warning"),
+ 9999: ("EXT", "Custom extension reports warning"),
}
for code in codes:
exec("%s = %d" % (codes[code][0], code))
|
Add module level documentation for colon warning
|
## Code Before:
''' Define standard warning codes and messages for Bokeh validation checks.
1000 : *MISSING_RENDERERS*
A |Plot| object has no renderers configured (will result in a blank plot).
1001 : *NO_GLYPH_RENDERERS*
A |Plot| object has no glyph renderers (will result in an empty plot frame).
1002 : *EMPTY_LAYOUT*
A layout model has no children (will result in a blank layout).
9999 : *EXT*
Indicates that a custom warning check has failed.
'''
codes = {
1000: ("MISSING_RENDERERS", "Plot has no renderers"),
1001: ("NO_GLYPH_RENDERERS", "Plot has no glyph renderers"),
1002: ("EMPTY_LAYOUT", "Layout has no children"),
1003: ("COLON_IN_CATEGORY_LABEL", "Category label contains colons"),
9999: ("EXT", "Custom extension reports warning"),
}
for code in codes:
exec("%s = %d" % (codes[code][0], code))
## Instruction:
Add module level documentation for colon warning
## Code After:
''' Define standard warning codes and messages for Bokeh validation checks.
1000 : *MISSING_RENDERERS*
A |Plot| object has no renderers configured (will result in a blank plot).
1001 : *NO_GLYPH_RENDERERS*
A |Plot| object has no glyph renderers (will result in an empty plot frame).
1002 : *EMPTY_LAYOUT*
A layout model has no children (will result in a blank layout).
1003 : *COLON_IN_CATEGORY_LABEL*
Category label contains colons (will result in a blank layout).
9999 : *EXT*
Indicates that a custom warning check has failed.
'''
codes = {
1000: ("MISSING_RENDERERS", "Plot has no renderers"),
1001: ("NO_GLYPH_RENDERERS", "Plot has no glyph renderers"),
1002: ("EMPTY_LAYOUT", "Layout has no children"),
1003: ("COLON_IN_CATEGORY_LABEL", "Category label contains colons"),
9999: ("EXT", "Custom extension reports warning"),
}
for code in codes:
exec("%s = %d" % (codes[code][0], code))
|
''' Define standard warning codes and messages for Bokeh validation checks.
1000 : *MISSING_RENDERERS*
A |Plot| object has no renderers configured (will result in a blank plot).
1001 : *NO_GLYPH_RENDERERS*
A |Plot| object has no glyph renderers (will result in an empty plot frame).
1002 : *EMPTY_LAYOUT*
A layout model has no children (will result in a blank layout).
+ 1003 : *COLON_IN_CATEGORY_LABEL*
+ Category label contains colons (will result in a blank layout).
+
9999 : *EXT*
Indicates that a custom warning check has failed.
'''
codes = {
- 1000: ("MISSING_RENDERERS", "Plot has no renderers"),
+ 1000: ("MISSING_RENDERERS", "Plot has no renderers"),
? ++++
- 1001: ("NO_GLYPH_RENDERERS", "Plot has no glyph renderers"),
+ 1001: ("NO_GLYPH_RENDERERS", "Plot has no glyph renderers"),
? ++++
- 1002: ("EMPTY_LAYOUT", "Layout has no children"),
+ 1002: ("EMPTY_LAYOUT", "Layout has no children"),
? ++++
- 1003: ("COLON_IN_CATEGORY_LABEL", "Category label contains colons"),
? -----
+ 1003: ("COLON_IN_CATEGORY_LABEL", "Category label contains colons"),
- 9999: ("EXT", "Custom extension reports warning"),
+ 9999: ("EXT", "Custom extension reports warning"),
? ++++
}
for code in codes:
exec("%s = %d" % (codes[code][0], code))
|
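The exec loop at the bottom is what turns the codes table into module-level constants; a self-contained sketch of the mechanism:

codes = {1000: ('MISSING_RENDERERS', 'Plot has no renderers')}

for code in codes:
    exec("%s = %d" % (codes[code][0], code))

print(MISSING_RENDERERS)  # 1000 -- importable by name from the module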
c8e57ffc08f89111bb628bdfa6114a76672e73b1
|
chmvh_website/gallery/signals.py
|
chmvh_website/gallery/signals.py
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from gallery.tasks import create_thumbnail, process_patient_picture
@receiver(post_save, sender='gallery.Patient')
def send_notifications(sender, instance, *args, **kwargs):
""" Notify users that a reply has been posted """
process_patient_picture.delay(instance)
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from gallery.tasks import process_patient_picture
@receiver(post_save, sender='gallery.Patient')
def process_picture(sender, instance, update_fields, *args, **kwargs):
"""
Process a patients picture.
This involves checking for different orientations as well as
generating a thumbnail for the picture.
Args:
sender:
The sender of the save event.
instance:
The Patient instance being saved.
update_fields:
The fields that were updated in the save.
*args:
Additional arguments.
**kwargs:
Additional keyword arguments.
"""
if not update_fields or 'thumbnail' not in update_fields:
process_patient_picture.delay(instance)
|
Fix infinite loop when processing pictures.
|
Fix infinite loop when processing pictures.
|
Python
|
mit
|
cdriehuys/chmvh-website,cdriehuys/chmvh-website,cdriehuys/chmvh-website
|
from django.db.models.signals import post_save
from django.dispatch import receiver
- from gallery.tasks import create_thumbnail, process_patient_picture
+ from gallery.tasks import process_patient_picture
@receiver(post_save, sender='gallery.Patient')
- def send_notifications(sender, instance, *args, **kwargs):
- """ Notify users that a reply has been posted """
- process_patient_picture.delay(instance)
+ def process_picture(sender, instance, update_fields, *args, **kwargs):
+ """
+ Process a patients picture.
+ This involves checking for different orientations as well as
+ generating a thumbnail for the picture.
+
+ Args:
+ sender:
+ The sender of the save event.
+ instance:
+ The Patient instance being saved.
+ update_fields:
+ The fields that were updated in the save.
+ *args:
+ Additional arguments.
+ **kwargs:
+ Additional keyword arguments.
+ """
+ if not update_fields or 'thumbnail' not in update_fields:
+ process_patient_picture.delay(instance)
+
|
Fix infinite loop when processing pictures.
|
## Code Before:
from django.db.models.signals import post_save
from django.dispatch import receiver
from gallery.tasks import create_thumbnail, process_patient_picture
@receiver(post_save, sender='gallery.Patient')
def send_notifications(sender, instance, *args, **kwargs):
""" Notify users that a reply has been posted """
process_patient_picture.delay(instance)
## Instruction:
Fix infinite loop when processing pictures.
## Code After:
from django.db.models.signals import post_save
from django.dispatch import receiver
from gallery.tasks import process_patient_picture
@receiver(post_save, sender='gallery.Patient')
def process_picture(sender, instance, update_fields, *args, **kwargs):
"""
Process a patients picture.
This involves checking for different orientations as well as
generating a thumbnail for the picture.
Args:
sender:
The sender of the save event.
instance:
The Patient instance being saved.
update_fields:
The fields that were updated in the save.
*args:
Additional arguments.
**kwargs:
Additional keyword arguments.
"""
if not update_fields or 'thumbnail' not in update_fields:
process_patient_picture.delay(instance)
|
from django.db.models.signals import post_save
from django.dispatch import receiver
- from gallery.tasks import create_thumbnail, process_patient_picture
? ------------------
+ from gallery.tasks import process_patient_picture
@receiver(post_save, sender='gallery.Patient')
- def send_notifications(sender, instance, *args, **kwargs):
- """ Notify users that a reply has been posted """
+ def process_picture(sender, instance, update_fields, *args, **kwargs):
+ """
+ Process a patients picture.
+
+ This involves checking for different orientations as well as
+ generating a thumbnail for the picture.
+
+ Args:
+ sender:
+ The sender of the save event.
+ instance:
+ The Patient instance being saved.
+ update_fields:
+ The fields that were updated in the save.
+ *args:
+ Additional arguments.
+ **kwargs:
+ Additional keyword arguments.
+ """
+ if not update_fields or 'thumbnail' not in update_fields:
- process_patient_picture.delay(instance)
+ process_patient_picture.delay(instance)
? ++++
|
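The update_fields guard is what breaks the recursion: saving a Patient fires post_save, which queues the task, and the task's own save would fire post_save again. A hypothetical sketch of the task side (its body is not shown in this record; make_thumbnail is an assumed helper):

def process_patient_picture(instance):
    instance.thumbnail = make_thumbnail(instance.picture)  # assumed helper
    # Saving with update_fields means the signal above sees 'thumbnail'
    # in update_fields and skips re-queueing the task.
    instance.save(update_fields=['thumbnail'])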
62634879192e51b9f938da301534b08cf49d2e85
|
methodMang.py
|
methodMang.py
|
from methods import output, data
import tokenz
import interpreter
intp = interpreter.Interpreter()
class UndefinedFunctionError(Exception): pass
class Call:
def __init__(self, method, args):
self.method = method
self.a = args
self.vals = []
for t in self.a:
self.vals.append(str(t.val))
self.valid = []
self.valid = self.valid + [(output.Output().methods, output.Output())]
self.valid = self.valid + [(data.Data().methods, data.Data())]
def run(self):
f = False
for m in self.valid:
if self.method in m[0]:
args2pass = ""
args2pass = " ".join(self.vals)
args2pass = intp.eval(args2pass)
return_val = m[1].funcs[m[0].index(self.method)](args2pass)
f = True
break
if not f:
return_val = None
raise UndefinedFunctionError("Attempted to run function %s, but was undefined" % self.method)
return return_val
|
from methods import io, data
import tokenz
import interpreter
intp = interpreter.Interpreter()
class UndefinedFunctionError(Exception): pass
def reg(it, c):
it.valid = it.valid + [(c().methods, c())]
class Call:
def __init__(self, method, args):
self.method = method
self.a = args
self.vals = []
for t in self.a:
self.vals.append(str(t.val))
self.valid = []
reg(self, io.IO)
reg(self, data.Data)
def run(self):
f = False
for m in self.valid:
if self.method in m[0]:
args2pass = ""
args2pass = " ".join(self.vals)
args2pass = intp.eval(args2pass)
return_val = m[1].funcs[m[0].index(self.method)](args2pass)
f = True
break
if not f:
return_val = None
raise UndefinedFunctionError("Attempted to run function %s, but was undefined" % self.method)
return return_val
|
Rename Output + smaller Register
|
Rename Output + smaller Register
|
Python
|
mit
|
Icelys/Scotch-Language
|
- from methods import output, data
+ from methods import io, data
import tokenz
import interpreter
intp = interpreter.Interpreter()
class UndefinedFunctionError(Exception): pass
+ def reg(it, c):
+ it.valid = it.valid + [(c().methods, c())]
class Call:
def __init__(self, method, args):
self.method = method
self.a = args
self.vals = []
for t in self.a:
self.vals.append(str(t.val))
self.valid = []
-
- self.valid = self.valid + [(output.Output().methods, output.Output())]
- self.valid = self.valid + [(data.Data().methods, data.Data())]
+
+ reg(self, io.IO)
+ reg(self, data.Data)
def run(self):
f = False
for m in self.valid:
if self.method in m[0]:
args2pass = ""
args2pass = " ".join(self.vals)
args2pass = intp.eval(args2pass)
return_val = m[1].funcs[m[0].index(self.method)](args2pass)
f = True
break
if not f:
return_val = None
raise UndefinedFunctionError("Attempted to run function %s, but was undefined" % self.method)
return return_val
|
Rename Output + smaller Register
|
## Code Before:
from methods import output, data
import tokenz
import interpreter
intp = interpreter.Interpreter()
class UndefinedFunctionError(Exception): pass
class Call:
def __init__(self, method, args):
self.method = method
self.a = args
self.vals = []
for t in self.a:
self.vals.append(str(t.val))
self.valid = []
self.valid = self.valid + [(output.Output().methods, output.Output())]
self.valid = self.valid + [(data.Data().methods, data.Data())]
def run(self):
f = False
for m in self.valid:
if self.method in m[0]:
args2pass = ""
args2pass = " ".join(self.vals)
args2pass = intp.eval(args2pass)
return_val = m[1].funcs[m[0].index(self.method)](args2pass)
f = True
break
if not f:
return_val = None
raise UndefinedFunctionError("Attempted to run function %s, but was undefined" % self.method)
return return_val
## Instruction:
Rename Output + smaller Register
## Code After:
from methods import io, data
import tokenz
import interpreter
intp = interpreter.Interpreter()
class UndefinedFunctionError(Exception): pass
def reg(it, c):
it.valid = it.valid + [(c().methods, c())]
class Call:
def __init__(self, method, args):
self.method = method
self.a = args
self.vals = []
for t in self.a:
self.vals.append(str(t.val))
self.valid = []
reg(self, io.IO)
reg(self, data.Data)
def run(self):
f = False
for m in self.valid:
if self.method in m[0]:
args2pass = ""
args2pass = " ".join(self.vals)
args2pass = intp.eval(args2pass)
return_val = m[1].funcs[m[0].index(self.method)](args2pass)
f = True
break
if not f:
return_val = None
raise UndefinedFunctionError("Attempted to run function %s, but was undefined" % self.method)
return return_val
|
- from methods import output, data
? -----
+ from methods import io, data
? +
import tokenz
import interpreter
intp = interpreter.Interpreter()
class UndefinedFunctionError(Exception): pass
+ def reg(it, c):
+ it.valid = it.valid + [(c().methods, c())]
class Call:
def __init__(self, method, args):
self.method = method
self.a = args
self.vals = []
for t in self.a:
self.vals.append(str(t.val))
self.valid = []
-
- self.valid = self.valid + [(output.Output().methods, output.Output())]
- self.valid = self.valid + [(data.Data().methods, data.Data())]
+
+ reg(self, io.IO)
+ reg(self, data.Data)
def run(self):
f = False
for m in self.valid:
if self.method in m[0]:
args2pass = ""
args2pass = " ".join(self.vals)
args2pass = intp.eval(args2pass)
return_val = m[1].funcs[m[0].index(self.method)](args2pass)
f = True
break
if not f:
return_val = None
raise UndefinedFunctionError("Attempted to run function %s, but was undefined" % self.method)
return return_val
|
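One detail of the new reg helper: c() is called twice, so each method provider is instantiated twice per registration. A sketch of a variant that builds one instance and appends in place; behaviour is otherwise identical:

def reg(it, c):
    inst = c()  # instantiate the method provider once
    it.valid.append((inst.methods, inst))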
9ed49cee1ce669547f6d0278af00c3ad246fec78
|
migrations/versions/201608181200_11890f58b1df_add_tracks.py
|
migrations/versions/201608181200_11890f58b1df_add_tracks.py
|
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
sa.Column('id', sa.Integer(), nullable=False, index=True),
sa.Column('title', sa.String(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
|
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False, index=True),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
|
Fix incorrect indexes in alembic revision
|
Fix incorrect indexes in alembic revision
|
Python
|
mit
|
ThiefMaster/indico,ThiefMaster/indico,mic4ael/indico,pferreir/indico,pferreir/indico,mvidalgarcia/indico,indico/indico,ThiefMaster/indico,mvidalgarcia/indico,indico/indico,OmeGak/indico,mic4ael/indico,OmeGak/indico,mvidalgarcia/indico,DirkHoffmann/indico,mic4ael/indico,DirkHoffmann/indico,mvidalgarcia/indico,OmeGak/indico,DirkHoffmann/indico,pferreir/indico,mic4ael/indico,OmeGak/indico,indico/indico,ThiefMaster/indico,DirkHoffmann/indico,pferreir/indico,indico/indico
|
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
- sa.Column('id', sa.Integer(), nullable=False, index=True),
+ sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=False),
- sa.Column('event_id', sa.Integer(), nullable=False),
+ sa.Column('event_id', sa.Integer(), nullable=False, index=True),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
|
Fix incorrect indexes in alembic revision
|
## Code Before:
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
sa.Column('id', sa.Integer(), nullable=False, index=True),
sa.Column('title', sa.String(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
## Instruction:
Fix incorrect indexes in alembic revision
## Code After:
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False, index=True),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
|
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
- sa.Column('id', sa.Integer(), nullable=False, index=True),
? ------------
+ sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=False),
- sa.Column('event_id', sa.Integer(), nullable=False),
+ sa.Column('event_id', sa.Integer(), nullable=False, index=True),
? ++++++++++++
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
|
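The fix moves `index=True` off the primary key (which already gets a unique index) onto the foreign-key column, where an explicit index helps joins against `events`. A hedged sketch of checking the result with SQLAlchemy's inspector, on a throwaway SQLite engine rather than the project's PostgreSQL `events` schema:

```python
import sqlalchemy as sa

engine = sa.create_engine("sqlite://")
meta = sa.MetaData()
sa.Table(
    "tracks", meta,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("event_id", sa.Integer, nullable=False, index=True),
)
meta.create_all(engine)

# only event_id should carry an explicit secondary index
for ix in sa.inspect(engine).get_indexes("tracks"):
    print(ix["name"], ix["column_names"])
```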
d5f02b13db9b6d23e15bc07a985b8c67644ffb44
|
pyclibrary/__init__.py
|
pyclibrary/__init__.py
|
from __future__ import (division, unicode_literals, print_function,
absolute_import)
from .c_parser import win_defs, CParser
from .c_library import CLibrary, address_of, build_array
from .errors import DefinitionError
from .init import init, auto_init
|
from __future__ import (division, unicode_literals, print_function,
absolute_import)
import logging
logging.getLogger('pyclibrary').addHandler(logging.NullHandler())
from .c_parser import win_defs, CParser
from .c_library import CLibrary, address_of, build_array
from .errors import DefinitionError
from .init import init, auto_init
|
Add NullHandler to avoid logging complaining for nothing.
|
Add NullHandler to avoid logging complaining for nothing.
|
Python
|
mit
|
MatthieuDartiailh/pyclibrary,mrh1997/pyclibrary,mrh1997/pyclibrary,MatthieuDartiailh/pyclibrary,mrh1997/pyclibrary,duguxy/pyclibrary,duguxy/pyclibrary,duguxy/pyclibrary
|
from __future__ import (division, unicode_literals, print_function,
absolute_import)
+ import logging
+ logging.getLogger('pyclibrary').addHandler(logging.NullHandler())
from .c_parser import win_defs, CParser
from .c_library import CLibrary, address_of, build_array
from .errors import DefinitionError
from .init import init, auto_init
|
Add NullHandler to avoid logging complaining for nothing.
|
## Code Before:
from __future__ import (division, unicode_literals, print_function,
absolute_import)
from .c_parser import win_defs, CParser
from .c_library import CLibrary, address_of, build_array
from .errors import DefinitionError
from .init import init, auto_init
## Instruction:
Add NullHandler to avoid logging complaining for nothing.
## Code After:
from __future__ import (division, unicode_literals, print_function,
absolute_import)
import logging
logging.getLogger('pyclibrary').addHandler(logging.NullHandler())
from .c_parser import win_defs, CParser
from .c_library import CLibrary, address_of, build_array
from .errors import DefinitionError
from .init import init, auto_init
|
from __future__ import (division, unicode_literals, print_function,
absolute_import)
+ import logging
+ logging.getLogger('pyclibrary').addHandler(logging.NullHandler())
from .c_parser import win_defs, CParser
from .c_library import CLibrary, address_of, build_array
from .errors import DefinitionError
from .init import init, auto_init
|
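This is the standard library/application split: a library attaches `NullHandler` to its logger so importing it never emits the old "No handlers could be found" warning, and output only appears once the application configures logging. A small sketch of both sides:

```python
import logging

# library side (done once at import time, as in pyclibrary/__init__.py)
logging.getLogger("pyclibrary").addHandler(logging.NullHandler())

# without configuration this record is silently swallowed
logging.getLogger("pyclibrary").debug("dropped")

# application side: opt in, and records now reach the console
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("pyclibrary").debug("visible after basicConfig")
```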
acd84f19d8d8820aecdba62bf4d0c97a2d4bdf34
|
src/source_weather/source_weather.py
|
src/source_weather/source_weather.py
|
from src.source import Source
class SourceMock(Source):
"""Add a funny key with a funny value in the given dict"""
    def __init__(self, funny_message="Java.OutOfMemoryError",
                 funny_key="Who's there ?"):
self.funny_message = funny_message
self.funny_key = funny_key
def enrichment(self, data_dict):
data_dict[self.funny_key] = self.funny_message
return data_dict
def keywords(self):
return {self.funny_key}
|
from src.source import Source
from . import weather
class SourceWeaver(Source):
"""
    Through Open Weather Map generates today's weather and
    expected weather for the next days, if possible
"""
def enrichment(self, data_dict):
if default.FIELD_COORDINATES in data_dict:
lat, lon = data_dict[default.FIELD_COORDINATES]
data_dict[default.FIELD_WEATHER] = weather.actual(lat, lon)
if default.FIELD_DATE in data_dict:
date = data_dict[default.FIELD_DATE]
if weather.is_predictable(date):
data_dict[default.FIELD_WEATHER_PREDICTED] = weather.predicted(lat, lon)[str(default.FIELD_DATE)]
return data_dict
def keywords(self):
return {default.FIELD_WEATHER_PREDICTED,
default.FIELD_WEATHER}
|
Access to actual or predicted weather done
|
Access to actual or predicted weather done
|
Python
|
unlicense
|
Aluriak/24hducode2016,Aluriak/24hducode2016
|
from src.source import Source
+ from . import weather
- class SourceMock(Source):
+ class SourceWeaver(Source):
- """Add a funny key with a funny value in the given dict"""
+ """
+    Through Open Weather Map generates today's weather and
+    expected weather for the next days, if possible
+ """
-    def __init__(self, funny_message="Java.OutOfMemoryError",
-                 funny_key="Who's there ?"):
- self.funny_message = funny_message
- self.funny_key = funny_key
def enrichment(self, data_dict):
- data_dict[self.funny_key] = self.funny_message
+ if default.FIELD_COORDINATES in data_dict:
+ lat, lon = data_dict[default.FIELD_COORDINATES]
+ data_dict[default.FIELD_WEATHER] = weather.actual(lat, lon)
+ if default.FIELD_DATE in data_dict:
+ date = data_dict[default.FIELD_DATE]
+ if weather.is_predictable(date):
+ data_dict[default.FIELD_WEATHER_PREDICTED] = weather.predicted(lat, lon)[str(default.FIELD_DATE)]
+
return data_dict
def keywords(self):
- return {self.funny_key}
+ return {default.FIELD_WEATHER_PREDICTED,
+ default.FIELD_WEATHER}
|
Access to actual or predicted weather done
|
## Code Before:
from src.source import Source
class SourceMock(Source):
"""Add a funny key with a funny value in the given dict"""
    def __init__(self, funny_message="Java.OutOfMemoryError",
                 funny_key="Who's there ?"):
self.funny_message = funny_message
self.funny_key = funny_key
def enrichment(self, data_dict):
data_dict[self.funny_key] = self.funny_message
return data_dict
def keywords(self):
return {self.funny_key}
## Instruction:
Access to actual or predicted weather done
## Code After:
from src.source import Source
from . import weather
class SourceWeaver(Source):
"""
    Through Open Weather Map generates today's weather and
    expected weather for the next days, if possible
"""
def enrichment(self, data_dict):
if default.FIELD_COORDINATES in data_dict:
lat, lon = data_dict[default.FIELD_COORDINATES]
data_dict[default.FIELD_WEATHER] = weather.actual(lat, lon)
if default.FIELD_DATE in data_dict:
date = data_dict[default.FIELD_DATE]
if weather.is_predictable(date):
data_dict[default.FIELD_WEATHER_PREDICTED] = weather.predicted(lat, lon)[str(default.FIELD_DATE)]
return data_dict
def keywords(self):
return {default.FIELD_WEATHER_PREDICTED,
default.FIELD_WEATHER}
|
from src.source import Source
+ from . import weather
- class SourceMock(Source):
? ^^^^
+ class SourceWeaver(Source):
? ^^^^^^
- """Add a funny key with a funny value in the given dict"""
+ """
+    Through Open Weather Map generates today's weather and
+    expected weather for the next days, if possible
+ """
-    def __init__(self, funny_message="Java.OutOfMemoryError",
-                 funny_key="Who's there ?"):
- self.funny_message = funny_message
- self.funny_key = funny_key
def enrichment(self, data_dict):
- data_dict[self.funny_key] = self.funny_message
+ if default.FIELD_COORDINATES in data_dict:
+ lat, lon = data_dict[default.FIELD_COORDINATES]
+ data_dict[default.FIELD_WEATHER] = weather.actual(lat, lon)
+ if default.FIELD_DATE in data_dict:
+ date = data_dict[default.FIELD_DATE]
+ if weather.is_predictable(date):
+ data_dict[default.FIELD_WEATHER_PREDICTED] = weather.predicted(lat, lon)[str(default.FIELD_DATE)]
+
return data_dict
def keywords(self):
- return {self.funny_key}
+ return {default.FIELD_WEATHER_PREDICTED,
+ default.FIELD_WEATHER}
|
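The new `SourceWeaver.enrichment` reads its keys from a `default` constants module that the snippet never imports, so the flow below is a hedged reconstruction: the `FIELD_*` names and the `weather` stub are assumptions for illustration, not the project's actual definitions.

```python
class default(object):  # hypothetical constants module (not imported in the commit)
    FIELD_COORDINATES = "coordinates"
    FIELD_DATE = "date"
    FIELD_WEATHER = "weather"
    FIELD_WEATHER_PREDICTED = "weather_predicted"

class weather(object):  # stub standing in for the Open Weather Map wrapper
    @staticmethod
    def actual(lat, lon):
        return {"temp": 21, "sky": "clear"}

data = {default.FIELD_COORDINATES: (48.85, 2.35)}
if default.FIELD_COORDINATES in data:
    lat, lon = data[default.FIELD_COORDINATES]
    data[default.FIELD_WEATHER] = weather.actual(lat, lon)
print(data)
```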
07d113e4604994bf1857b3ae7201571776b65154
|
etl/make_feature_tsv.py
|
etl/make_feature_tsv.py
|
import string, sys
import h5py
import numpy as np
hF = h5py.File(sys.argv[1])
group = "mm10"
indptr = hF[group +"/indptr"]
indices = hF[group + "/indices"]
data = hF[group + "/data"]
genes = hF[group + "/genes"]
gene_names = hF[group + "/gene_names"]
barcodes = hF[group + "/barcodes"]
shape = hF[group + "/shape"]
rowN = shape[0]
colN = shape[1]
counter_indptr_size = rowN
fout = open("features.tsv",'w')
for i in range (0, len(genes)):
fout.write("{} {} {}".format(i, genes[i], gene_names[i]))
|
import string, sys
import h5py
import numpy as np
hF = h5py.File(sys.argv[1])
group = "mm10"
indptr = hF[group +"/indptr"]
indices = hF[group + "/indices"]
data = hF[group + "/data"]
genes = hF[group + "/genes"]
gene_names = hF[group + "/gene_names"]
barcodes = hF[group + "/barcodes"]
shape = hF[group + "/shape"]
rowN = shape[0]
colN = shape[1]
counter_indptr_size = rowN
fout = open("features.tsv",'w')
fout.write("index\tfeature\tfeature_name\n")
for i in range (0, len(genes)):
fout.write("{}\t{}\t{}\n".format(i, genes[i], gene_names[i]))
|
Make a tsv instead of a long string
|
Make a tsv instead of a long string
|
Python
|
apache-2.0
|
david4096/celldb
|
import string, sys
import h5py
import numpy as np
hF = h5py.File(sys.argv[1])
group = "mm10"
indptr = hF[group +"/indptr"]
indices = hF[group + "/indices"]
data = hF[group + "/data"]
genes = hF[group + "/genes"]
gene_names = hF[group + "/gene_names"]
barcodes = hF[group + "/barcodes"]
shape = hF[group + "/shape"]
rowN = shape[0]
colN = shape[1]
counter_indptr_size = rowN
fout = open("features.tsv",'w')
+ fout.write("index\tfeature\tfeature_name\n")
+ for i in range (0, len(genes)):
+ fout.write("{}\t{}\t{}\n".format(i, genes[i], gene_names[i]))
- for i in range (0, len(genes)):
- fout.write("{} {} {}".format(i, genes[i], gene_names[i]))
-
|
Make a tsv instead of a long string
|
## Code Before:
import string, sys
import h5py
import numpy as np
hF = h5py.File(sys.argv[1])
group = "mm10"
indptr = hF[group +"/indptr"]
indices = hF[group + "/indices"]
data = hF[group + "/data"]
genes = hF[group + "/genes"]
gene_names = hF[group + "/gene_names"]
barcodes = hF[group + "/barcodes"]
shape = hF[group + "/shape"]
rowN = shape[0]
colN = shape[1]
counter_indptr_size = rowN
fout = open("features.tsv",'w')
for i in range (0, len(genes)):
fout.write("{} {} {}".format(i, genes[i], gene_names[i]))
## Instruction:
Make a tsv instead of a long string
## Code After:
import string, sys
import h5py
import numpy as np
hF = h5py.File(sys.argv[1])
group = "mm10"
indptr = hF[group +"/indptr"]
indices = hF[group + "/indices"]
data = hF[group + "/data"]
genes = hF[group + "/genes"]
gene_names = hF[group + "/gene_names"]
barcodes = hF[group + "/barcodes"]
shape = hF[group + "/shape"]
rowN = shape[0]
colN = shape[1]
counter_indptr_size = rowN
fout = open("features.tsv",'w')
fout.write("index\tfeature\tfeature_name\n")
for i in range (0, len(genes)):
fout.write("{}\t{}\t{}\n".format(i, genes[i], gene_names[i]))
|
import string, sys
import h5py
import numpy as np
hF = h5py.File(sys.argv[1])
group = "mm10"
indptr = hF[group +"/indptr"]
indices = hF[group + "/indices"]
data = hF[group + "/data"]
genes = hF[group + "/genes"]
gene_names = hF[group + "/gene_names"]
barcodes = hF[group + "/barcodes"]
shape = hF[group + "/shape"]
rowN = shape[0]
colN = shape[1]
counter_indptr_size = rowN
fout = open("features.tsv",'w')
-
+ fout.write("index\tfeature\tfeature_name\n")
for i in range (0, len(genes)):
- fout.write("{} {} {}".format(i, genes[i], gene_names[i]))
? ^ ^
+ fout.write("{}\t{}\t{}\n".format(i, genes[i], gene_names[i]))
? ^^ ^^ ++
|
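With the header row and `\t`-joined fields, the output is a proper TSV that downstream tools can parse; a quick sketch of reading it back with the csv module (pandas' `read_csv(..., sep="\t")` would work just as well):

```python
import csv
import io

sample = "index\tfeature\tfeature_name\n0\tENSMUSG0001\tXkr4\n"
for row in csv.DictReader(io.StringIO(sample), delimiter="\t"):
    print(row["index"], row["feature"], row["feature_name"])
```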
132b354f03d10ebc5a55152fef30ffbfb4b82a28
|
tests/dev/test_horoscope.py
|
tests/dev/test_horoscope.py
|
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
self.assertTrue(type(result.output.encode('ascii', 'ignore')) == str)
|
import sys
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
if sys.version_info[0] == 3:
string_types = str
else:
string_types = basestring
self.assertIsInstance(result.output, string_types)
|
Fix broken test for Python 2.x/3.x
|
Fix broken test for Python 2.x/3.x
|
Python
|
mit
|
dude-pa/dude
|
+ import sys
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
- self.assertTrue(type(result.output.encode('ascii', 'ignore')) == str)
+ if sys.version_info[0] == 3:
+ string_types = str
+ else:
+ string_types = basestring
+
+ self.assertIsInstance(result.output, string_types)
+
|
Fix broken test for Python 2.x/3.x
|
## Code Before:
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
self.assertTrue(type(result.output.encode('ascii', 'ignore')) == str)
## Instruction:
Fix broken test for Python 2.x/3.x
## Code After:
import sys
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
if sys.version_info[0] == 3:
string_types = str
else:
string_types = basestring
self.assertIsInstance(result.output, string_types)
|
+ import sys
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
- self.assertTrue(type(result.output.encode('ascii', 'ignore')) == str)
+
+ if sys.version_info[0] == 3:
+ string_types = str
+ else:
+ string_types = basestring
+
+ self.assertIsInstance(result.output, string_types)
|
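`result.output` is unicode text on both interpreters, so the fix asserts against the right string type per version instead of forcing an ASCII encode. One nuance worth noting: `isinstance` also accepts a tuple of types, and projects that already depend on the `six` library often use `six.string_types` for the same check:

```python
import sys

if sys.version_info[0] == 3:
    string_types = (str,)
else:
    string_types = (basestring,)  # noqa: F821 -- only evaluated on Python 2

assert isinstance("aries", string_types)
```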
d0f2b11fb67655b884f298bd8c1bf6be8396de4f
|
mail/email.py
|
mail/email.py
|
from django.conf import settings
from mail import models as mail_api
from groups import models as group_api
from mailgun import api as mailgun_api
def send_email( email_uri ):
""" Send the email to the intended target audience """
email = mail_api.get_email(email_uri)
if email['audience'] == 'groups':
to_address = ','.join([g['address'] for g in group_api.get_groups(email['sequence'])])
elif email['audience'] == 'individuals':
to_address = 'sequence-{0}-all@{1}'.format(email['sequence'], settings.EMAIL_DOMAIN)
mailgun_api.send_email(
to_address,
settings.DEFAULT_FROM_EMAIL,
email['subject'],
email['text_body'],
email['html_body'],
email['tags'].split(','),
'sequence-{0}-campaign'
)
mail_api.mark_sent(email_uri)
|
from django.conf import settings
from mail import models as mail_api
from groups import models as group_api
from mailgun import api as mailgun_api
def send_email( email_uri ):
""" Send the email to the intended target audience """
email = mail_api.get_email(email_uri)
if email['audience'] == 'groups':
to_address = ','.join([g['address'] for g in group_api.get_groups(email['sequence'])])
elif email['audience'] == 'individuals':
to_address = 'sequence-{0}-all@{1}'.format(email['sequence'], settings.EMAIL_DOMAIN)
mailgun_api.send_email(
to_address,
settings.DEFAULT_FROM_EMAIL,
email['subject'],
email['text_body'],
email['html_body'],
email['tags'].split(','),
'sequence-{0}-campaign'.format(email['sequence'])
)
mail_api.mark_sent(email_uri)
|
Fix bug with campaign id
|
Fix bug with campaign id
|
Python
|
mit
|
p2pu/mechanical-mooc,p2pu/mechanical-mooc,p2pu/mechanical-mooc,p2pu/mechanical-mooc
|
from django.conf import settings
from mail import models as mail_api
from groups import models as group_api
from mailgun import api as mailgun_api
def send_email( email_uri ):
""" Send the email to the intended target audience """
email = mail_api.get_email(email_uri)
if email['audience'] == 'groups':
to_address = ','.join([g['address'] for g in group_api.get_groups(email['sequence'])])
elif email['audience'] == 'individuals':
to_address = 'sequence-{0}-all@{1}'.format(email['sequence'], settings.EMAIL_DOMAIN)
mailgun_api.send_email(
to_address,
settings.DEFAULT_FROM_EMAIL,
email['subject'],
email['text_body'],
email['html_body'],
email['tags'].split(','),
- 'sequence-{0}-campaign'
+ 'sequence-{0}-campaign'.format(email['sequence'])
)
mail_api.mark_sent(email_uri)
|
Fix bug with campaign id
|
## Code Before:
from django.conf import settings
from mail import models as mail_api
from groups import models as group_api
from mailgun import api as mailgun_api
def send_email( email_uri ):
""" Send the email to the intended target audience """
email = mail_api.get_email(email_uri)
if email['audience'] == 'groups':
to_address = ','.join([g['address'] for g in group_api.get_groups(email['sequence'])])
elif email['audience'] == 'individuals':
to_address = 'sequence-{0}-all@{1}'.format(email['sequence'], settings.EMAIL_DOMAIN)
mailgun_api.send_email(
to_address,
settings.DEFAULT_FROM_EMAIL,
email['subject'],
email['text_body'],
email['html_body'],
email['tags'].split(','),
'sequence-{0}-campaign'
)
mail_api.mark_sent(email_uri)
## Instruction:
Fix bug with campaign id
## Code After:
from django.conf import settings
from mail import models as mail_api
from groups import models as group_api
from mailgun import api as mailgun_api
def send_email( email_uri ):
""" Send the email to the intended target audience """
email = mail_api.get_email(email_uri)
if email['audience'] == 'groups':
to_address = ','.join([g['address'] for g in group_api.get_groups(email['sequence'])])
elif email['audience'] == 'individuals':
to_address = 'sequence-{0}-all@{1}'.format(email['sequence'], settings.EMAIL_DOMAIN)
mailgun_api.send_email(
to_address,
settings.DEFAULT_FROM_EMAIL,
email['subject'],
email['text_body'],
email['html_body'],
email['tags'].split(','),
'sequence-{0}-campaign'.format(email['sequence'])
)
mail_api.mark_sent(email_uri)
|
from django.conf import settings
from mail import models as mail_api
from groups import models as group_api
from mailgun import api as mailgun_api
def send_email( email_uri ):
""" Send the email to the intended target audience """
email = mail_api.get_email(email_uri)
if email['audience'] == 'groups':
to_address = ','.join([g['address'] for g in group_api.get_groups(email['sequence'])])
elif email['audience'] == 'individuals':
to_address = 'sequence-{0}-all@{1}'.format(email['sequence'], settings.EMAIL_DOMAIN)
mailgun_api.send_email(
to_address,
settings.DEFAULT_FROM_EMAIL,
email['subject'],
email['text_body'],
email['html_body'],
email['tags'].split(','),
- 'sequence-{0}-campaign'
+ 'sequence-{0}-campaign'.format(email['sequence'])
)
mail_api.mark_sent(email_uri)
|
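The bug is easy to demonstrate: without `.format()` the placeholder string is passed to Mailgun verbatim, so every sequence shared one literal campaign id:

```python
sequence = 3
broken = 'sequence-{0}-campaign'                  # template sent as-is
fixed = 'sequence-{0}-campaign'.format(sequence)  # per-sequence id

print(broken)  # sequence-{0}-campaign
print(fixed)   # sequence-3-campaign
```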
d16c99033f10be0b35a3d2bb18914d364c51b677
|
metro_sale/sale_product.py
|
metro_sale/sale_product.py
|
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
_name = "sale.product"
_description = "Sale Product"
_columns = {
'name': fields.char('ID', size=32, required=True),
'note': fields.char('Description', size=128, required=False),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
}
|
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
_name = "sale.product"
_description = "Sale Product"
_columns = {
'name': fields.char('ID', size=32, required=True),
'note': fields.char('Description', size=128, required=False),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
}
_sql_constraints = [
('name_uniq', 'unique(name)', 'ID must be unique!'),
]
|
Add the ID unique constraint
|
Add the ID unique constraint
|
Python
|
agpl-3.0
|
john-wang-metro/metro-openerp,837278709/metro-openerp,john-wang-metro/metro-openerp,837278709/metro-openerp,john-wang-metro/metro-openerp,837278709/metro-openerp
|
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
_name = "sale.product"
_description = "Sale Product"
_columns = {
'name': fields.char('ID', size=32, required=True),
'note': fields.char('Description', size=128, required=False),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
}
-
+ _sql_constraints = [
+ ('name_uniq', 'unique(name)', 'ID must be unique!'),
+ ]
|
Add the ID unique constraint
|
## Code Before:
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
_name = "sale.product"
_description = "Sale Product"
_columns = {
'name': fields.char('ID', size=32, required=True),
'note': fields.char('Description', size=128, required=False),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
}
## Instruction:
Add the ID unique constraint
## Code After:
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
_name = "sale.product"
_description = "Sale Product"
_columns = {
'name': fields.char('ID', size=32, required=True),
'note': fields.char('Description', size=128, required=False),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
}
_sql_constraints = [
('name_uniq', 'unique(name)', 'ID must be unique!'),
]
|
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
_name = "sale.product"
_description = "Sale Product"
_columns = {
'name': fields.char('ID', size=32, required=True),
'note': fields.char('Description', size=128, required=False),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
}
+ _sql_constraints = [
+ ('name_uniq', 'unique(name)', 'ID must be unique!'),
+ ]
|
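OpenERP turns `_sql_constraints` entries into database-level constraints, so duplicates are rejected by the database itself rather than by application code. A hedged, framework-free sketch of the effect using sqlite3 (the real deployment would be PostgreSQL):

```python
import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE sale_product (name TEXT, CONSTRAINT name_uniq UNIQUE (name))")
con.execute("INSERT INTO sale_product VALUES ('SP-001')")
try:
    con.execute("INSERT INTO sale_product VALUES ('SP-001')")
except sqlite3.IntegrityError as exc:
    print("duplicate ID rejected:", exc)
```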
c0a74c86e772185d35f0e6049e0ce04fcdb30793
|
chatterbot/adapters/io/multi_adapter.py
|
chatterbot/adapters/io/multi_adapter.py
|
from .io import IOAdapter
class MultiIOAdapter(IOAdapter):
def __init__(self, **kwargs):
super(MultiIOAdapter, self).__init__(**kwargs)
self.adapters = []
def process_input(self, *args, **kwargs):
"""
Returns data retrieved from the input source.
"""
if self.adapters is not []:
return self.adapters[0].process_input(*args, **kwargs)
def process_response(self, statement):
"""
Takes an input value.
Returns an output value.
"""
for adapter in self.adapters:
adapter.process_response(statement)
return self.adapters[0].process_response(statement)
def add_adapter(self, adapter):
self.adapters.append(adapter)
def set_context(self, context):
"""
Set the context for each of the contained io adapters.
"""
super(MultiIOAdapter, self).set_context(context)
for adapter in self.adapters:
adapter.set_context(context)
|
from .io import IOAdapter
class MultiIOAdapter(IOAdapter):
def __init__(self, **kwargs):
super(MultiIOAdapter, self).__init__(**kwargs)
self.adapters = []
def process_input(self, *args, **kwargs):
"""
Returns data retrieved from the input source.
"""
if self.adapters is not []:
return self.adapters[0].process_input(*args, **kwargs)
def process_response(self, statement):
"""
Takes an input value.
Returns an output value.
"""
for i in range(1, len(self.adapters)):
self.adapters[i].process_response(statement)
return self.adapters[0].process_response(statement)
def add_adapter(self, adapter):
self.adapters.append(adapter)
def set_context(self, context):
"""
Set the context for each of the contained io adapters.
"""
super(MultiIOAdapter, self).set_context(context)
for adapter in self.adapters:
adapter.set_context(context)
|
Fix first io adapter being called twice.
|
Fix first io adapter being called twice.
|
Python
|
bsd-3-clause
|
Reinaesaya/OUIRL-ChatBot,maclogan/VirtualPenPal,Reinaesaya/OUIRL-ChatBot,Gustavo6046/ChatterBot,davizucon/ChatterBot,gunthercox/ChatterBot,vkosuri/ChatterBot
|
from .io import IOAdapter
class MultiIOAdapter(IOAdapter):
def __init__(self, **kwargs):
super(MultiIOAdapter, self).__init__(**kwargs)
self.adapters = []
def process_input(self, *args, **kwargs):
"""
Returns data retrieved from the input source.
"""
if self.adapters is not []:
return self.adapters[0].process_input(*args, **kwargs)
def process_response(self, statement):
"""
Takes an input value.
Returns an output value.
"""
- for adapter in self.adapters:
+ for i in range(1, len(self.adapters)):
- adapter.process_response(statement)
+ self.adapters[i].process_response(statement)
return self.adapters[0].process_response(statement)
def add_adapter(self, adapter):
self.adapters.append(adapter)
def set_context(self, context):
"""
Set the context for each of the contained io adapters.
"""
super(MultiIOAdapter, self).set_context(context)
for adapter in self.adapters:
adapter.set_context(context)
|
Fix first io adapter being called twice.
|
## Code Before:
from .io import IOAdapter
class MultiIOAdapter(IOAdapter):
def __init__(self, **kwargs):
super(MultiIOAdapter, self).__init__(**kwargs)
self.adapters = []
def process_input(self, *args, **kwargs):
"""
Returns data retrieved from the input source.
"""
if self.adapters is not []:
return self.adapters[0].process_input(*args, **kwargs)
def process_response(self, statement):
"""
Takes an input value.
Returns an output value.
"""
for adapter in self.adapters:
adapter.process_response(statement)
return self.adapters[0].process_response(statement)
def add_adapter(self, adapter):
self.adapters.append(adapter)
def set_context(self, context):
"""
Set the context for each of the contained io adapters.
"""
super(MultiIOAdapter, self).set_context(context)
for adapter in self.adapters:
adapter.set_context(context)
## Instruction:
Fix first io adapter being called twice.
## Code After:
from .io import IOAdapter
class MultiIOAdapter(IOAdapter):
def __init__(self, **kwargs):
super(MultiIOAdapter, self).__init__(**kwargs)
self.adapters = []
def process_input(self, *args, **kwargs):
"""
Returns data retrieved from the input source.
"""
if self.adapters is not []:
return self.adapters[0].process_input(*args, **kwargs)
def process_response(self, statement):
"""
Takes an input value.
Returns an output value.
"""
for i in range(1, len(self.adapters)):
self.adapters[i].process_response(statement)
return self.adapters[0].process_response(statement)
def add_adapter(self, adapter):
self.adapters.append(adapter)
def set_context(self, context):
"""
Set the context for each of the contained io adapters.
"""
super(MultiIOAdapter, self).set_context(context)
for adapter in self.adapters:
adapter.set_context(context)
|
from .io import IOAdapter
class MultiIOAdapter(IOAdapter):
def __init__(self, **kwargs):
super(MultiIOAdapter, self).__init__(**kwargs)
self.adapters = []
def process_input(self, *args, **kwargs):
"""
Returns data retrieved from the input source.
"""
if self.adapters is not []:
return self.adapters[0].process_input(*args, **kwargs)
def process_response(self, statement):
"""
Takes an input value.
Returns an output value.
"""
- for adapter in self.adapters:
+ for i in range(1, len(self.adapters)):
- adapter.process_response(statement)
+ self.adapters[i].process_response(statement)
? +++++ ++++
return self.adapters[0].process_response(statement)
def add_adapter(self, adapter):
self.adapters.append(adapter)
def set_context(self, context):
"""
Set the context for each of the contained io adapters.
"""
super(MultiIOAdapter, self).set_context(context)
for adapter in self.adapters:
adapter.set_context(context)
|
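A minimal reproduction of the bug being fixed: looping over every adapter and then calling `adapters[0]` again for the return value delivers the statement to the first adapter twice. Starting the loop at index 1 sends it to each adapter exactly once:

```python
class Adapter(object):
    def __init__(self, name):
        self.name = name

    def process_response(self, statement):
        print(self.name, "handled", statement)
        return statement

adapters = [Adapter("a0"), Adapter("a1")]

# before the fix: a0 handles the statement twice
for a in adapters:
    a.process_response("hi")
adapters[0].process_response("hi")

# after the fix: a0 handles it exactly once
for i in range(1, len(adapters)):
    adapters[i].process_response("hi")
adapters[0].process_response("hi")
```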
edcfe2b156af23943478bc86592b4c8d5dc07e10
|
flask_mongoengine/json.py
|
flask_mongoengine/json.py
|
from flask.json import JSONEncoder
from bson import json_util
from mongoengine.base import BaseDocument
from mongoengine import QuerySet
def _make_encoder(superclass):
class MongoEngineJSONEncoder(superclass):
'''
A JSONEncoder which provides serialization of MongoEngine
documents and querysets.
'''
def default(self, obj):
if isinstance(obj, BaseDocument):
return json_util._json_convert(obj.to_mongo())
elif isinstance(obj, QuerySet):
return json_util._json_convert(obj.as_pymongo())
return superclass.default(self, obj)
return MongoEngineJSONEncoder
MongoEngineJSONEncoder = _make_encoder(JSONEncoder)
def overide_json_encoder(app):
'''
A function to dynamically create a new MongoEngineJSONEncoder class
based upon a custom base class.
This function allows us to combine MongoEngine serialization with
any changes to Flask's JSONEncoder which a user may have made
prior to calling init_app.
NOTE: This does not cover situations where users override
an instance's json_encoder after calling init_app.
'''
app.json_encoder = _make_encoder(app.json_encoder)
|
from flask.json import JSONEncoder
from bson import json_util
from mongoengine.base import BaseDocument
try:
from mongoengine.base import BaseQuerySet
except ImportError as ie: # support mongoengine < 0.7
from mongoengine.queryset import QuerySet as BaseQuerySet
def _make_encoder(superclass):
class MongoEngineJSONEncoder(superclass):
'''
A JSONEncoder which provides serialization of MongoEngine
documents and queryset objects.
'''
def default(self, obj):
if isinstance(obj, BaseDocument):
return json_util._json_convert(obj.to_mongo())
elif isinstance(obj, BaseQuerySet):
return json_util._json_convert(obj.as_pymongo())
return superclass.default(self, obj)
return MongoEngineJSONEncoder
MongoEngineJSONEncoder = _make_encoder(JSONEncoder)
def overide_json_encoder(app):
'''
A function to dynamically create a new MongoEngineJSONEncoder class
based upon a custom base class.
This function allows us to combine MongoEngine serialization with
any changes to Flask's JSONEncoder which a user may have made
prior to calling init_app.
NOTE: This does not cover situations where users override
an instance's json_encoder after calling init_app.
'''
app.json_encoder = _make_encoder(app.json_encoder)
|
Support older versions of MongoEngine
|
Support older versions of MongoEngine
|
Python
|
bsd-3-clause
|
gerasim13/flask-mongoengine-1,rochacbruno/flask-mongoengine,quokkaproject/flask-mongoengine,quokkaproject/flask-mongoengine,gerasim13/flask-mongoengine-1,losintikfos/flask-mongoengine,rochacbruno/flask-mongoengine,losintikfos/flask-mongoengine
|
from flask.json import JSONEncoder
from bson import json_util
from mongoengine.base import BaseDocument
+ try:
- from mongoengine import QuerySet
+ from mongoengine.base import BaseQuerySet
-
+ except ImportError as ie: # support mongoengine < 0.7
+ from mongoengine.queryset import QuerySet as BaseQuerySet
def _make_encoder(superclass):
class MongoEngineJSONEncoder(superclass):
'''
A JSONEncoder which provides serialization of MongoEngine
- documents and querysets.
+ documents and queryset objects.
'''
def default(self, obj):
if isinstance(obj, BaseDocument):
return json_util._json_convert(obj.to_mongo())
- elif isinstance(obj, QuerySet):
+ elif isinstance(obj, BaseQuerySet):
return json_util._json_convert(obj.as_pymongo())
return superclass.default(self, obj)
return MongoEngineJSONEncoder
MongoEngineJSONEncoder = _make_encoder(JSONEncoder)
def overide_json_encoder(app):
'''
A function to dynamically create a new MongoEngineJSONEncoder class
based upon a custom base class.
This function allows us to combine MongoEngine serialization with
any changes to Flask's JSONEncoder which a user may have made
prior to calling init_app.
NOTE: This does not cover situations where users override
an instance's json_encoder after calling init_app.
'''
app.json_encoder = _make_encoder(app.json_encoder)
|
Support older versions of MongoEngine
|
## Code Before:
from flask.json import JSONEncoder
from bson import json_util
from mongoengine.base import BaseDocument
from mongoengine import QuerySet
def _make_encoder(superclass):
class MongoEngineJSONEncoder(superclass):
'''
A JSONEncoder which provides serialization of MongoEngine
documents and querysets.
'''
def default(self, obj):
if isinstance(obj, BaseDocument):
return json_util._json_convert(obj.to_mongo())
elif isinstance(obj, QuerySet):
return json_util._json_convert(obj.as_pymongo())
return superclass.default(self, obj)
return MongoEngineJSONEncoder
MongoEngineJSONEncoder = _make_encoder(JSONEncoder)
def overide_json_encoder(app):
'''
A function to dynamically create a new MongoEngineJSONEncoder class
based upon a custom base class.
This function allows us to combine MongoEngine serialization with
any changes to Flask's JSONEncoder which a user may have made
prior to calling init_app.
NOTE: This does not cover situations where users override
an instance's json_encoder after calling init_app.
'''
app.json_encoder = _make_encoder(app.json_encoder)
## Instruction:
Support older versions of MongoEngine
## Code After:
from flask.json import JSONEncoder
from bson import json_util
from mongoengine.base import BaseDocument
try:
from mongoengine.base import BaseQuerySet
except ImportError as ie: # support mongoengine < 0.7
from mongoengine.queryset import QuerySet as BaseQuerySet
def _make_encoder(superclass):
class MongoEngineJSONEncoder(superclass):
'''
A JSONEncoder which provides serialization of MongoEngine
documents and queryset objects.
'''
def default(self, obj):
if isinstance(obj, BaseDocument):
return json_util._json_convert(obj.to_mongo())
elif isinstance(obj, BaseQuerySet):
return json_util._json_convert(obj.as_pymongo())
return superclass.default(self, obj)
return MongoEngineJSONEncoder
MongoEngineJSONEncoder = _make_encoder(JSONEncoder)
def overide_json_encoder(app):
'''
A function to dynamically create a new MongoEngineJSONEncoder class
based upon a custom base class.
This function allows us to combine MongoEngine serialization with
any changes to Flask's JSONEncoder which a user may have made
prior to calling init_app.
NOTE: This does not cover situations where users override
an instance's json_encoder after calling init_app.
'''
app.json_encoder = _make_encoder(app.json_encoder)
|
from flask.json import JSONEncoder
from bson import json_util
from mongoengine.base import BaseDocument
+ try:
- from mongoengine import QuerySet
+ from mongoengine.base import BaseQuerySet
? ++++ +++++ ++++
-
+ except ImportError as ie: # support mongoengine < 0.7
+ from mongoengine.queryset import QuerySet as BaseQuerySet
def _make_encoder(superclass):
class MongoEngineJSONEncoder(superclass):
'''
A JSONEncoder which provides serialization of MongoEngine
- documents and querysets.
+ documents and queryset objects.
? +++++++
'''
def default(self, obj):
if isinstance(obj, BaseDocument):
return json_util._json_convert(obj.to_mongo())
- elif isinstance(obj, QuerySet):
+ elif isinstance(obj, BaseQuerySet):
? ++++
return json_util._json_convert(obj.as_pymongo())
return superclass.default(self, obj)
return MongoEngineJSONEncoder
MongoEngineJSONEncoder = _make_encoder(JSONEncoder)
def overide_json_encoder(app):
'''
A function to dynamically create a new MongoEngineJSONEncoder class
based upon a custom base class.
This function allows us to combine MongoEngine serialization with
any changes to Flask's JSONEncoder which a user may have made
prior to calling init_app.
NOTE: This does not cover situations where users override
an instance's json_encoder after calling init_app.
'''
app.json_encoder = _make_encoder(app.json_encoder)
|
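Independent of the MongoEngine import fallback, the `_make_encoder(superclass)` factory is a reusable idea: layer extra serialization onto whatever encoder the app already configured instead of replacing it. A dependency-free sketch of the same shape (the set handling is illustrative, standing in for the `BaseDocument` branch):

```python
import json

def make_encoder(superclass):
    class SetFriendlyEncoder(superclass):
        def default(self, obj):
            if isinstance(obj, set):  # stand-in for the BaseDocument check
                return sorted(obj)
            return superclass.default(self, obj)
    return SetFriendlyEncoder

Encoder = make_encoder(json.JSONEncoder)
print(json.dumps({"tags": {"b", "a"}}, cls=Encoder))  # {"tags": ["a", "b"]}
```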
6f464e422befe22e56bb759a7ac7ff52a353c6d9
|
accountant/functional_tests/test_layout_and_styling.py
|
accountant/functional_tests/test_layout_and_styling.py
|
import unittest
from .base import FunctionalTestCase
from .pages import game
class StylesheetTests(FunctionalTestCase):
def test_color_css_loaded(self):
self.story('Create a game')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
page.start_button.click()
self.assertTrue(any('css/color.css' in s.get_attribute('href')
for s in page.stylesheets))
def test_main_stylesheet_loaded(self):
self.story('Load the start page')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
self.assertTrue(any('css/main.css' in s.get_attribute('href')
for s in page.stylesheets))
|
import unittest
from .base import FunctionalTestCase
from .pages import game
class StylesheetTests(FunctionalTestCase):
def test_color_css_loaded(self):
self.story('Create a game')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
page.start_button.click()
self.assertTrue(any('css/color.css' in s.get_attribute('href')
for s in page.stylesheets))
def test_main_stylesheet_loaded(self):
self.story('Load the start page')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
self.assertTrue(any('css/main.css' in s.get_attribute('href')
for s in page.stylesheets))
# Test constant to see if css actually gets loaded
self.assertEqual('rgb(55, 71, 79)',
page.bank_cash.value_of_css_property('border-color'))
|
Test if loaded CSS is applied
|
Test if loaded CSS is applied
|
Python
|
mit
|
XeryusTC/18xx-accountant,XeryusTC/18xx-accountant,XeryusTC/18xx-accountant,XeryusTC/18xx-accountant,XeryusTC/18xx-accountant
|
import unittest
from .base import FunctionalTestCase
from .pages import game
class StylesheetTests(FunctionalTestCase):
def test_color_css_loaded(self):
self.story('Create a game')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
page.start_button.click()
self.assertTrue(any('css/color.css' in s.get_attribute('href')
for s in page.stylesheets))
def test_main_stylesheet_loaded(self):
self.story('Load the start page')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
self.assertTrue(any('css/main.css' in s.get_attribute('href')
for s in page.stylesheets))
+ # Test constant to see if css actually gets loaded
+ self.assertEqual('rgb(55, 71, 79)',
+ page.bank_cash.value_of_css_property('border-color'))
+
|
Test if loaded CSS is applied
|
## Code Before:
import unittest
from .base import FunctionalTestCase
from .pages import game
class StylesheetTests(FunctionalTestCase):
def test_color_css_loaded(self):
self.story('Create a game')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
page.start_button.click()
self.assertTrue(any('css/color.css' in s.get_attribute('href')
for s in page.stylesheets))
def test_main_stylesheet_loaded(self):
self.story('Load the start page')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
self.assertTrue(any('css/main.css' in s.get_attribute('href')
for s in page.stylesheets))
## Instruction:
Test if loaded CSS is applied
## Code After:
import unittest
from .base import FunctionalTestCase
from .pages import game
class StylesheetTests(FunctionalTestCase):
def test_color_css_loaded(self):
self.story('Create a game')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
page.start_button.click()
self.assertTrue(any('css/color.css' in s.get_attribute('href')
for s in page.stylesheets))
def test_main_stylesheet_loaded(self):
self.story('Load the start page')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
self.assertTrue(any('css/main.css' in s.get_attribute('href')
for s in page.stylesheets))
# Test constant to see if css actually gets loaded
self.assertEqual('rgb(55, 71, 79)',
page.bank_cash.value_of_css_property('border-color'))
|
import unittest
from .base import FunctionalTestCase
from .pages import game
class StylesheetTests(FunctionalTestCase):
def test_color_css_loaded(self):
self.story('Create a game')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
page.start_button.click()
self.assertTrue(any('css/color.css' in s.get_attribute('href')
for s in page.stylesheets))
def test_main_stylesheet_loaded(self):
self.story('Load the start page')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
self.assertTrue(any('css/main.css' in s.get_attribute('href')
for s in page.stylesheets))
+
+ # Test constant to see if css actually gets loaded
+ self.assertEqual('rgb(55, 71, 79)',
+ page.bank_cash.value_of_css_property('border-color'))
|
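The constant works because browsers normalize computed styles: `value_of_css_property` reports `rgb(55, 71, 79)` even if the stylesheet declares the color as hex. A small helper makes the correspondence explicit:

```python
def hex_to_rgb(hex_color):
    # convert '#37474f' into the 'rgb(r, g, b)' form Selenium reports
    h = hex_color.lstrip("#")
    return "rgb(%d, %d, %d)" % tuple(int(h[i:i + 2], 16) for i in (0, 2, 4))

assert hex_to_rgb("#37474f") == "rgb(55, 71, 79)"
```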
641434ef0d1056fecdedbe7dacfe2d915b89408b
|
undecorated.py
|
undecorated.py
|
"""Return a function with any decorators removed """
__version__ = '0.1.1'
def undecorated(o):
"""Remove all decorators from a function, method or class"""
# class decorator
if type(o) is type:
return o
try:
# python2
closure = o.func_closure
except AttributeError:
pass
try:
# python3
closure = o.__closure__
except AttributeError:
return
if closure:
for cell in closure:
# avoid infinite recursion
if cell.cell_contents is o:
continue
undecd = undecorated(cell.cell_contents)
if undecd:
return undecd
else:
return o
|
__version__ = '0.1.1'
def undecorated(o):
"""Remove all decorators from a function, method or class"""
# class decorator
if type(o) is type:
return o
try:
# python2
closure = o.func_closure
except AttributeError:
pass
try:
# python3
closure = o.__closure__
except AttributeError:
return
if closure:
for cell in closure:
# avoid infinite recursion
if cell.cell_contents is o:
continue
undecd = undecorated(cell.cell_contents)
if undecd:
return undecd
else:
return o
|
Remove module docstring as we have it on the func
|
Remove module docstring as we have it on the func
|
Python
|
apache-2.0
|
mapleoin/undecorated
|
-
- """Return a function with any decorators removed """
__version__ = '0.1.1'
def undecorated(o):
"""Remove all decorators from a function, method or class"""
# class decorator
if type(o) is type:
return o
try:
# python2
closure = o.func_closure
except AttributeError:
pass
try:
# python3
closure = o.__closure__
except AttributeError:
return
if closure:
for cell in closure:
# avoid infinite recursion
if cell.cell_contents is o:
continue
undecd = undecorated(cell.cell_contents)
if undecd:
return undecd
else:
return o
|
Remove module docstring as we have it on the func
|
## Code Before:
"""Return a function with any decorators removed """
__version__ = '0.1.1'
def undecorated(o):
"""Remove all decorators from a function, method or class"""
# class decorator
if type(o) is type:
return o
try:
# python2
closure = o.func_closure
except AttributeError:
pass
try:
# python3
closure = o.__closure__
except AttributeError:
return
if closure:
for cell in closure:
# avoid infinite recursion
if cell.cell_contents is o:
continue
undecd = undecorated(cell.cell_contents)
if undecd:
return undecd
else:
return o
## Instruction:
Remove module docstring as we have it on the func
## Code After:
__version__ = '0.1.1'
def undecorated(o):
"""Remove all decorators from a function, method or class"""
# class decorator
if type(o) is type:
return o
try:
# python2
closure = o.func_closure
except AttributeError:
pass
try:
# python3
closure = o.__closure__
except AttributeError:
return
if closure:
for cell in closure:
# avoid infinite recursion
if cell.cell_contents is o:
continue
undecd = undecorated(cell.cell_contents)
if undecd:
return undecd
else:
return o
|
-
- """Return a function with any decorators removed """
__version__ = '0.1.1'
def undecorated(o):
"""Remove all decorators from a function, method or class"""
# class decorator
if type(o) is type:
return o
try:
# python2
closure = o.func_closure
except AttributeError:
pass
try:
# python3
closure = o.__closure__
except AttributeError:
return
if closure:
for cell in closure:
# avoid infinite recursion
if cell.cell_contents is o:
continue
undecd = undecorated(cell.cell_contents)
if undecd:
return undecd
else:
return o
|
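A usage sketch for the helper itself (assumes `undecorated` from the module above is importable): it walks the wrapper's closure to recover the original function from under a decorator.

```python
import functools

def shout(fn):
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        return fn(*args, **kwargs).upper()
    return wrapper

@shout
def greet():
    return "hi"

print(greet())               # HI
print(undecorated(greet)())  # hi -- the wrapper has been peeled off
```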
3849a5a842137a29ce06b5b7e027c8f8efd4e00e
|
shopify/product/admin.py
|
shopify/product/admin.py
|
from django.contrib import admin
from .models import Product
class ProductAdmin(admin.ModelAdmin):
list_display = ('description', 'product_type', 'account_number')
ordering = ('description',)
class Meta:
model = Product
admin.site.register(Product, ProductAdmin)
|
from django.contrib import admin
from .models import Product
class ProductAdmin(admin.ModelAdmin):
list_display = ('description', 'product_type', 'account_number')
ordering = ('description',)
readonly_fields = ('description', 'product_type', 'product_id')
class Meta:
model = Product
admin.site.register(Product, ProductAdmin)
|
Make Shopify product fields readonly
|
Make Shopify product fields readonly
|
Python
|
bsd-3-clause
|
CorbanU/corban-shopify,CorbanU/corban-shopify
|
from django.contrib import admin
from .models import Product
class ProductAdmin(admin.ModelAdmin):
list_display = ('description', 'product_type', 'account_number')
ordering = ('description',)
+ readonly_fields = ('description', 'product_type', 'product_id')
class Meta:
model = Product
admin.site.register(Product, ProductAdmin)
|
Make Shopify product fields readonly
|
## Code Before:
from django.contrib import admin
from .models import Product
class ProductAdmin(admin.ModelAdmin):
list_display = ('description', 'product_type', 'account_number')
ordering = ('description',)
class Meta:
model = Product
admin.site.register(Product, ProductAdmin)
## Instruction:
Make Shopify product fields readonly
## Code After:
from django.contrib import admin
from .models import Product
class ProductAdmin(admin.ModelAdmin):
list_display = ('description', 'product_type', 'account_number')
ordering = ('description',)
readonly_fields = ('description', 'product_type', 'product_id')
class Meta:
model = Product
admin.site.register(Product, ProductAdmin)
|
from django.contrib import admin
from .models import Product
class ProductAdmin(admin.ModelAdmin):
list_display = ('description', 'product_type', 'account_number')
ordering = ('description',)
+ readonly_fields = ('description', 'product_type', 'product_id')
class Meta:
model = Product
admin.site.register(Product, ProductAdmin)
|
91d104a25db499ccef54878dcbfce42dbb4aa932
|
taskin/task.py
|
taskin/task.py
|
import abc
def do_flow(flow, result=None):
for item in flow:
print(item, result)
result = item(result)
return result
class MapTask(object):
def __init__(self, args, task):
self.args = args
self.task = task
self.pool = Pool(cpu_count())
def iter_input(self, input):
for args in self.args:
if not isinstance(args, (tuple, list)):
args = [args]
yield tuple([input] + args)
def __call__(self, input):
return self.pool.map(self.task, self.iter_input(input))
class IfTask(object):
def __init__(self, check, a, b):
self.check = check
self.a = a
self.b = b
def __call__(self, input):
if check(input):
return do_flow(self.a, input)
return do_flow(self.b, input)
|
from multiprocessing import Pool as ProcessPool
from multiprocessing.dummy import Pool as ThreadPool
from multiprocessing import cpu_count
def do_flow(flow, result=None):
for item in flow:
print(item, result)
result = item(result)
return result
class PoolAPI(object):
def map(self, *args, **kw):
return self.pool.map(*args, **kw)
class ThreadPool(PoolAPI):
def __init__(self, size=20):
self.size = size
self.pool = ThreadPool(self.size)
class ProcessPool(PoolAPI):
def __init__(self, size=None):
self.size = size or cpu_count()
self.pool = ProcessPool(self.size)
class MapTask(object):
pool_types = [
'thread', 'process'
]
def __init__(self, args, task, pool):
self.args = args
self.task = task
self.pool = pool
def iter_input(self, input):
for args in self.args:
if not isinstance(args, (tuple, list)):
args = [args]
yield tuple([input] + args)
def __call__(self, input):
return self.pool.map(self.task, self.iter_input(input))
class IfTask(object):
def __init__(self, check, a, b):
self.check = check
self.a = a
self.b = b
def __call__(self, input):
if check(input):
return do_flow(self.a, input)
return do_flow(self.b, input)
|
Add totally untested pools ;)
|
Add totally untested pools ;)
|
Python
|
bsd-3-clause
|
ionrock/taskin
|
- import abc
+ from multiprocessing import Pool as ProcessPool
+ from multiprocessing.dummy import Pool as ThreadPool
+ from multiprocessing import cpu_count
def do_flow(flow, result=None):
for item in flow:
print(item, result)
result = item(result)
return result
+ class PoolAPI(object):
+ def map(self, *args, **kw):
+ return self.pool.map(*args, **kw)
+
+
+ class ThreadPool(PoolAPI):
+
+ def __init__(self, size=20):
+ self.size = size
+ self.pool = ThreadPool(self.size)
+
+
+ class ProcessPool(PoolAPI):
+
+ def __init__(self, size=None):
+ self.size = size or cpu_count()
+ self.pool = ProcessPool(self.size)
+
+
class MapTask(object):
+ pool_types = [
+ 'thread', 'process'
+ ]
+
- def __init__(self, args, task):
+ def __init__(self, args, task, pool):
self.args = args
self.task = task
- self.pool = Pool(cpu_count())
+ self.pool = pool
def iter_input(self, input):
for args in self.args:
if not isinstance(args, (tuple, list)):
args = [args]
yield tuple([input] + args)
-
def __call__(self, input):
return self.pool.map(self.task, self.iter_input(input))
class IfTask(object):
def __init__(self, check, a, b):
self.check = check
self.a = a
self.b = b
def __call__(self, input):
if check(input):
return do_flow(self.a, input)
return do_flow(self.b, input)
|
Add totally untested pools ;)
|
## Code Before:
import abc
def do_flow(flow, result=None):
for item in flow:
print(item, result)
result = item(result)
return result
class MapTask(object):
def __init__(self, args, task):
self.args = args
self.task = task
self.pool = Pool(cpu_count())
def iter_input(self, input):
for args in self.args:
if not isinstance(args, (tuple, list)):
args = [args]
yield tuple([input] + args)
def __call__(self, input):
return self.pool.map(self.task, self.iter_input(input))
class IfTask(object):
def __init__(self, check, a, b):
self.check = check
self.a = a
self.b = b
def __call__(self, input):
if check(input):
return do_flow(self.a, input)
return do_flow(self.b, input)
## Instruction:
Add totally untested pools ;)
## Code After:
from multiprocessing import Pool as ProcessPool
from multiprocessing.dummy import Pool as ThreadPool
from multiprocessing import cpu_count
def do_flow(flow, result=None):
for item in flow:
print(item, result)
result = item(result)
return result
class PoolAPI(object):
def map(self, *args, **kw):
return self.pool.map(*args, **kw)
class ThreadPool(PoolAPI):
def __init__(self, size=20):
self.size = size
self.pool = ThreadPool(self.size)
class ProcessPool(PoolAPI):
def __init__(self, size=None):
self.size = size or cpu_count()
self.pool = ProcessPool(self.size)
class MapTask(object):
pool_types = [
'thread', 'process'
]
def __init__(self, args, task, pool):
self.args = args
self.task = task
self.pool = pool
def iter_input(self, input):
for args in self.args:
if not isinstance(args, (tuple, list)):
args = [args]
yield tuple([input] + args)
def __call__(self, input):
return self.pool.map(self.task, self.iter_input(input))
class IfTask(object):
def __init__(self, check, a, b):
self.check = check
self.a = a
self.b = b
def __call__(self, input):
if check(input):
return do_flow(self.a, input)
return do_flow(self.b, input)
|
- import abc
+ from multiprocessing import Pool as ProcessPool
+ from multiprocessing.dummy import Pool as ThreadPool
+ from multiprocessing import cpu_count
def do_flow(flow, result=None):
for item in flow:
print(item, result)
result = item(result)
return result
+ class PoolAPI(object):
+ def map(self, *args, **kw):
+ return self.pool.map(*args, **kw)
+
+
+ class ThreadPool(PoolAPI):
+
+ def __init__(self, size=20):
+ self.size = size
+ self.pool = ThreadPool(self.size)
+
+
+ class ProcessPool(PoolAPI):
+
+ def __init__(self, size=None):
+ self.size = size or cpu_count()
+ self.pool = ProcessPool(self.size)
+
+
class MapTask(object):
+ pool_types = [
+ 'thread', 'process'
+ ]
+
- def __init__(self, args, task):
+ def __init__(self, args, task, pool):
? ++++++
self.args = args
self.task = task
- self.pool = Pool(cpu_count())
? ^ -------------
+ self.pool = pool
? ^
def iter_input(self, input):
for args in self.args:
if not isinstance(args, (tuple, list)):
args = [args]
yield tuple([input] + args)
-
def __call__(self, input):
return self.pool.map(self.task, self.iter_input(input))
class IfTask(object):
def __init__(self, check, a, b):
self.check = check
self.a = a
self.b = b
def __call__(self, input):
if check(input):
return do_flow(self.a, input)
return do_flow(self.b, input)
|
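As committed, `class ThreadPool(PoolAPI)` shadows the `Pool as ThreadPool` import (likewise for `ProcessPool`), so `self.pool = ThreadPool(self.size)` recurses into the class itself; the message's "totally untested" is apt. A hedged sketch that keeps the same API but aliases the imports out of the way:

```python
from multiprocessing import Pool as _ProcessPool, cpu_count
from multiprocessing.dummy import Pool as _ThreadPool

class PoolAPI(object):
    def map(self, *args, **kw):
        return self.pool.map(*args, **kw)

class ThreadPool(PoolAPI):
    def __init__(self, size=20):
        self.size = size
        self.pool = _ThreadPool(self.size)  # no longer shadowed by the class name

class ProcessPool(PoolAPI):
    def __init__(self, size=None):
        self.size = size or cpu_count()
        self.pool = _ProcessPool(self.size)

print(ThreadPool(4).map(str, range(3)))  # ['0', '1', '2']
```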
4e876b59745a67cf1fbcbaacf1ca1675c3e1946a
|
onetime/models.py
|
onetime/models.py
|
from django.db import models
from django.contrib.auth.models import User
class Key(models.Model):
user = models.ForeignKey(User)
key = models.CharField(max_length=40)
created = models.DateTimeField(auto_now_add=True)
usage_left = models.IntegerField(null=True, default=1)
expires = models.DateTimeField(null=True)
next = models.CharField(null=True, max_length=200)
|
from datetime import datetime
from django.db import models
from django.contrib.auth.models import User
class Key(models.Model):
user = models.ForeignKey(User)
key = models.CharField(max_length=40)
created = models.DateTimeField(auto_now_add=True)
usage_left = models.IntegerField(null=True, default=1)
expires = models.DateTimeField(null=True)
next = models.CharField(null=True, max_length=200)
def __unicode__(self):
return '%s (%s)' % (self.key, self.user.username)
def is_valid(self):
if self.usage_left is not None and self.usage_left <= 0:
return False
if self.expires is not None and self.expires < datetime.now():
return False
return True
def update_usage(self):
if self.usage_left is not None:
self.usage_left -= 1
self.save()
|
Add validation and usage logic into the model
|
Add validation and usage logic into the model
|
Python
|
agpl-3.0
|
ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,uploadcare/django-loginurl,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,fajran/django-loginurl,vanschelven/cmsplugin-journal,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website
|
+ from datetime import datetime
+
from django.db import models
from django.contrib.auth.models import User
class Key(models.Model):
user = models.ForeignKey(User)
key = models.CharField(max_length=40)
created = models.DateTimeField(auto_now_add=True)
usage_left = models.IntegerField(null=True, default=1)
expires = models.DateTimeField(null=True)
next = models.CharField(null=True, max_length=200)
+ def __unicode__(self):
+ return '%s (%s)' % (self.key, self.user.username)
+ def is_valid(self):
+ if self.usage_left is not None and self.usage_left <= 0:
+ return False
+ if self.expires is not None and self.expires < datetime.now():
+ return False
+ return True
+
+ def update_usage(self):
+ if self.usage_left is not None:
+ self.usage_left -= 1
+ self.save()
+
+
|
Add validation and usage logic into the model
|
## Code Before:
from django.db import models
from django.contrib.auth.models import User
class Key(models.Model):
user = models.ForeignKey(User)
key = models.CharField(max_length=40)
created = models.DateTimeField(auto_now_add=True)
usage_left = models.IntegerField(null=True, default=1)
expires = models.DateTimeField(null=True)
next = models.CharField(null=True, max_length=200)
## Instruction:
Add validation and usage logic into the model
## Code After:
from datetime import datetime
from django.db import models
from django.contrib.auth.models import User
class Key(models.Model):
user = models.ForeignKey(User)
key = models.CharField(max_length=40)
created = models.DateTimeField(auto_now_add=True)
usage_left = models.IntegerField(null=True, default=1)
expires = models.DateTimeField(null=True)
next = models.CharField(null=True, max_length=200)
def __unicode__(self):
return '%s (%s)' % (self.key, self.user.username)
def is_valid(self):
if self.usage_left is not None and self.usage_left <= 0:
return False
if self.expires is not None and self.expires < datetime.now():
return False
return True
def update_usage(self):
if self.usage_left is not None:
self.usage_left -= 1
self.save()
|
+ from datetime import datetime
+
from django.db import models
from django.contrib.auth.models import User
class Key(models.Model):
user = models.ForeignKey(User)
key = models.CharField(max_length=40)
created = models.DateTimeField(auto_now_add=True)
usage_left = models.IntegerField(null=True, default=1)
expires = models.DateTimeField(null=True)
next = models.CharField(null=True, max_length=200)
+ def __unicode__(self):
+ return '%s (%s)' % (self.key, self.user.username)
+
+ def is_valid(self):
+ if self.usage_left is not None and self.usage_left <= 0:
+ return False
+ if self.expires is not None and self.expires < datetime.now():
+ return False
+ return True
+
+ def update_usage(self):
+ if self.usage_left is not None:
+ self.usage_left -= 1
+ self.save()
+
|
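A sketch of the call site these model hooks enable, for example a login view; the view, key lookup, and error handling are illustrative assumptions, not part of the commit.

from django.http import HttpResponseForbidden, HttpResponseRedirect

def login_with_key(request, key):
    k = Key.objects.get(key=key)  # lookup; missing-key handling omitted
    if not k.is_valid():          # expired, or usage_left exhausted
        return HttpResponseForbidden('key is no longer valid')
    k.update_usage()              # decrement usage_left and persist
    return HttpResponseRedirect(k.next or '/')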
d7c41853277c1df53192b2f879f47f75f3c62fd5
|
server/covmanager/urls.py
|
server/covmanager/urls.py
|
from django.conf.urls import patterns, include, url
from rest_framework import routers
from covmanager import views
router = routers.DefaultRouter()
router.register(r'collections', views.CollectionViewSet, base_name='collections')
router.register(r'repositories', views.RepositoryViewSet, base_name='repositories')
urlpatterns = patterns('',
url(r'^rest/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^repositories/', views.repositories, name="repositories"),
url(r'^collections/$', views.collections, name="collections"),
url(r'^collections/api/$', views.CollectionViewSet.as_view({'get': 'list'}), name="collections_api"),
url(r'^collections/(?P<collectionid>\d+)/browse/$', views.collections_browse, name="collections_browse"),
url(r'^collections/(?P<collectionid>\d+)/browse/api/(?P<path>.*)', views.collections_browse_api, name="collections_browse_api"),
url(r'^rest/', include(router.urls)),
)
|
from django.conf.urls import patterns, include, url
from rest_framework import routers
from covmanager import views
router = routers.DefaultRouter()
router.register(r'collections', views.CollectionViewSet, base_name='collections')
router.register(r'repositories', views.RepositoryViewSet, base_name='repositories')
urlpatterns = patterns('',
url(r'^rest/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^$', views.index, name='index'),
url(r'^repositories/', views.repositories, name="repositories"),
url(r'^collections/$', views.collections, name="collections"),
url(r'^collections/api/$', views.CollectionViewSet.as_view({'get': 'list'}), name="collections_api"),
url(r'^collections/(?P<collectionid>\d+)/browse/$', views.collections_browse, name="collections_browse"),
url(r'^collections/(?P<collectionid>\d+)/browse/api/(?P<path>.*)', views.collections_browse_api, name="collections_browse_api"),
url(r'^rest/', include(router.urls)),
)
|
Add redirect for / to collections
|
[CovManager] Add redirect for / to collections
|
Python
|
mpl-2.0
|
MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager
|
from django.conf.urls import patterns, include, url
from rest_framework import routers
from covmanager import views
router = routers.DefaultRouter()
router.register(r'collections', views.CollectionViewSet, base_name='collections')
router.register(r'repositories', views.RepositoryViewSet, base_name='repositories')
urlpatterns = patterns('',
url(r'^rest/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
+ url(r'^$', views.index, name='index'),
url(r'^repositories/', views.repositories, name="repositories"),
url(r'^collections/$', views.collections, name="collections"),
url(r'^collections/api/$', views.CollectionViewSet.as_view({'get': 'list'}), name="collections_api"),
url(r'^collections/(?P<collectionid>\d+)/browse/$', views.collections_browse, name="collections_browse"),
url(r'^collections/(?P<collectionid>\d+)/browse/api/(?P<path>.*)', views.collections_browse_api, name="collections_browse_api"),
url(r'^rest/', include(router.urls)),
)
+
|
Add redirect for / to collections
|
## Code Before:
from django.conf.urls import patterns, include, url
from rest_framework import routers
from covmanager import views
router = routers.DefaultRouter()
router.register(r'collections', views.CollectionViewSet, base_name='collections')
router.register(r'repositories', views.RepositoryViewSet, base_name='repositories')
urlpatterns = patterns('',
url(r'^rest/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^repositories/', views.repositories, name="repositories"),
url(r'^collections/$', views.collections, name="collections"),
url(r'^collections/api/$', views.CollectionViewSet.as_view({'get': 'list'}), name="collections_api"),
url(r'^collections/(?P<collectionid>\d+)/browse/$', views.collections_browse, name="collections_browse"),
url(r'^collections/(?P<collectionid>\d+)/browse/api/(?P<path>.*)', views.collections_browse_api, name="collections_browse_api"),
url(r'^rest/', include(router.urls)),
)
## Instruction:
Add redirect for / to collections
## Code After:
from django.conf.urls import patterns, include, url
from rest_framework import routers
from covmanager import views
router = routers.DefaultRouter()
router.register(r'collections', views.CollectionViewSet, base_name='collections')
router.register(r'repositories', views.RepositoryViewSet, base_name='repositories')
urlpatterns = patterns('',
url(r'^rest/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^$', views.index, name='index'),
url(r'^repositories/', views.repositories, name="repositories"),
url(r'^collections/$', views.collections, name="collections"),
url(r'^collections/api/$', views.CollectionViewSet.as_view({'get': 'list'}), name="collections_api"),
url(r'^collections/(?P<collectionid>\d+)/browse/$', views.collections_browse, name="collections_browse"),
url(r'^collections/(?P<collectionid>\d+)/browse/api/(?P<path>.*)', views.collections_browse_api, name="collections_browse_api"),
url(r'^rest/', include(router.urls)),
)
|
from django.conf.urls import patterns, include, url
from rest_framework import routers
from covmanager import views
router = routers.DefaultRouter()
router.register(r'collections', views.CollectionViewSet, base_name='collections')
router.register(r'repositories', views.RepositoryViewSet, base_name='repositories')
urlpatterns = patterns('',
url(r'^rest/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
+ url(r'^$', views.index, name='index'),
url(r'^repositories/', views.repositories, name="repositories"),
url(r'^collections/$', views.collections, name="collections"),
url(r'^collections/api/$', views.CollectionViewSet.as_view({'get': 'list'}), name="collections_api"),
url(r'^collections/(?P<collectionid>\d+)/browse/$', views.collections_browse, name="collections_browse"),
url(r'^collections/(?P<collectionid>\d+)/browse/api/(?P<path>.*)', views.collections_browse_api, name="collections_browse_api"),
url(r'^rest/', include(router.urls)),
)
|
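The new pattern points at views.index, whose body is not shown in this record; a plausible minimal implementation is a redirect to the named collections route. This is an assumption for illustration, not taken from the commit.

from django.shortcuts import redirect

def index(request):
    # Send visitors hitting / to the collections overview.
    return redirect('collections')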
eda35123356edd20b361aa2f1d1f20cc7b922e39
|
settings_example.py
|
settings_example.py
|
import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
# Restrict emails by sender.
EMAIL_FROM = '[email protected]'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/[email protected]:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
|
import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = '[email protected]'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/[email protected]:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
|
Add CSV file name format setting example
|
Add CSV file name format setting example
|
Python
|
mit
|
AustralianAntarcticDataCentre/save_emails_to_files,AustralianAntarcticDataCentre/save_emails_to_files
|
import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
+
+ CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = '[email protected]'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/[email protected]:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
|
Add CSV file name format setting example
|
## Code Before:
import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
# Restrict emails by sender.
EMAIL_FROM = '[email protected]'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/[email protected]:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
## Instruction:
Add CSV file name format setting example
## Code After:
import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = '[email protected]'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/[email protected]:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
|
import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
+
+ CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = '[email protected]'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/[email protected]:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
|
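A sketch of how the new CSV_NAME_FORMAT pairs with EMAIL_SUBJECT_RE elsewhere in the collector: the regex captures the timestamp parts of a matching subject, and the format string rebuilds the destination file name from the same group names. The sample subject is made up.

match = EMAIL_SUBJECT_RE.search('201401251330.csv')
if match:
    # match.groupdict() -> {'year': '2014', 'month': '01', 'day': '25', ...}
    name = CSV_NAME_FORMAT.format(**match.groupdict())
    print(name)  # 2014-01-25T1330.csv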
ddb91c20793d8e5e8a01e0302afeaaba76776741
|
setuptools/extern/six.py
|
setuptools/extern/six.py
|
import imp
_SIX_SEARCH_PATH = ['setuptools._vendor.six', 'six']
def _find_module(name, path=None):
"""
Alternative to `imp.find_module` that can also search in subpackages.
"""
parts = name.split('.')
for part in parts:
if path is not None:
path = [path]
fh, path, descr = imp.find_module(part, path)
return fh, path, descr
def _import_six(search_path=_SIX_SEARCH_PATH):
for mod_name in search_path:
try:
mod_info = _find_module(mod_name)
except ImportError:
continue
imp.load_module(__name__, *mod_info)
break
else:
raise ImportError(
"The 'six' module of minimum version {0} is required; "
"normally this is bundled with this package so if you get "
"this warning, consult the packager of your "
"distribution.")
_import_six()
|
import imp
_SEARCH_PATH = ['setuptools._vendor.six', 'six']
def _find_module(name, path=None):
"""
Alternative to `imp.find_module` that can also search in subpackages.
"""
parts = name.split('.')
for part in parts:
if path is not None:
path = [path]
fh, path, descr = imp.find_module(part, path)
return fh, path, descr
def _import_in_place(search_path=_SEARCH_PATH):
for mod_name in search_path:
try:
mod_info = _find_module(mod_name)
except ImportError:
continue
imp.load_module(__name__, *mod_info)
break
else:
raise ImportError(
"The '{name}' package is required; "
"normally this is bundled with this package so if you get "
"this warning, consult the packager of your "
"distribution.".format(name=_SEARCH_PATH[-1]))
_import_in_place()
|
Make the technique even more generic
|
Make the technique even more generic
--HG--
branch : feature/issue-229
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
import imp
- _SIX_SEARCH_PATH = ['setuptools._vendor.six', 'six']
+ _SEARCH_PATH = ['setuptools._vendor.six', 'six']
def _find_module(name, path=None):
"""
Alternative to `imp.find_module` that can also search in subpackages.
"""
parts = name.split('.')
for part in parts:
if path is not None:
path = [path]
fh, path, descr = imp.find_module(part, path)
return fh, path, descr
- def _import_six(search_path=_SIX_SEARCH_PATH):
+ def _import_in_place(search_path=_SEARCH_PATH):
for mod_name in search_path:
try:
mod_info = _find_module(mod_name)
except ImportError:
continue
imp.load_module(__name__, *mod_info)
-
break
else:
raise ImportError(
- "The 'six' module of minimum version {0} is required; "
+ "The '{name}' package is required; "
"normally this is bundled with this package so if you get "
"this warning, consult the packager of your "
- "distribution.")
+ "distribution.".format(name=_SEARCH_PATH[-1]))
- _import_six()
+ _import_in_place()
|
Make the technique even more generic
|
## Code Before:
import imp
_SIX_SEARCH_PATH = ['setuptools._vendor.six', 'six']
def _find_module(name, path=None):
"""
Alternative to `imp.find_module` that can also search in subpackages.
"""
parts = name.split('.')
for part in parts:
if path is not None:
path = [path]
fh, path, descr = imp.find_module(part, path)
return fh, path, descr
def _import_six(search_path=_SIX_SEARCH_PATH):
for mod_name in search_path:
try:
mod_info = _find_module(mod_name)
except ImportError:
continue
imp.load_module(__name__, *mod_info)
break
else:
raise ImportError(
"The 'six' module of minimum version {0} is required; "
"normally this is bundled with this package so if you get "
"this warning, consult the packager of your "
"distribution.")
_import_six()
## Instruction:
Make the technique even more generic
## Code After:
import imp
_SEARCH_PATH = ['setuptools._vendor.six', 'six']
def _find_module(name, path=None):
"""
Alternative to `imp.find_module` that can also search in subpackages.
"""
parts = name.split('.')
for part in parts:
if path is not None:
path = [path]
fh, path, descr = imp.find_module(part, path)
return fh, path, descr
def _import_in_place(search_path=_SEARCH_PATH):
for mod_name in search_path:
try:
mod_info = _find_module(mod_name)
except ImportError:
continue
imp.load_module(__name__, *mod_info)
break
else:
raise ImportError(
"The '{name}' package is required; "
"normally this is bundled with this package so if you get "
"this warning, consult the packager of your "
"distribution.".format(name=_SEARCH_PATH[-1]))
_import_in_place()
|
import imp
- _SIX_SEARCH_PATH = ['setuptools._vendor.six', 'six']
? ----
+ _SEARCH_PATH = ['setuptools._vendor.six', 'six']
def _find_module(name, path=None):
"""
Alternative to `imp.find_module` that can also search in subpackages.
"""
parts = name.split('.')
for part in parts:
if path is not None:
path = [path]
fh, path, descr = imp.find_module(part, path)
return fh, path, descr
- def _import_six(search_path=_SIX_SEARCH_PATH):
? - ^ ----
+ def _import_in_place(search_path=_SEARCH_PATH):
? ^^^^^^^
for mod_name in search_path:
try:
mod_info = _find_module(mod_name)
except ImportError:
continue
imp.load_module(__name__, *mod_info)
-
break
else:
raise ImportError(
- "The 'six' module of minimum version {0} is required; "
+ "The '{name}' package is required; "
"normally this is bundled with this package so if you get "
"this warning, consult the packager of your "
- "distribution.")
+ "distribution.".format(name=_SEARCH_PATH[-1]))
- _import_six()
+ _import_in_place()
|
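With the six-specific names gone, the loader reads as a template for any vendored dependency. A hypothetical sibling module for a different package would differ only in its search path; the 'packaging' names below are assumptions, not from the commit.

import imp  # used by the copied _find_module/_import_in_place helpers

# e.g. setuptools/extern/packaging.py
_SEARCH_PATH = ['setuptools._vendor.packaging', 'packaging']

# _find_module and _import_in_place would be identical to the loader above.
# imp.load_module(__name__, *mod_info) rebinds this module in sys.modules, so
# importers get the vendored copy when it is bundled and the system install
# otherwise.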
1c516e64518597404e3928d445fb3239748a4861
|
performanceplatform/collector/logging_setup.py
|
performanceplatform/collector/logging_setup.py
|
from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
def get_json_log_handler(path, app_name):
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
def set_up_logging(app_name, log_level, logfile_path):
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
os.path.join(logfile_path, 'collector.log.json'), app_name))
logger.info("{0} logging started".format(app_name))
|
from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
def get_json_log_handler(path, app_name, json_fields):
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
formatter.defaults.update(json_fields)
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
def set_up_logging(app_name, log_level, logfile_path, json_fields=None):
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
os.path.join(logfile_path, 'collector.log.json'),
app_name,
json_fields=json_fields if json_fields else {}))
logger.info("{0} logging started".format(app_name))
|
Add `json_fields` parameter to set_up_logging
|
Add `json_fields` parameter to set_up_logging
This will allow the main function to add extra fields to JSON log
messages, for example to pass through command-line arguments.
See https://www.pivotaltracker.com/story/show/70748012
|
Python
|
mit
|
alphagov/performanceplatform-collector,alphagov/performanceplatform-collector,alphagov/performanceplatform-collector
|
from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
- def get_json_log_handler(path, app_name):
+ def get_json_log_handler(path, app_name, json_fields):
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
+ formatter.defaults.update(json_fields)
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
- def set_up_logging(app_name, log_level, logfile_path):
+ def set_up_logging(app_name, log_level, logfile_path, json_fields=None):
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
- os.path.join(logfile_path, 'collector.log.json'), app_name))
+ os.path.join(logfile_path, 'collector.log.json'),
+ app_name,
+ json_fields=json_fields if json_fields else {}))
logger.info("{0} logging started".format(app_name))
|
Add `json_fields` parameter to set_up_logging
|
## Code Before:
from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
def get_json_log_handler(path, app_name):
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
def set_up_logging(app_name, log_level, logfile_path):
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
os.path.join(logfile_path, 'collector.log.json'), app_name))
logger.info("{0} logging started".format(app_name))
## Instruction:
Add `json_fields` parameter to set_up_logging
## Code After:
from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
def get_json_log_handler(path, app_name, json_fields):
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
formatter.defaults.update(json_fields)
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
def set_up_logging(app_name, log_level, logfile_path, json_fields=None):
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
os.path.join(logfile_path, 'collector.log.json'),
app_name,
json_fields=json_fields if json_fields else {}))
logger.info("{0} logging started".format(app_name))
|
from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
- def get_json_log_handler(path, app_name):
+ def get_json_log_handler(path, app_name, json_fields):
? +++++++++++++
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
+ formatter.defaults.update(json_fields)
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
- def set_up_logging(app_name, log_level, logfile_path):
+ def set_up_logging(app_name, log_level, logfile_path, json_fields=None):
? ++++++++++++++++++
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
- os.path.join(logfile_path, 'collector.log.json'), app_name))
? -----------
+ os.path.join(logfile_path, 'collector.log.json'),
+ app_name,
+ json_fields=json_fields if json_fields else {}))
logger.info("{0} logging started".format(app_name))
|
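A sketch of the call site the new keyword enables; the application name, path, and field values are illustrative assumptions.

import logging

set_up_logging(
    'my-collector',
    logging.INFO,
    '/var/log/collector',
    # Merged into formatter.defaults, so every JSON log line carries them.
    json_fields={'data_set': 'visitors', 'query': 'weekly'},
)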
b8eb16ac78c081711236d73e5c099ed734f897ac
|
pyscriptic/refs.py
|
pyscriptic/refs.py
|
from pyscriptic.containers import CONTAINERS
from pyscriptic.storage import STORAGE_LOCATIONS
class Reference(object):
"""
Contains the information to either create or link a given container to a
reference through a protocol via an intermediate name.
Attributes
----------
container_id : str
new : str
store : dict of str, str
discard bool
Notes
-----
.. [1] https://www.transcriptic.com/platform/#instr_access
"""
def __init__(self, container_id=None, new=None, store_where=None, discard=False):
assert (container_id is not None) != (new is not None)
assert (store_where is not None) != (discard)
assert store_where in STORAGE_LOCATIONS.keys() or store_where is None
assert new in CONTAINERS.keys() or new is None
# XXX: Check container id?
self.container_id = container_id
self.new = new
self.store = {"where": store_where}
self.discard = discard
|
from pyscriptic.containers import CONTAINERS, list_containers
from pyscriptic.storage import STORAGE_LOCATIONS
_AVAILABLE_CONTAINERS_IDS = None
def _available_container_ids():
"""
This helper function fetches a list of all containers available to the
currently active organization. It then stores the container IDs so that we
can compare against them later when creating new References.
Returns
-------
set of str
"""
global _AVAILABLE_CONTAINERS_IDS
if _AVAILABLE_CONTAINERS_IDS is not None:
return _AVAILABLE_CONTAINERS_IDS
_AVAILABLE_CONTAINERS_IDS = set(i.container_id for i in list_containers())
class Reference(object):
"""
Contains the information to either create or link a given container to a
reference through a protocol via an intermediate name.
Attributes
----------
container_id : str
new : str
store : dict of str, str
discard bool
Notes
-----
.. [1] https://www.transcriptic.com/platform/#instr_access
"""
def __init__(self, container_id=None, new=None, store_where=None, discard=False):
assert (container_id is not None) != (new is not None)
assert (store_where is not None) != (discard)
assert store_where in STORAGE_LOCATIONS.keys() or store_where is None
assert new in CONTAINERS.keys() or new is None
if container_id is not None:
assert container_id in _available_container_ids()
self.container_id = container_id
self.new = new
self.store = {"where": store_where}
self.discard = discard
|
Check container IDs when making new References
|
Check container IDs when making new References
|
Python
|
bsd-2-clause
|
naderm/pytranscriptic,naderm/pytranscriptic
|
- from pyscriptic.containers import CONTAINERS
+ from pyscriptic.containers import CONTAINERS, list_containers
from pyscriptic.storage import STORAGE_LOCATIONS
+
+ _AVAILABLE_CONTAINERS_IDS = None
+
+ def _available_container_ids():
+ """
+ This helper function fetches a list of all containers available to the
+ currently active organization. It then stores the container IDs so that we
+ can compare against them later when creating new References.
+
+ Returns
+ -------
+ set of str
+ """
+
+ global _AVAILABLE_CONTAINERS_IDS
+
+ if _AVAILABLE_CONTAINERS_IDS is not None:
+ return _AVAILABLE_CONTAINERS_IDS
+
+ _AVAILABLE_CONTAINERS_IDS = set(i.container_id for i in list_containers())
class Reference(object):
"""
Contains the information to either create or link a given container to a
reference through a protocol via an intermediate name.
Attributes
----------
container_id : str
new : str
store : dict of str, str
discard bool
Notes
-----
.. [1] https://www.transcriptic.com/platform/#instr_access
"""
def __init__(self, container_id=None, new=None, store_where=None, discard=False):
assert (container_id is not None) != (new is not None)
assert (store_where is not None) != (discard)
assert store_where in STORAGE_LOCATIONS.keys() or store_where is None
assert new in CONTAINERS.keys() or new is None
- # XXX: Check container id?
+ if container_id is not None:
+ assert container_id in _available_container_ids()
+
self.container_id = container_id
self.new = new
self.store = {"where": store_where}
self.discard = discard
|
Check container IDs when making new References
|
## Code Before:
from pyscriptic.containers import CONTAINERS
from pyscriptic.storage import STORAGE_LOCATIONS
class Reference(object):
"""
Contains the information to either create or link a given container to a
reference through a protocol via an intermediate name.
Attributes
----------
container_id : str
new : str
store : dict of str, str
discard bool
Notes
-----
.. [1] https://www.transcriptic.com/platform/#instr_access
"""
def __init__(self, container_id=None, new=None, store_where=None, discard=False):
assert (container_id is not None) != (new is not None)
assert (store_where is not None) != (discard)
assert store_where in STORAGE_LOCATIONS.keys() or store_where is None
assert new in CONTAINERS.keys() or new is None
# XXX: Check container id?
self.container_id = container_id
self.new = new
self.store = {"where": store_where}
self.discard = discard
## Instruction:
Check container IDs when making new References
## Code After:
from pyscriptic.containers import CONTAINERS, list_containers
from pyscriptic.storage import STORAGE_LOCATIONS
_AVAILABLE_CONTAINERS_IDS = None
def _available_container_ids():
"""
This helper function fetches a list of all containers available to the
currently active organization. It then stores the container IDs so that we
can compare against them later when creating new References.
Returns
-------
set of str
"""
global _AVAILABLE_CONTAINERS_IDS
if _AVAILABLE_CONTAINERS_IDS is not None:
return _AVAILABLE_CONTAINERS_IDS
_AVAILABLE_CONTAINERS_IDS = set(i.container_id for i in list_containers())
class Reference(object):
"""
Contains the information to either create or link a given container to a
reference through a protocol via an intermediate name.
Attributes
----------
container_id : str
new : str
store : dict of str, str
discard bool
Notes
-----
.. [1] https://www.transcriptic.com/platform/#instr_access
"""
def __init__(self, container_id=None, new=None, store_where=None, discard=False):
assert (container_id is not None) != (new is not None)
assert (store_where is not None) != (discard)
assert store_where in STORAGE_LOCATIONS.keys() or store_where is None
assert new in CONTAINERS.keys() or new is None
if container_id is not None:
assert container_id in _available_container_ids()
self.container_id = container_id
self.new = new
self.store = {"where": store_where}
self.discard = discard
|
- from pyscriptic.containers import CONTAINERS
+ from pyscriptic.containers import CONTAINERS, list_containers
? +++++++++++++++++
from pyscriptic.storage import STORAGE_LOCATIONS
+
+ _AVAILABLE_CONTAINERS_IDS = None
+
+ def _available_container_ids():
+ """
+ This helper function fetches a list of all containers available to the
+ currently active organization. It then stores the container IDs so that we
+ can compare against them later when creating new References.
+
+ Returns
+ -------
+ set of str
+ """
+
+ global _AVAILABLE_CONTAINERS_IDS
+
+ if _AVAILABLE_CONTAINERS_IDS is not None:
+ return _AVAILABLE_CONTAINERS_IDS
+
+ _AVAILABLE_CONTAINERS_IDS = set(i.container_id for i in list_containers())
class Reference(object):
"""
Contains the information to either create or link a given container to a
reference through a protocol via an intermediate name.
Attributes
----------
container_id : str
new : str
store : dict of str, str
discard bool
Notes
-----
.. [1] https://www.transcriptic.com/platform/#instr_access
"""
def __init__(self, container_id=None, new=None, store_where=None, discard=False):
assert (container_id is not None) != (new is not None)
assert (store_where is not None) != (discard)
assert store_where in STORAGE_LOCATIONS.keys() or store_where is None
assert new in CONTAINERS.keys() or new is None
- # XXX: Check container id?
+ if container_id is not None:
+ assert container_id in _available_container_ids()
+
self.container_id = container_id
self.new = new
self.store = {"where": store_where}
self.discard = discard
|
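One caveat: the committed helper assigns _AVAILABLE_CONTAINERS_IDS but never returns it, so the first call yields None and the membership assert raises TypeError. Below is a corrected standalone sketch of the same memoization pattern, with the data source stubbed out as an assumption.

_CACHE = None

def cached_ids(fetch):
    # Fetch once, then serve the memoized set on every later call.
    global _CACHE
    if _CACHE is None:
        _CACHE = set(fetch())
    return _CACHE  # the return the committed helper is missing

print(cached_ids(lambda: ['ct1', 'ct2']))  # fetches and caches
print(cached_ids(lambda: ['ignored']))     # served from the cache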
9e41011a5f164732ffd33ba5ca5edc7813735aeb
|
bundle_data.py
|
bundle_data.py
|
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
def main():
if len(sys.argv) != 4:
print('Usage: bundle_data.py <input dir> <output dir> <samples per bundle>')
exit(1)
p = sys.argv[1]
b = sys.argv[2]
lim = int(sys.argv[3])
x = []
y = []
for d in glob.glob(os.path.join(p, '*.dat')):
with open(d, 'rb') as f:
im, l = pickle.load(f)
x.append(im)
y.append(l)
if len(y) >= lim:
name = str(uuid.uuid4())
pack = os.path.join(b, name + '.npz')
with open(pack, 'wb') as f:
np.savez(f, images=np.stack(x), offsets=np.stack(y))
print('packed:', pack)
x = []
y = []
if __name__ == '__main__':
main()
|
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
def pack(b, x, y):
name = str(uuid.uuid4())
pack = os.path.join(b, name + '.npz')
with open(pack, 'wb') as f:
np.savez(f, images=np.stack(x), offsets=np.stack(y))
print('packed:', pack)
def main():
if len(sys.argv) < 4:
print('Usage: bundle_data.py <output dir> <samples per bundle> <input dir1> [input dir2] ...')
exit(1)
o = sys.argv[1]
lim = int(sys.argv[2])
inputs = sys.argv[3:]
x = []
y = []
for i in inputs:
for d in glob.glob(os.path.join(i, '*.dat')):
with open(d, 'rb') as f:
im, l = pickle.load(f)
x.append(im)
y.append(l)
if len(y) >= lim:
pack(o, x, y)
x = []
y = []
# Pack any leftovers
if x:
pack(o, x, y)
if __name__ == '__main__':
main()
|
Fix saving when number of items is less than configured bundle size
|
Fix saving when number of items is less than configured bundle size
|
Python
|
apache-2.0
|
baudm/HomographyNet
|
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
+ def pack(b, x, y):
+ name = str(uuid.uuid4())
+ pack = os.path.join(b, name + '.npz')
+ with open(pack, 'wb') as f:
+ np.savez(f, images=np.stack(x), offsets=np.stack(y))
+ print('packed:', pack)
+
+
def main():
- if len(sys.argv) != 4:
+ if len(sys.argv) < 4:
- print('Usage: bundle_data.py <input dir> <output dir> <samples per bundle>')
+ print('Usage: bundle_data.py <output dir> <samples per bundle> <input dir1> [input dir2] ...')
exit(1)
- p = sys.argv[1]
+ o = sys.argv[1]
- b = sys.argv[2]
- lim = int(sys.argv[3])
+ lim = int(sys.argv[2])
+ inputs = sys.argv[3:]
x = []
y = []
+ for i in inputs:
- for d in glob.glob(os.path.join(p, '*.dat')):
+ for d in glob.glob(os.path.join(i, '*.dat')):
- with open(d, 'rb') as f:
+ with open(d, 'rb') as f:
- im, l = pickle.load(f)
+ im, l = pickle.load(f)
- x.append(im)
+ x.append(im)
- y.append(l)
+ y.append(l)
- if len(y) >= lim:
+ if len(y) >= lim:
+ pack(o, x, y)
- name = str(uuid.uuid4())
- pack = os.path.join(b, name + '.npz')
- with open(pack, 'wb') as f:
- np.savez(f, images=np.stack(x), offsets=np.stack(y))
- print('packed:', pack)
- x = []
+ x = []
- y = []
+ y = []
+ # Pack any leftovers
+ if x:
+ pack(o, x, y)
if __name__ == '__main__':
main()
|
Fix saving when number of items is less than configured bundle size
|
## Code Before:
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
def main():
if len(sys.argv) != 4:
print('Usage: bundle_data.py <input dir> <output dir> <samples per bundle>')
exit(1)
p = sys.argv[1]
b = sys.argv[2]
lim = int(sys.argv[3])
x = []
y = []
for d in glob.glob(os.path.join(p, '*.dat')):
with open(d, 'rb') as f:
im, l = pickle.load(f)
x.append(im)
y.append(l)
if len(y) >= lim:
name = str(uuid.uuid4())
pack = os.path.join(b, name + '.npz')
with open(pack, 'wb') as f:
np.savez(f, images=np.stack(x), offsets=np.stack(y))
print('packed:', pack)
x = []
y = []
if __name__ == '__main__':
main()
## Instruction:
Fix saving when number of items is less than configured bundle size
## Code After:
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
def pack(b, x, y):
name = str(uuid.uuid4())
pack = os.path.join(b, name + '.npz')
with open(pack, 'wb') as f:
np.savez(f, images=np.stack(x), offsets=np.stack(y))
print('packed:', pack)
def main():
if len(sys.argv) < 4:
print('Usage: bundle_data.py <output dir> <samples per bundle> <input dir1> [input dir2] ...')
exit(1)
o = sys.argv[1]
lim = int(sys.argv[2])
inputs = sys.argv[3:]
x = []
y = []
for i in inputs:
for d in glob.glob(os.path.join(i, '*.dat')):
with open(d, 'rb') as f:
im, l = pickle.load(f)
x.append(im)
y.append(l)
if len(y) >= lim:
pack(o, x, y)
x = []
y = []
# Pack any leftovers
if x:
pack(o, x, y)
if __name__ == '__main__':
main()
|
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
+ def pack(b, x, y):
+ name = str(uuid.uuid4())
+ pack = os.path.join(b, name + '.npz')
+ with open(pack, 'wb') as f:
+ np.savez(f, images=np.stack(x), offsets=np.stack(y))
+ print('packed:', pack)
+
+
def main():
- if len(sys.argv) != 4:
? ^^
+ if len(sys.argv) < 4:
? ^
- print('Usage: bundle_data.py <input dir> <output dir> <samples per bundle>')
? ------------
+ print('Usage: bundle_data.py <output dir> <samples per bundle> <input dir1> [input dir2] ...')
? ++++++++++++++++++++++++++++++
exit(1)
- p = sys.argv[1]
? ^
+ o = sys.argv[1]
? ^
- b = sys.argv[2]
- lim = int(sys.argv[3])
? ^
+ lim = int(sys.argv[2])
? ^
+ inputs = sys.argv[3:]
x = []
y = []
+ for i in inputs:
- for d in glob.glob(os.path.join(p, '*.dat')):
? ^
+ for d in glob.glob(os.path.join(i, '*.dat')):
? ++++ ^
- with open(d, 'rb') as f:
+ with open(d, 'rb') as f:
? ++++
- im, l = pickle.load(f)
+ im, l = pickle.load(f)
? ++++
- x.append(im)
+ x.append(im)
? ++++
- y.append(l)
+ y.append(l)
? ++++
- if len(y) >= lim:
+ if len(y) >= lim:
? ++++
+ pack(o, x, y)
- name = str(uuid.uuid4())
- pack = os.path.join(b, name + '.npz')
- with open(pack, 'wb') as f:
- np.savez(f, images=np.stack(x), offsets=np.stack(y))
- print('packed:', pack)
- x = []
+ x = []
? ++++
- y = []
+ y = []
? ++++
+ # Pack any leftovers
+ if x:
+ pack(o, x, y)
if __name__ == '__main__':
main()
|
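A sketch of invoking the refactored bundler with its new argument order over two capture directories; the paths and bundle size are illustrative assumptions.

import subprocess

# bundle_data.py <output dir> <samples per bundle> <input dir1> [input dir2] ...
subprocess.check_call([
    'python', 'bundle_data.py',
    'bundles',        # where the .npz packs are written
    '256',            # samples per bundle; a short final batch is now packed too
    'captures/run1',  # one or more directories of pickled *.dat samples
    'captures/run2',
])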
e5310f1147694992d1eb48a037261f805955294c
|
tests/unit/forms/widget_tests.py
|
tests/unit/forms/widget_tests.py
|
import nose
from oscar.forms import widgets
def test_datetime_to_date_format_conversion():
format_testcases = (
('%Y-%m-%d', 'yy-mm-dd'),
('%Y-%m-%d %H:%M', 'yy-mm-dd'),
)
def compare(format, expected):
nose.tools.eq_(
widgets.datetime_format_to_js_date_format(format), expected)
for format, expected in format_testcases:
yield compare, format, expected
def test_datetime_to_time_format_conversion():
format_testcases = (
('%Y-%m-%d', ''),
('%Y-%m-%d %H:%M', 'HH:mm'),
('%d/%m/%Y', ''),
)
def compare(format, expected):
nose.tools.eq_(
widgets.datetime_format_to_js_time_format(format), expected)
for format, expected in format_testcases:
yield compare, format, expected
|
import nose
from oscar.forms import widgets
def compare_date_format(format, expected):
nose.tools.eq_(
widgets.datetime_format_to_js_date_format(format), expected)
def test_datetime_to_date_format_conversion():
format_testcases = (
('%Y-%m-%d', 'yy-mm-dd'),
('%Y-%m-%d %H:%M', 'yy-mm-dd'),
)
for format, expected in format_testcases:
yield compare_date_format, format, expected
def compare_time_format(format, expected):
nose.tools.eq_(
widgets.datetime_format_to_js_time_format(format), expected)
def test_datetime_to_time_format_conversion():
format_testcases = (
('%Y-%m-%d', ''),
('%Y-%m-%d %H:%M', 'HH:mm'),
('%d/%m/%Y', ''),
)
for format, expected in format_testcases:
yield compare_time_format, format, expected
|
Adjust test generator to make tests pass
|
Adjust test generator to make tests pass
|
Python
|
bsd-3-clause
|
amirrpp/django-oscar,Jannes123/django-oscar,jmt4/django-oscar,marcoantoniooliveira/labweb,taedori81/django-oscar,kapt/django-oscar,josesanch/django-oscar,ademuk/django-oscar,pasqualguerrero/django-oscar,dongguangming/django-oscar,amirrpp/django-oscar,pdonadeo/django-oscar,nickpack/django-oscar,bnprk/django-oscar,MatthewWilkes/django-oscar,nfletton/django-oscar,kapari/django-oscar,jmt4/django-oscar,DrOctogon/unwash_ecom,jlmadurga/django-oscar,ka7eh/django-oscar,ahmetdaglarbas/e-commerce,MatthewWilkes/django-oscar,nickpack/django-oscar,Idematica/django-oscar,kapari/django-oscar,kapt/django-oscar,jinnykoo/wuyisj,Idematica/django-oscar,josesanch/django-oscar,jmt4/django-oscar,kapt/django-oscar,machtfit/django-oscar,mexeniz/django-oscar,jinnykoo/wuyisj.com,MatthewWilkes/django-oscar,eddiep1101/django-oscar,jmt4/django-oscar,okfish/django-oscar,taedori81/django-oscar,adamend/django-oscar,michaelkuty/django-oscar,john-parton/django-oscar,taedori81/django-oscar,ademuk/django-oscar,itbabu/django-oscar,itbabu/django-oscar,michaelkuty/django-oscar,WadeYuChen/django-oscar,itbabu/django-oscar,jlmadurga/django-oscar,mexeniz/django-oscar,QLGu/django-oscar,bnprk/django-oscar,WadeYuChen/django-oscar,michaelkuty/django-oscar,Bogh/django-oscar,manevant/django-oscar,manevant/django-oscar,ka7eh/django-oscar,sasha0/django-oscar,bnprk/django-oscar,MatthewWilkes/django-oscar,marcoantoniooliveira/labweb,john-parton/django-oscar,sasha0/django-oscar,machtfit/django-oscar,sonofatailor/django-oscar,jlmadurga/django-oscar,Jannes123/django-oscar,ademuk/django-oscar,elliotthill/django-oscar,sonofatailor/django-oscar,pdonadeo/django-oscar,kapari/django-oscar,anentropic/django-oscar,sonofatailor/django-oscar,ademuk/django-oscar,nickpack/django-oscar,ahmetdaglarbas/e-commerce,Jannes123/django-oscar,kapt/django-oscar,Bogh/django-oscar,anentropic/django-oscar,sasha0/django-oscar,dongguangming/django-oscar,eddiep1101/django-oscar,thechampanurag/django-oscar,jinnykoo/wuyisj.com,eddiep1101/django-oscar,machtfit/django-oscar,WillisXChen/django-oscar,jinnykoo/christmas,DrOctogon/unwash_ecom,makielab/django-oscar,vovanbo/django-oscar,vovanbo/django-oscar,ahmetdaglarbas/e-commerce,monikasulik/django-oscar,jinnykoo/wuyisj,ka7eh/django-oscar,monikasulik/django-oscar,mexeniz/django-oscar
|
import nose
from oscar.forms import widgets
+
+
+ def compare_date_format(format, expected):
+ nose.tools.eq_(
+ widgets.datetime_format_to_js_date_format(format), expected)
def test_datetime_to_date_format_conversion():
format_testcases = (
('%Y-%m-%d', 'yy-mm-dd'),
('%Y-%m-%d %H:%M', 'yy-mm-dd'),
)
+ for format, expected in format_testcases:
+ yield compare_date_format, format, expected
- def compare(format, expected):
- nose.tools.eq_(
- widgets.datetime_format_to_js_date_format(format), expected)
- for format, expected in format_testcases:
- yield compare, format, expected
+ def compare_time_format(format, expected):
+ nose.tools.eq_(
+ widgets.datetime_format_to_js_time_format(format), expected)
def test_datetime_to_time_format_conversion():
format_testcases = (
('%Y-%m-%d', ''),
('%Y-%m-%d %H:%M', 'HH:mm'),
('%d/%m/%Y', ''),
)
+ for format, expected in format_testcases:
+ yield compare_time_format, format, expected
- def compare(format, expected):
- nose.tools.eq_(
- widgets.datetime_format_to_js_time_format(format), expected)
-
- for format, expected in format_testcases:
- yield compare, format, expected
-
|
Adjust test generator to make tests pass
|
## Code Before:
import nose
from oscar.forms import widgets
def test_datetime_to_date_format_conversion():
format_testcases = (
('%Y-%m-%d', 'yy-mm-dd'),
('%Y-%m-%d %H:%M', 'yy-mm-dd'),
)
def compare(format, expected):
nose.tools.eq_(
widgets.datetime_format_to_js_date_format(format), expected)
for format, expected in format_testcases:
yield compare, format, expected
def test_datetime_to_time_format_conversion():
format_testcases = (
('%Y-%m-%d', ''),
('%Y-%m-%d %H:%M', 'HH:mm'),
('%d/%m/%Y', ''),
)
def compare(format, expected):
nose.tools.eq_(
widgets.datetime_format_to_js_time_format(format), expected)
for format, expected in format_testcases:
yield compare, format, expected
## Instruction:
Adjust test generator to make tests pass
## Code After:
import nose
from oscar.forms import widgets
def compare_date_format(format, expected):
nose.tools.eq_(
widgets.datetime_format_to_js_date_format(format), expected)
def test_datetime_to_date_format_conversion():
format_testcases = (
('%Y-%m-%d', 'yy-mm-dd'),
('%Y-%m-%d %H:%M', 'yy-mm-dd'),
)
for format, expected in format_testcases:
yield compare_date_format, format, expected
def compare_time_format(format, expected):
nose.tools.eq_(
widgets.datetime_format_to_js_time_format(format), expected)
def test_datetime_to_time_format_conversion():
format_testcases = (
('%Y-%m-%d', ''),
('%Y-%m-%d %H:%M', 'HH:mm'),
('%d/%m/%Y', ''),
)
for format, expected in format_testcases:
yield compare_time_format, format, expected
|
import nose
from oscar.forms import widgets
+
+
+ def compare_date_format(format, expected):
+ nose.tools.eq_(
+ widgets.datetime_format_to_js_date_format(format), expected)
def test_datetime_to_date_format_conversion():
format_testcases = (
('%Y-%m-%d', 'yy-mm-dd'),
('%Y-%m-%d %H:%M', 'yy-mm-dd'),
)
+ for format, expected in format_testcases:
+ yield compare_date_format, format, expected
- def compare(format, expected):
- nose.tools.eq_(
- widgets.datetime_format_to_js_date_format(format), expected)
- for format, expected in format_testcases:
- yield compare, format, expected
+ def compare_time_format(format, expected):
+ nose.tools.eq_(
+ widgets.datetime_format_to_js_time_format(format), expected)
def test_datetime_to_time_format_conversion():
format_testcases = (
('%Y-%m-%d', ''),
('%Y-%m-%d %H:%M', 'HH:mm'),
('%d/%m/%Y', ''),
)
-
- def compare(format, expected):
- nose.tools.eq_(
- widgets.datetime_format_to_js_time_format(format), expected)
-
for format, expected in format_testcases:
- yield compare, format, expected
+ yield compare_time_format, format, expected
? ++++++++++++
|
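For comparison, the same cases written with pytest's parametrize rather than nose yield-generators; this is an alternative sketch, not part of the commit.

import pytest
from oscar.forms import widgets

@pytest.mark.parametrize('fmt, expected', [
    ('%Y-%m-%d', 'yy-mm-dd'),
    ('%Y-%m-%d %H:%M', 'yy-mm-dd'),
])
def test_js_date_format(fmt, expected):
    assert widgets.datetime_format_to_js_date_format(fmt) == expected

@pytest.mark.parametrize('fmt, expected', [
    ('%Y-%m-%d', ''),
    ('%Y-%m-%d %H:%M', 'HH:mm'),
    ('%d/%m/%Y', ''),
])
def test_js_time_format(fmt, expected):
    assert widgets.datetime_format_to_js_time_format(fmt) == expected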
901e6cc8bdafcd6e6d419ffd5eee4e58d266d40a
|
extensions.py
|
extensions.py
|
import subprocess
from functools import wraps
import os
extensions = {}
def extension(f):
# keep unwrapped function
unwrapped = f
@wraps(f)
def wrapper(**kwargs):
wrapper.settings = dict(kwargs)
return unwrapped
extensions[f.__name__] = wrapper
return wrapper
@extension
def coffee(filename, data):
command = ['coffee', '-c', '-s']
bare = coffee.settings.get('bare')
if bare:
command.append('-b')
process = subprocess.Popen(command, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate(data)
if process.returncode:
return None, err
else:
_, ext = os.path.splitext(filename)
dest = filename.replace(ext, '.js')
return dest, out
@extension
def dest(filename, data):
destination = dest.settings.get('destination')
if destination:
filename = os.path.join(destination, tail)
fo = open(filename, 'w')
fo.write(data)
fo.close()
|
import subprocess
from functools import wraps
import os
extensions = {}
def extension(f):
# keep unwrapped function
unwrapped = f
@wraps(f)
def wrapper(**kwargs):
wrapper.settings = dict(kwargs)
return unwrapped
extensions[f.__name__] = wrapper
return wrapper
@extension
def coffee(filename, data):
command = ['coffee', '-c', '-s']
bare = coffee.settings.get('bare')
if bare:
command.append('-b')
process = subprocess.Popen(command, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate(data)
if process.returncode:
return None, err
else:
_, ext = os.path.splitext(filename)
dest = filename.replace(ext, '.js')
return dest, out
@extension
def dest(filename, data):
destination = dest.settings.get('destination')
if destination:
if not os.path.exists(destination):
os.mkdir(destination)
_, tail = os.path.split(filename)
filename = os.path.join(destination, tail)
fo = open(filename, 'w')
fo.write(data)
fo.close()
|
Fix file not found error when destination directory does not exist
|
Fix file not found error when destination directory does not exist
|
Python
|
mit
|
rolurq/flask-gulp
|
import subprocess
from functools import wraps
import os
extensions = {}
def extension(f):
# keep unwrapped function
unwrapped = f
@wraps(f)
def wrapper(**kwargs):
wrapper.settings = dict(kwargs)
return unwrapped
extensions[f.__name__] = wrapper
return wrapper
@extension
def coffee(filename, data):
command = ['coffee', '-c', '-s']
bare = coffee.settings.get('bare')
if bare:
command.append('-b')
process = subprocess.Popen(command, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate(data)
if process.returncode:
return None, err
else:
_, ext = os.path.splitext(filename)
dest = filename.replace(ext, '.js')
return dest, out
+
@extension
def dest(filename, data):
destination = dest.settings.get('destination')
if destination:
+ if not os.path.exists(destination):
+ os.mkdir(destination)
+ _, tail = os.path.split(filename)
filename = os.path.join(destination, tail)
fo = open(filename, 'w')
fo.write(data)
fo.close()
|
Fix file not found error when destination directory does not exist
|
## Code Before:
import subprocess
from functools import wraps
import os
extensions = {}
def extension(f):
# keep unwrapped function
unwrapped = f
@wraps(f)
def wrapper(**kwargs):
wrapper.settings = dict(kwargs)
return unwrapped
extensions[f.__name__] = wrapper
return wrapper
@extension
def coffee(filename, data):
command = ['coffee', '-c', '-s']
bare = coffee.settings.get('bare')
if bare:
command.append('-b')
process = subprocess.Popen(command, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate(data)
if process.returncode:
return None, err
else:
_, ext = os.path.splitext(filename)
dest = filename.replace(ext, '.js')
return dest, out
@extension
def dest(filename, data):
destination = dest.settings.get('destination')
if destination:
filename = os.path.join(destination, tail)
fo = open(filename, 'w')
fo.write(data)
fo.close()
## Instruction:
Fix file not found error when destination directory does not exist
## Code After:
import subprocess
from functools import wraps
import os
extensions = {}
def extension(f):
# keep unwrapped function
unwrapped = f
@wraps(f)
def wrapper(**kwargs):
wrapper.settings = dict(kwargs)
return unwrapped
extensions[f.__name__] = wrapper
return wrapper
@extension
def coffee(filename, data):
command = ['coffee', '-c', '-s']
bare = coffee.settings.get('bare')
if bare:
command.append('-b')
process = subprocess.Popen(command, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate(data)
if process.returncode:
return None, err
else:
_, ext = os.path.splitext(filename)
dest = filename.replace(ext, '.js')
return dest, out
@extension
def dest(filename, data):
destination = dest.settings.get('destination')
if destination:
if not os.path.exists(destination):
os.mkdir(destination)
_, tail = os.path.split(filename)
filename = os.path.join(destination, tail)
fo = open(filename, 'w')
fo.write(data)
fo.close()
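A minimal usage sketch of the fixed extension follows; the destination directory, source path, and file contents are invented for illustration. Calling the registered wrapper stores the settings and hands back the unwrapped function:
# Hypothetical usage of the registered 'dest' extension:
writer = extensions['dest'](destination='build')   # stores {'destination': 'build'} on the wrapper
writer('src/app.js', 'console.log("hi");')         # creates build/ if needed, then writes build/app.js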
|
import subprocess
from functools import wraps
import os
extensions = {}
def extension(f):
# keep unwrapped function
unwrapped = f
@wraps(f)
def wrapper(**kwargs):
wrapper.settings = dict(kwargs)
return unwrapped
extensions[f.__name__] = wrapper
return wrapper
@extension
def coffee(filename, data):
command = ['coffee', '-c', '-s']
bare = coffee.settings.get('bare')
if bare:
command.append('-b')
process = subprocess.Popen(command, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate(data)
if process.returncode:
return None, err
else:
_, ext = os.path.splitext(filename)
dest = filename.replace(ext, '.js')
return dest, out
+
@extension
def dest(filename, data):
destination = dest.settings.get('destination')
if destination:
+ if not os.path.exists(destination):
+ os.mkdir(destination)
+ _, tail = os.path.split(filename)
filename = os.path.join(destination, tail)
fo = open(filename, 'w')
fo.write(data)
fo.close()
|
6ee135dc454b7ae13dbd4603de60b5eba12cc5c9
|
saleor/graphql/core/decorators.py
|
saleor/graphql/core/decorators.py
|
from functools import wraps
from django.core.exceptions import PermissionDenied
def permission_required(permissions):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
info = args[1]
user = info.context.user
if not user.has_perm(permissions):
raise PermissionDenied(
'You have no permission to use %s' % info.field_name)
return func(*args, **kwargs)
return wrapper
return decorator
|
from functools import wraps
from django.core.exceptions import PermissionDenied
from graphql.execution.base import ResolveInfo
def permission_required(permissions):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
info = args[1]
assert isinstance(info, ResolveInfo)
user = info.context.user
if not user.has_perm(permissions):
raise PermissionDenied(
'You have no permission to use %s' % info.field_name)
return func(*args, **kwargs)
return wrapper
return decorator
|
Make sure the decorator is being used with proper function signatures
|
Make sure the decorator is being used with proper function signatures
|
Python
|
bsd-3-clause
|
UITools/saleor,UITools/saleor,mociepka/saleor,mociepka/saleor,maferelo/saleor,maferelo/saleor,UITools/saleor,maferelo/saleor,mociepka/saleor,UITools/saleor,UITools/saleor
|
from functools import wraps
from django.core.exceptions import PermissionDenied
+ from graphql.execution.base import ResolveInfo
def permission_required(permissions):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
info = args[1]
+ assert isinstance(info, ResolveInfo)
user = info.context.user
if not user.has_perm(permissions):
raise PermissionDenied(
'You have no permission to use %s' % info.field_name)
return func(*args, **kwargs)
return wrapper
return decorator
|
Make sure the decorator is being used with proper function signatures
|
## Code Before:
from functools import wraps
from django.core.exceptions import PermissionDenied
def permission_required(permissions):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
info = args[1]
user = info.context.user
if not user.has_perm(permissions):
raise PermissionDenied(
'You have no permission to use %s' % info.field_name)
return func(*args, **kwargs)
return wrapper
return decorator
## Instruction:
Make sure the decorator is being used with proper function signatures
## Code After:
from functools import wraps
from django.core.exceptions import PermissionDenied
from graphql.execution.base import ResolveInfo
def permission_required(permissions):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
info = args[1]
assert isinstance(info, ResolveInfo)
user = info.context.user
if not user.has_perm(permissions):
raise PermissionDenied(
'You have no permission to use %s' % info.field_name)
return func(*args, **kwargs)
return wrapper
return decorator
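As a sketch of what the new assert enforces, the decorated resolver must receive the ResolveInfo object as its second positional argument; the permission string and resolver below are invented:
@permission_required('product.manage_products')
def resolve_secret(root, info):
    # inside the wrapper, args[1] is `info`, so the isinstance check passes
    return 'staff only'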
|
from functools import wraps
from django.core.exceptions import PermissionDenied
+ from graphql.execution.base import ResolveInfo
def permission_required(permissions):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
info = args[1]
+ assert isinstance(info, ResolveInfo)
user = info.context.user
if not user.has_perm(permissions):
raise PermissionDenied(
'You have no permission to use %s' % info.field_name)
return func(*args, **kwargs)
return wrapper
return decorator
|
f3ec0593bb67db25c4f5af4b3b00d82d5e4e0f04
|
csv2ofx/mappings/gls.py
|
csv2ofx/mappings/gls.py
|
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter('Auftraggeber/Empfänger'),
}
|
from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter('Auftraggeber/Empfänger'),
}
|
Add __future__ import for other Python versions
|
Add __future__ import for other Python versions
|
Python
|
mit
|
reubano/csv2ofx,reubano/csv2ofx
|
+ from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter('Auftraggeber/Empfänger'),
}
|
Add __future__ import for other Python versions
|
## Code Before:
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter('Auftraggeber/Empfänger'),
}
## Instruction:
Add __future__ import for other Python versions
## Code After:
from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter('Auftraggeber/Empfänger'),
}
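A quick standalone check of the two lambdas above, on a made-up CSV row, shows the date reordering and the crude decimal handling:
row = {'Buchungstag': '31.12.2016', 'Betrag': '1.234,56'}
assert mapping['date'](row) == '12/31/2016'   # D.M.Y reordered to M/D/Y
assert mapping['amount'](row) == '1234.56'    # German separators normalised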
|
+ from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter('Auftraggeber/Empfänger'),
}
|
5a09c6e9545373cece95f87ed28579f05959fced
|
tests/skip_check.py
|
tests/skip_check.py
|
from __future__ import absolute_import, division, print_function
import pytest
def skip_check(name, iface, item):
if name in item.keywords and item.funcargs.get('backend') is not None:
if not isinstance(item.funcargs['backend'], iface):
pytest.skip("Backend does not support {0}".format(name))
|
from __future__ import absolute_import, division, print_function
import pytest
def skip_check(name, iface, item):
if name in item.keywords and "backend" in item.funcargs:
if not isinstance(item.funcargs["backend"], iface):
pytest.skip("{0} backend does not support {1}".format(
item.funcargs["backend"], name
))
|
Include the name of the backend in the error message
|
Include the name of the backend in the error message
|
Python
|
bsd-3-clause
|
Hasimir/cryptography,skeuomorf/cryptography,skeuomorf/cryptography,dstufft/cryptography,bwhmather/cryptography,skeuomorf/cryptography,Lukasa/cryptography,bwhmather/cryptography,Hasimir/cryptography,kimvais/cryptography,sholsapp/cryptography,Ayrx/cryptography,Hasimir/cryptography,sholsapp/cryptography,dstufft/cryptography,Lukasa/cryptography,sholsapp/cryptography,Ayrx/cryptography,dstufft/cryptography,dstufft/cryptography,bwhmather/cryptography,glyph/cryptography,bwhmather/cryptography,kimvais/cryptography,kimvais/cryptography,Ayrx/cryptography,dstufft/cryptography,glyph/cryptography,skeuomorf/cryptography,Lukasa/cryptography,kimvais/cryptography,sholsapp/cryptography,Ayrx/cryptography,Hasimir/cryptography
|
from __future__ import absolute_import, division, print_function
import pytest
def skip_check(name, iface, item):
- if name in item.keywords and item.funcargs.get('backend') is not None:
+ if name in item.keywords and "backend" in item.funcargs:
- if not isinstance(item.funcargs['backend'], iface):
+ if not isinstance(item.funcargs["backend"], iface):
- pytest.skip("Backend does not support {0}".format(name))
+ pytest.skip("{0} backend does not support {1}".format(
+ item.funcargs["backend"], name
+ ))
|
Include the name of the backend in the error message
|
## Code Before:
from __future__ import absolute_import, division, print_function
import pytest
def skip_check(name, iface, item):
if name in item.keywords and item.funcargs.get('backend') is not None:
if not isinstance(item.funcargs['backend'], iface):
pytest.skip("Backend does not support {0}".format(name))
## Instruction:
Include the name of the backend in the error message
## Code After:
from __future__ import absolute_import, division, print_function
import pytest
def skip_check(name, iface, item):
if name in item.keywords and "backend" in item.funcargs:
if not isinstance(item.funcargs["backend"], iface):
pytest.skip("{0} backend does not support {1}".format(
item.funcargs["backend"], name
))
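For context, a hypothetical conftest.py hook could wire this helper into pytest as below; the marker name and backend interface are assumptions, not part of this change:
from cryptography.hazmat.backends.interfaces import HMACBackend

def pytest_runtest_setup(item):
    # skips "hmac"-marked tests whose backend does not implement HMACBackend
    skip_check("hmac", HMACBackend, item)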
|
from __future__ import absolute_import, division, print_function
import pytest
def skip_check(name, iface, item):
- if name in item.keywords and item.funcargs.get('backend') is not None:
+ if name in item.keywords and "backend" in item.funcargs:
- if not isinstance(item.funcargs['backend'], iface):
? ^ ^
+ if not isinstance(item.funcargs["backend"], iface):
? ^ ^
- pytest.skip("Backend does not support {0}".format(name))
? ^ ^ ------
+ pytest.skip("{0} backend does not support {1}".format(
? ^^^^^ ^
+ item.funcargs["backend"], name
+ ))
|
15914cc8bd29392f204bec021b8cc34bf8507daa
|
saleor/integrations/management/commands/update_integrations.py
|
saleor/integrations/management/commands/update_integrations.py
|
from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from saleor.integrations.feeds import SaleorFeed
from saleor.integrations import utils
class Command(BaseCommand):
help = 'Updates integration feeds. '
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='+', type=str)
def handle(self, *args, **options):
feed_names = options['feed_name'] or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
|
from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from ....integrations.feeds import SaleorFeed
from ....integrations import utils
class Command(BaseCommand):
help = ('Updates integration feeds.'
'If feed name not provided, updates all available feeds')
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='*', type=str, default=None)
def handle(self, *args, **options):
feed_names = options.get('feed_name') or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
|
Fix import style and make feed_name optional
|
Fix import style and make feed_name optional
|
Python
|
bsd-3-clause
|
KenMutemi/saleor,UITools/saleor,jreigel/saleor,KenMutemi/saleor,HyperManTT/ECommerceSaleor,itbabu/saleor,jreigel/saleor,HyperManTT/ECommerceSaleor,tfroehlich82/saleor,tfroehlich82/saleor,maferelo/saleor,itbabu/saleor,KenMutemi/saleor,mociepka/saleor,UITools/saleor,car3oon/saleor,tfroehlich82/saleor,car3oon/saleor,UITools/saleor,maferelo/saleor,HyperManTT/ECommerceSaleor,mociepka/saleor,UITools/saleor,UITools/saleor,jreigel/saleor,maferelo/saleor,itbabu/saleor,car3oon/saleor,mociepka/saleor
|
from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
- from saleor.integrations.feeds import SaleorFeed
+ from ....integrations.feeds import SaleorFeed
- from saleor.integrations import utils
+ from ....integrations import utils
class Command(BaseCommand):
- help = 'Updates integration feeds. '
+ help = ('Updates integration feeds.'
+ 'If feed name not provided, updates all available feeds')
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
- parser.add_argument('feed_name', nargs='+', type=str)
+ parser.add_argument('feed_name', nargs='*', type=str, default=None)
def handle(self, *args, **options):
- feed_names = options['feed_name'] or self.feed_classes.keys()
+ feed_names = options.get('feed_name') or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
|
Fix import style and make feed_name optional
|
## Code Before:
from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from saleor.integrations.feeds import SaleorFeed
from saleor.integrations import utils
class Command(BaseCommand):
help = 'Updates integration feeds. '
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='+', type=str)
def handle(self, *args, **options):
feed_names = options['feed_name'] or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
## Instruction:
Fix import style and make feed_name optional
## Code After:
from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from ....integrations.feeds import SaleorFeed
from ....integrations import utils
class Command(BaseCommand):
help = ('Updates integration feeds.'
'If feed name not provided, updates all available feeds')
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='*', type=str, default=None)
def handle(self, *args, **options):
feed_names = options.get('feed_name') or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
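With nargs='*' the positional argument is now optional; a sketch of both invocations through Django's call_command (the feed name is the key registered in feed_classes):
from django.core.management import call_command

call_command('update_integrations')            # no name given: updates every registered feed
call_command('update_integrations', 'saleor')  # updates only the named feed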
|
from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
- from saleor.integrations.feeds import SaleorFeed
? ^^^^^^
+ from ....integrations.feeds import SaleorFeed
? ^^^
- from saleor.integrations import utils
? ^^^^^^
+ from ....integrations import utils
? ^^^
class Command(BaseCommand):
- help = 'Updates integration feeds. '
? -
+ help = ('Updates integration feeds.'
? +
+ 'If feed name not provided, updates all available feeds')
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
- parser.add_argument('feed_name', nargs='+', type=str)
? ^
+ parser.add_argument('feed_name', nargs='*', type=str, default=None)
? ^ ++++++++++++++
def handle(self, *args, **options):
- feed_names = options['feed_name'] or self.feed_classes.keys()
? ^ ^
+ feed_names = options.get('feed_name') or self.feed_classes.keys()
? ^^^^^ ^
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
|
03a95c87dde1a5b20658b3b61b4c4abc070e3bf3
|
flowtype/commands/base.py
|
flowtype/commands/base.py
|
import abc
import sublime
import sublime_plugin
from ..helpers import is_js_source
class BaseCommand(sublime_plugin.TextCommand, metaclass=abc.ABCMeta):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
@abc.abstractmethod
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
@abc.abstractmethod
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
|
import sublime
import sublime_plugin
from ..helpers import is_js_source
class BaseCommand(sublime_plugin.TextCommand):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
|
Fix the Travis build by removing the abc metaclass.
|
Fix the Travis build by removing the abc metaclass.
|
Python
|
mit
|
Pegase745/sublime-flowtype
|
- import abc
-
import sublime
import sublime_plugin
from ..helpers import is_js_source
- class BaseCommand(sublime_plugin.TextCommand, metaclass=abc.ABCMeta):
+ class BaseCommand(sublime_plugin.TextCommand):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
- @abc.abstractmethod
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
- @abc.abstractmethod
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
|
Fix the Travis build by removing the abc metaclass.
|
## Code Before:
import abc
import sublime
import sublime_plugin
from ..helpers import is_js_source
class BaseCommand(sublime_plugin.TextCommand, metaclass=abc.ABCMeta):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
@abc.abstractmethod
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
@abc.abstractmethod
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
## Instruction:
Fix the Travis build by removing the abc metaclass.
## Code After:
import sublime
import sublime_plugin
from ..helpers import is_js_source
class BaseCommand(sublime_plugin.TextCommand):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
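Children are still expected to override the two hooks that raise NotImplementedError; a hypothetical subclass, with the command and output handling invented for illustration (run() and the threading plumbing are omitted for brevity):
class FlowtypeVersionCommand(BaseCommand):
    def get_cmd(self):
        return ['flow', 'version']

    def handle_process(self, returncode, stdout, stderr):
        if returncode == 0:
            sublime.status_message(stdout)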
|
- import abc
-
import sublime
import sublime_plugin
from ..helpers import is_js_source
- class BaseCommand(sublime_plugin.TextCommand, metaclass=abc.ABCMeta):
? -----------------------
+ class BaseCommand(sublime_plugin.TextCommand):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
- @abc.abstractmethod
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
- @abc.abstractmethod
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
|
3afa75c48d680111dc32368553cdc741eb0c07fa
|
imgfac/Singleton.py
|
imgfac/Singleton.py
|
class Singleton(object):
_instance = None
def __new__(cls, *args, **kwargs):
if cls._instance is None:
instance = super(Singleton, cls).__new__(cls, *args, **kwargs)
instance._singleton_init(*args, **kwargs)
cls._instance = instance
elif args or kwargs:
cls._instance.log.warn('Attempted re-initialize of singleton: %s' % (cls._instance, ))
return cls._instance
def __init__(self):
pass
def _singleton_init(self):
"""Initialize a singleton instance before it is registered."""
pass
|
class Singleton(object):
_instance = None
def __new__(cls, *args, **kwargs):
if cls._instance is None:
instance = super(Singleton, cls).__new__(cls, *args, **kwargs)
instance._singleton_init(*args, **kwargs)
cls._instance = instance
elif args or kwargs:
cls._instance.log.warn('Attempted re-initialize of singleton: %s' % (cls._instance, ))
return cls._instance
def __init__(self, *args, **kwargs):
pass
def _singleton_init(self, *args, **kwargs):
"""Initialize a singleton instance before it is registered."""
pass
|
Allow for parameters to __init__()
|
Allow for parameters to __init__()
Signed-off-by: Steve Loranz <[email protected]>
|
Python
|
apache-2.0
|
henrysher/imagefactory,LalatenduMohanty/imagefactory,jmcabandara/imagefactory,henrysher/imagefactory,redhat-imaging/imagefactory,jmcabandara/imagefactory,redhat-imaging/imagefactory,LalatenduMohanty/imagefactory
|
class Singleton(object):
_instance = None
def __new__(cls, *args, **kwargs):
if cls._instance is None:
instance = super(Singleton, cls).__new__(cls, *args, **kwargs)
instance._singleton_init(*args, **kwargs)
cls._instance = instance
elif args or kwargs:
cls._instance.log.warn('Attempted re-initialize of singleton: %s' % (cls._instance, ))
return cls._instance
- def __init__(self):
+ def __init__(self, *args, **kwargs):
pass
- def _singleton_init(self):
+ def _singleton_init(self, *args, **kwargs):
"""Initialize a singleton instance before it is registered."""
pass
|
Allow for parameters to __init__()
|
## Code Before:
class Singleton(object):
_instance = None
def __new__(cls, *args, **kwargs):
if cls._instance is None:
instance = super(Singleton, cls).__new__(cls, *args, **kwargs)
instance._singleton_init(*args, **kwargs)
cls._instance = instance
elif args or kwargs:
cls._instance.log.warn('Attempted re-initialize of singleton: %s' % (cls._instance, ))
return cls._instance
def __init__(self):
pass
def _singleton_init(self):
"""Initialize a singleton instance before it is registered."""
pass
## Instruction:
Allow for parameters to __init__()
## Code After:
class Singleton(object):
_instance = None
def __new__(cls, *args, **kwargs):
if cls._instance is None:
instance = super(Singleton, cls).__new__(cls, *args, **kwargs)
instance._singleton_init(*args, **kwargs)
cls._instance = instance
elif args or kwargs:
cls._instance.log.warn('Attempted re-initialize of singleton: %s' % (cls._instance, ))
return cls._instance
def __init__(self, *args, **kwargs):
pass
def _singleton_init(self, *args, **kwargs):
"""Initialize a singleton instance before it is registered."""
pass
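A sketch of what the widened signatures enable; the subclass and its argument are invented, and Python 2 (which this project targets) tolerates the extra argument reaching object.__new__ because __init__ is overridden:
class PluginManager(Singleton):
    def _singleton_init(self, plugin_path=None, *args, **kwargs):
        self.plugin_path = plugin_path

first = PluginManager('/etc/imagefactory/plugins')  # creates and initializes the instance
second = PluginManager()                            # returns the same instance
assert first is second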
|
class Singleton(object):
_instance = None
def __new__(cls, *args, **kwargs):
if cls._instance is None:
instance = super(Singleton, cls).__new__(cls, *args, **kwargs)
instance._singleton_init(*args, **kwargs)
cls._instance = instance
elif args or kwargs:
cls._instance.log.warn('Attempted re-initialize of singleton: %s' % (cls._instance, ))
return cls._instance
- def __init__(self):
+ def __init__(self, *args, **kwargs):
pass
- def _singleton_init(self):
+ def _singleton_init(self, *args, **kwargs):
? +++++++++++++++++
"""Initialize a singleton instance before it is registered."""
pass
|
00b134df7281c39595f9efcc1c1da047d1d10277
|
src/encoded/authorization.py
|
src/encoded/authorization.py
|
from .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
lab_uuids = user.properties.get('lab_uuids', [])
principals.extend('lab:' + lab_uuid for lab_uuid in lab_uuids)
if CHERRY_LAB_UUID in lab_uuids:
principals.append('group:admin')
return principals
|
from .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
lab = user.properties.get('lab')
if lab:
principals.append('lab:%s' % lab)
submits_for = user.properties.get('submits_for', [])
principals.extend('lab:%s' % lab_uuid for lab_uuid in submits_for)
principals.extend('submits_for:%s' % lab_uuid for lab_uuid in submits_for)
if CHERRY_LAB_UUID in submits_for:
principals.append('group:admin')
return principals
|
Update group finder to new schemas
|
Update group finder to new schemas
|
Python
|
mit
|
kidaa/encoded,philiptzou/clincoded,4dn-dcic/fourfront,hms-dbmi/fourfront,4dn-dcic/fourfront,philiptzou/clincoded,philiptzou/clincoded,hms-dbmi/fourfront,kidaa/encoded,ENCODE-DCC/snovault,ClinGen/clincoded,ENCODE-DCC/snovault,kidaa/encoded,T2DREAM/t2dream-portal,philiptzou/clincoded,ENCODE-DCC/snovault,ENCODE-DCC/encoded,hms-dbmi/fourfront,T2DREAM/t2dream-portal,kidaa/encoded,4dn-dcic/fourfront,4dn-dcic/fourfront,ENCODE-DCC/snovault,hms-dbmi/fourfront,ENCODE-DCC/encoded,ClinGen/clincoded,ENCODE-DCC/snovault,T2DREAM/t2dream-portal,hms-dbmi/fourfront,kidaa/encoded,ClinGen/clincoded,ENCODE-DCC/encoded,ClinGen/clincoded,ClinGen/clincoded,T2DREAM/t2dream-portal,ENCODE-DCC/encoded,philiptzou/clincoded
|
from .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
- lab_uuids = user.properties.get('lab_uuids', [])
+ lab = user.properties.get('lab')
+ if lab:
+ principals.append('lab:%s' % lab)
+ submits_for = user.properties.get('submits_for', [])
- principals.extend('lab:' + lab_uuid for lab_uuid in lab_uuids)
+ principals.extend('lab:%s' % lab_uuid for lab_uuid in submits_for)
+ principals.extend('submits_for:%s' % lab_uuid for lab_uuid in submits_for)
- if CHERRY_LAB_UUID in lab_uuids:
+ if CHERRY_LAB_UUID in submits_for:
principals.append('group:admin')
return principals
|
Update group finder to new schemas
|
## Code Before:
from .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
lab_uuids = user.properties.get('lab_uuids', [])
principals.extend('lab:' + lab_uuid for lab_uuid in lab_uuids)
if CHERRY_LAB_UUID in lab_uuids:
principals.append('group:admin')
return principals
## Instruction:
Update group finder to new schemas
## Code After:
from .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
lab = user.properties.get('lab')
if lab:
principals.append('lab:%s' % lab)
submits_for = user.properties.get('submits_for', [])
principals.extend('lab:%s' % lab_uuid for lab_uuid in submits_for)
principals.extend('submits_for:%s' % lab_uuid for lab_uuid in submits_for)
if CHERRY_LAB_UUID in submits_for:
principals.append('group:admin')
return principals
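A standalone trace of the new principal construction for an assumed user record (the uuid and lab ids are invented):
properties = {'lab': 'lab-a', 'submits_for': ['lab-b']}
principals = ['userid:1234']
lab = properties.get('lab')
if lab:
    principals.append('lab:%s' % lab)
submits_for = properties.get('submits_for', [])
principals.extend('lab:%s' % l for l in submits_for)
principals.extend('submits_for:%s' % l for l in submits_for)
assert principals == ['userid:1234', 'lab:lab-a', 'lab:lab-b', 'submits_for:lab-b']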
|
from .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
- lab_uuids = user.properties.get('lab_uuids', [])
? ------ ------ ----
+ lab = user.properties.get('lab')
+ if lab:
+ principals.append('lab:%s' % lab)
+ submits_for = user.properties.get('submits_for', [])
- principals.extend('lab:' + lab_uuid for lab_uuid in lab_uuids)
? ^ ^^ ^^^^^
+ principals.extend('lab:%s' % lab_uuid for lab_uuid in submits_for)
? ++ ^ ^^ ++++ ^^^
+ principals.extend('submits_for:%s' % lab_uuid for lab_uuid in submits_for)
- if CHERRY_LAB_UUID in lab_uuids:
? ^^ ^^^^^
+ if CHERRY_LAB_UUID in submits_for:
? ^^ ++++ ^^^
principals.append('group:admin')
return principals
|
e39b59ab345d9d72a31d739218d68072d3794cf6
|
networkzero/config.py
|
networkzero/config.py
|
ENCODING = "UTF-8"
class _Forever(object):
def __repr__(self): return "<Forever>"
FOREVER = _Forever()
SHORT_WAIT = 1 # 1 second
EVERYTHING = ""
COMMAND_ACK = "ack"
#
# Beacons will broadcast adverts at this frequency
#
BEACON_ADVERT_FREQUENCY_S = 5
#
# Adverts will expire after this many seconds unless
# a fresh broadcast is received. Default it above the
# broadcast frequency so adverts are not forever expiring
# and being recreated by the next received broadcast.
#
ADVERT_TTL_S = 3 * BEACON_ADVERT_FREQUENCY_S
VALID_PORTS = range(0x10000)
DYNAMIC_PORTS = range(0xC000, 0x10000)
|
ENCODING = "UTF-8"
class _Forever(object):
def __repr__(self): return "<Forever>"
FOREVER = _Forever()
SHORT_WAIT = 1 # 1 second
EVERYTHING = ""
COMMAND_ACK = "ack"
#
# Beacons will broadcast adverts at this frequency
#
BEACON_ADVERT_FREQUENCY_S = 2
#
# Adverts will expire after this many seconds unless
# a fresh broadcast is received. Default it above the
# broadcast frequency so adverts are not forever expiring
# and being recreated by the next received broadcast.
#
# NB since adverts are broadcast round-robin (ie only one advert
# is broadcast every BEACON_ADVERT_FREQUENCY_S seconds) we need
# to allow for the possibility that any given name might only
# be advertised, say, once every 5 times.
#
ADVERT_TTL_S = 10 * BEACON_ADVERT_FREQUENCY_S
VALID_PORTS = range(0x10000)
DYNAMIC_PORTS = range(0xC000, 0x10000)
|
Increase the broadcast frequency
|
Increase the broadcast frequency
|
Python
|
mit
|
tjguk/networkzero,tjguk/networkzero,tjguk/networkzero
|
ENCODING = "UTF-8"
class _Forever(object):
def __repr__(self): return "<Forever>"
FOREVER = _Forever()
SHORT_WAIT = 1 # 1 second
EVERYTHING = ""
COMMAND_ACK = "ack"
#
# Beacons will broadcast adverts at this frequency
#
- BEACON_ADVERT_FREQUENCY_S = 5
+ BEACON_ADVERT_FREQUENCY_S = 2
#
# Adverts will expire after this many seconds unless
# a fresh broadcast is received. Default it above the
# broadcast frequency so adverts are not forever expiring
# and being recreated by the next received broadcast.
#
+ # NB since adverts are broadcast round-robin (ie only one advert
+ # is broadcast every BEACON_ADVERT_FREQUENCY_S seconds) we need
+ # to allow for the possibility that any given name might only
+ # be advertised, say, once every 5 times.
+ #
- ADVERT_TTL_S = 3 * BEACON_ADVERT_FREQUENCY_S
+ ADVERT_TTL_S = 10 * BEACON_ADVERT_FREQUENCY_S
VALID_PORTS = range(0x10000)
DYNAMIC_PORTS = range(0xC000, 0x10000)
|
Increase the broadcast frequency
|
## Code Before:
ENCODING = "UTF-8"
class _Forever(object):
def __repr__(self): return "<Forever>"
FOREVER = _Forever()
SHORT_WAIT = 1 # 1 second
EVERYTHING = ""
COMMAND_ACK = "ack"
#
# Beacons will broadcast adverts at this frequency
#
BEACON_ADVERT_FREQUENCY_S = 5
#
# Adverts will expire after this many seconds unless
# a fresh broadcast is received. Default it above the
# broadcast frequency so adverts are not forever expiring
# and being recreated by the next received broadcast.
#
ADVERT_TTL_S = 3 * BEACON_ADVERT_FREQUENCY_S
VALID_PORTS = range(0x10000)
DYNAMIC_PORTS = range(0xC000, 0x10000)
## Instruction:
Increase the broadcast frequency
## Code After:
ENCODING = "UTF-8"
class _Forever(object):
def __repr__(self): return "<Forever>"
FOREVER = _Forever()
SHORT_WAIT = 1 # 1 second
EVERYTHING = ""
COMMAND_ACK = "ack"
#
# Beacons will broadcast adverts at this frequency
#
BEACON_ADVERT_FREQUENCY_S = 2
#
# Adverts will expire after this many seconds unless
# a fresh broadcast is received. Default it above the
# broadcast frequency so adverts are not forever expiring
# and being recreated by the next received broadcast.
#
# NB since adverts are broadcast round-robin (ie only one advert
# is broadcast every BEACON_ADVERT_FREQUENCY_S seconds) we need
# to allow for the possibility that any given name might only
# be advertised, say, once every 5 times.
#
ADVERT_TTL_S = 10 * BEACON_ADVERT_FREQUENCY_S
VALID_PORTS = range(0x10000)
DYNAMIC_PORTS = range(0xC000, 0x10000)
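A back-of-envelope check of the margin the comment describes, taking five names in rotation as the assumed worst case:
BEACON_ADVERT_FREQUENCY_S = 2
ADVERT_TTL_S = 10 * BEACON_ADVERT_FREQUENCY_S           # 20 s
worst_readvertise_gap = 5 * BEACON_ADVERT_FREQUENCY_S   # one advert per cycle, 5 names -> 10 s
assert worst_readvertise_gap < ADVERT_TTL_S             # adverts survive between refreshes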
|
ENCODING = "UTF-8"
class _Forever(object):
def __repr__(self): return "<Forever>"
FOREVER = _Forever()
SHORT_WAIT = 1 # 1 second
EVERYTHING = ""
COMMAND_ACK = "ack"
#
# Beacons will broadcast adverts at this frequency
#
- BEACON_ADVERT_FREQUENCY_S = 5
? ^
+ BEACON_ADVERT_FREQUENCY_S = 2
? ^
#
# Adverts will expire after this many seconds unless
# a fresh broadcast is received. Default it above the
# broadcast frequency so adverts are not forever expiring
# and being recreated by the next received broadcast.
#
+ # NB since adverts are broadcast round-robin (ie only one advert
+ # is broadcast every BEACON_ADVERT_FREQUENCY_S seconds) we need
+ # to allow for the possibility that any given name might only
+ # be advertised, say, once every 5 times.
+ #
- ADVERT_TTL_S = 3 * BEACON_ADVERT_FREQUENCY_S
? ^
+ ADVERT_TTL_S = 10 * BEACON_ADVERT_FREQUENCY_S
? ^^
VALID_PORTS = range(0x10000)
DYNAMIC_PORTS = range(0xC000, 0x10000)
|
03eb0081a4037e36775271fb2373277f8e89835b
|
src/mcedit2/resourceloader.py
|
src/mcedit2/resourceloader.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import zipfile
log = logging.getLogger(__name__)
class ResourceNotFound(KeyError):
pass
class ResourceLoader(object):
def __init__(self):
super(ResourceLoader, self).__init__()
self.zipFiles = []
def addZipFile(self, zipPath):
try:
zf = zipfile.ZipFile(zipPath)
except zipfile.BadZipfile as e:
raise IOError("Could not read %s as a zip file." % zipPath)
self.zipFiles.append(zf)
def openStream(self, path):
for zipFile in self.zipFiles:
try:
stream = zipFile.open(path)
break
except KeyError: # Not found in zip file
continue
else:
raise ResourceNotFound("Resource %s not found in search path" % path)
return stream
|
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import zipfile
log = logging.getLogger(__name__)
class ResourceNotFound(KeyError):
pass
class ResourceLoader(object):
def __init__(self):
super(ResourceLoader, self).__init__()
self.zipFiles = []
def addZipFile(self, zipPath):
try:
zf = zipfile.ZipFile(zipPath)
except zipfile.BadZipfile as e:
raise IOError("Could not read %s as a zip file." % zipPath)
self.zipFiles.append(zf)
def openStream(self, path):
for zipFile in self.zipFiles:
try:
stream = zipFile.open(path)
break
except KeyError: # Not found in zip file
continue
else:
raise ResourceNotFound("Resource %s not found in search path" % path)
return stream
def blockModelPaths(self):
for zf in self.zipFiles:
for name in zf.namelist():
if name.startswith("assets/minecraft/models/block"):
yield name
|
Add function to ResourceLoader for listing all block models
|
Add function to ResourceLoader for listing all block models
Note: this only lists Vanilla models; mods with models haven't been looked at yet.
|
Python
|
bsd-3-clause
|
vorburger/mcedit2,vorburger/mcedit2,Rubisk/mcedit2,Rubisk/mcedit2
|
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import zipfile
log = logging.getLogger(__name__)
class ResourceNotFound(KeyError):
pass
class ResourceLoader(object):
def __init__(self):
super(ResourceLoader, self).__init__()
self.zipFiles = []
def addZipFile(self, zipPath):
try:
zf = zipfile.ZipFile(zipPath)
except zipfile.BadZipfile as e:
raise IOError("Could not read %s as a zip file." % zipPath)
self.zipFiles.append(zf)
def openStream(self, path):
for zipFile in self.zipFiles:
try:
stream = zipFile.open(path)
break
except KeyError: # Not found in zip file
continue
else:
raise ResourceNotFound("Resource %s not found in search path" % path)
return stream
+ def blockModelPaths(self):
+ for zf in self.zipFiles:
+ for name in zf.namelist():
+ if name.startswith("assets/minecraft/models/block"):
+ yield name
+
|
Add function to ResourceLoader for listing all block models
|
## Code Before:
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import zipfile
log = logging.getLogger(__name__)
class ResourceNotFound(KeyError):
pass
class ResourceLoader(object):
def __init__(self):
super(ResourceLoader, self).__init__()
self.zipFiles = []
def addZipFile(self, zipPath):
try:
zf = zipfile.ZipFile(zipPath)
except zipfile.BadZipfile as e:
raise IOError("Could not read %s as a zip file." % zipPath)
self.zipFiles.append(zf)
def openStream(self, path):
for zipFile in self.zipFiles:
try:
stream = zipFile.open(path)
break
except KeyError: # Not found in zip file
continue
else:
raise ResourceNotFound("Resource %s not found in search path" % path)
return stream
## Instruction:
Add function to ResourceLoader for listing all block models
## Code After:
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import zipfile
log = logging.getLogger(__name__)
class ResourceNotFound(KeyError):
pass
class ResourceLoader(object):
def __init__(self):
super(ResourceLoader, self).__init__()
self.zipFiles = []
def addZipFile(self, zipPath):
try:
zf = zipfile.ZipFile(zipPath)
except zipfile.BadZipfile as e:
raise IOError("Could not read %s as a zip file." % zipPath)
self.zipFiles.append(zf)
def openStream(self, path):
for zipFile in self.zipFiles:
try:
stream = zipFile.open(path)
break
except KeyError: # Not found in zip file
continue
else:
raise ResourceNotFound("Resource %s not found in search path" % path)
return stream
def blockModelPaths(self):
for zf in self.zipFiles:
for name in zf.namelist():
if name.startswith("assets/minecraft/models/block"):
yield name
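A hypothetical usage sketch; the jar path is made up, and any zip added to the search path is scanned:
loader = ResourceLoader()
loader.addZipFile('/path/to/minecraft-1.8.jar')
for name in loader.blockModelPaths():
    print(name)   # e.g. assets/minecraft/models/block/stone.json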
|
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import zipfile
log = logging.getLogger(__name__)
class ResourceNotFound(KeyError):
pass
class ResourceLoader(object):
def __init__(self):
super(ResourceLoader, self).__init__()
self.zipFiles = []
def addZipFile(self, zipPath):
try:
zf = zipfile.ZipFile(zipPath)
except zipfile.BadZipfile as e:
raise IOError("Could not read %s as a zip file." % zipPath)
self.zipFiles.append(zf)
def openStream(self, path):
for zipFile in self.zipFiles:
try:
stream = zipFile.open(path)
break
except KeyError: # Not found in zip file
continue
else:
raise ResourceNotFound("Resource %s not found in search path" % path)
return stream
+
+ def blockModelPaths(self):
+ for zf in self.zipFiles:
+ for name in zf.namelist():
+ if name.startswith("assets/minecraft/models/block"):
+ yield name
|
370507fc48636417a10e4075917783169f3653c3
|
test_edelbaum.py
|
test_edelbaum.py
|
from astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
i_f = 0.0 # deg
i_0 = 28.5 # deg
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
expected_t_f = 191.26295 # s
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
assert_almost_equal(t_f / 86400, expected_t_f, decimal=0)
assert_almost_equal(delta_V, expected_delta_V, decimal=1)
|
from astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
i_f = 0.0 # rad
i_0 = (28.5 * u.deg).to(u.rad).value # rad
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
expected_t_f = 191.26295 # s
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
assert_almost_equal(t_f / 86400, expected_t_f, decimal=2)
assert_almost_equal(delta_V, expected_delta_V, decimal=4)
|
Fix unit error, improve precision
|
Fix unit error, improve precision
|
Python
|
mit
|
Juanlu001/pfc-uc3m
|
from astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
- i_f = 0.0 # deg
+ i_f = 0.0 # rad
- i_0 = 28.5 # deg
+ i_0 = (28.5 * u.deg).to(u.rad).value # rad
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
expected_t_f = 191.26295 # s
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
- assert_almost_equal(t_f / 86400, expected_t_f, decimal=0)
+ assert_almost_equal(t_f / 86400, expected_t_f, decimal=2)
- assert_almost_equal(delta_V, expected_delta_V, decimal=1)
+ assert_almost_equal(delta_V, expected_delta_V, decimal=4)
|
Fix unit error, improve precision
|
## Code Before:
from astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
i_f = 0.0 # deg
i_0 = 28.5 # deg
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
expected_t_f = 191.26295 # s
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
assert_almost_equal(t_f / 86400, expected_t_f, decimal=0)
assert_almost_equal(delta_V, expected_delta_V, decimal=1)
## Instruction:
Fix unit error, improve precision
## Code After:
from astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
i_f = 0.0 # rad
i_0 = (28.5 * u.deg).to(u.rad).value # rad
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
expected_t_f = 191.26295 # s
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
assert_almost_equal(t_f / 86400, expected_t_f, decimal=2)
assert_almost_equal(delta_V, expected_delta_V, decimal=4)
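The conversion matters because the analytical expressions take the inclination change in radians; a quick sanity check (the rounded value is for illustration):
from astropy import units as u
i_0 = (28.5 * u.deg).to(u.rad).value
assert abs(i_0 - 0.497419) < 1e-6   # 28.5 deg is about 0.497419 rad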
|
from astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
- i_f = 0.0 # deg
? --
+ i_f = 0.0 # rad
? ++
- i_0 = 28.5 # deg
+ i_0 = (28.5 * u.deg).to(u.rad).value # rad
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
expected_t_f = 191.26295 # s
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
- assert_almost_equal(t_f / 86400, expected_t_f, decimal=0)
? ^
+ assert_almost_equal(t_f / 86400, expected_t_f, decimal=2)
? ^
- assert_almost_equal(delta_V, expected_delta_V, decimal=1)
? ^
+ assert_almost_equal(delta_V, expected_delta_V, decimal=4)
? ^
|
7755dda1449f6264d7d7fe57dc776c731ab22d84
|
src/satosa/micro_services/processors/scope_processor.py
|
src/satosa/micro_services/processors/scope_processor.py
|
from ..attribute_processor import AttributeProcessorError
from .base_processor import BaseProcessor
CONFIG_KEY_SCOPE = 'scope'
CONFIG_DEFAULT_SCOPE = ''
class ScopeProcessor(BaseProcessor):
def process(self, internal_data, attribute, **kwargs):
scope = kwargs.get(CONFIG_KEY_SCOPE, CONFIG_DEFAULT_SCOPE)
if scope is None or scope == '':
raise AttributeProcessorError("No scope set.")
attributes = internal_data.attributes
value = attributes.get(attribute, [None])[0]
attributes[attribute][0] = value + '@' + scope
|
from ..attribute_processor import AttributeProcessorError
from .base_processor import BaseProcessor
CONFIG_KEY_SCOPE = 'scope'
CONFIG_DEFAULT_SCOPE = ''
class ScopeProcessor(BaseProcessor):
def process(self, internal_data, attribute, **kwargs):
scope = kwargs.get(CONFIG_KEY_SCOPE, CONFIG_DEFAULT_SCOPE)
if scope is None or scope == '':
raise AttributeProcessorError("No scope set.")
attributes = internal_data.attributes
values = attributes.get(attribute, [])
if not isinstance(values, list):
values = [values]
if values:
new_values=[]
for value in values:
new_values.append(value + '@' + scope)
attributes[attribute] = new_values
|
Allow scope processor to handle multivalued attributes
|
Allow scope processor to handle multivalued attributes
|
Python
|
apache-2.0
|
its-dirg/SATOSA,irtnog/SATOSA,SUNET/SATOSA,SUNET/SATOSA,irtnog/SATOSA
|
from ..attribute_processor import AttributeProcessorError
from .base_processor import BaseProcessor
CONFIG_KEY_SCOPE = 'scope'
CONFIG_DEFAULT_SCOPE = ''
class ScopeProcessor(BaseProcessor):
def process(self, internal_data, attribute, **kwargs):
scope = kwargs.get(CONFIG_KEY_SCOPE, CONFIG_DEFAULT_SCOPE)
if scope is None or scope == '':
raise AttributeProcessorError("No scope set.")
attributes = internal_data.attributes
- value = attributes.get(attribute, [None])[0]
+ values = attributes.get(attribute, [])
+ if not isinstance(values, list):
+ values = [values]
+ if values:
+ new_values=[]
+ for value in values:
+ new_values.append(value + '@' + scope)
- attributes[attribute][0] = value + '@' + scope
+ attributes[attribute] = new_values
|
Allow scope processor to handle multivalued attributes
|
## Code Before:
from ..attribute_processor import AttributeProcessorError
from .base_processor import BaseProcessor
CONFIG_KEY_SCOPE = 'scope'
CONFIG_DEFAULT_SCOPE = ''
class ScopeProcessor(BaseProcessor):
def process(self, internal_data, attribute, **kwargs):
scope = kwargs.get(CONFIG_KEY_SCOPE, CONFIG_DEFAULT_SCOPE)
if scope is None or scope == '':
raise AttributeProcessorError("No scope set.")
attributes = internal_data.attributes
value = attributes.get(attribute, [None])[0]
attributes[attribute][0] = value + '@' + scope
## Instruction:
Allow scope processor to handle multivalued attributes
## Code After:
from ..attribute_processor import AttributeProcessorError
from .base_processor import BaseProcessor
CONFIG_KEY_SCOPE = 'scope'
CONFIG_DEFAULT_SCOPE = ''
class ScopeProcessor(BaseProcessor):
def process(self, internal_data, attribute, **kwargs):
scope = kwargs.get(CONFIG_KEY_SCOPE, CONFIG_DEFAULT_SCOPE)
if scope is None or scope == '':
raise AttributeProcessorError("No scope set.")
attributes = internal_data.attributes
values = attributes.get(attribute, [])
if not isinstance(values, list):
values = [values]
if values:
new_values=[]
for value in values:
new_values.append(value + '@' + scope)
attributes[attribute] = new_values
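Behavior sketch for the multivalued case; the attribute values and scope are invented:
values = ['alice', 'ally']
scope = 'example.org'
new_values = [v + '@' + scope for v in values]
assert new_values == ['[email protected]', '[email protected]']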
|
from ..attribute_processor import AttributeProcessorError
from .base_processor import BaseProcessor
CONFIG_KEY_SCOPE = 'scope'
CONFIG_DEFAULT_SCOPE = ''
class ScopeProcessor(BaseProcessor):
def process(self, internal_data, attribute, **kwargs):
scope = kwargs.get(CONFIG_KEY_SCOPE, CONFIG_DEFAULT_SCOPE)
if scope is None or scope == '':
raise AttributeProcessorError("No scope set.")
attributes = internal_data.attributes
- value = attributes.get(attribute, [None])[0]
? ---- ---
+ values = attributes.get(attribute, [])
? +
+ if not isinstance(values, list):
+ values = [values]
+ if values:
+ new_values=[]
+ for value in values:
+ new_values.append(value + '@' + scope)
- attributes[attribute][0] = value + '@' + scope
? --- --------- ----
+ attributes[attribute] = new_values
? ++++ ++++
|
8416a3ed1a6af2d0037f77744d809441591086cd
|
mrp_bom_location/models/mrp_bom.py
|
mrp_bom_location/models/mrp_bom.py
|
from odoo import fields, models
class MrpBom(models.Model):
_inherit = "mrp.bom"
location_id = fields.Many2one(
related='picking_type_id.default_location_dest_id',
store=True,
)
class MrpBomLine(models.Model):
_inherit = "mrp.bom.line"
location_id = fields.Many2one(
related='bom_id.picking_type_id.default_location_src_id',
store=True,
)
|
from odoo import fields, models
class MrpBom(models.Model):
_inherit = "mrp.bom"
location_id = fields.Many2one(
related='picking_type_id.default_location_dest_id',
readonly=True,
store=True,
)
class MrpBomLine(models.Model):
_inherit = "mrp.bom.line"
location_id = fields.Many2one(
related='bom_id.picking_type_id.default_location_src_id',
readonly=True,
store=True,
)
|
Make the related location readonly
|
[IMP] Make the related location readonly
|
Python
|
agpl-3.0
|
OCA/manufacture,OCA/manufacture
|
from odoo import fields, models
class MrpBom(models.Model):
_inherit = "mrp.bom"
location_id = fields.Many2one(
related='picking_type_id.default_location_dest_id',
+ readonly=True,
store=True,
)
class MrpBomLine(models.Model):
_inherit = "mrp.bom.line"
location_id = fields.Many2one(
related='bom_id.picking_type_id.default_location_src_id',
+ readonly=True,
store=True,
)
|
Make the related location readonly
|
## Code Before:
from odoo import fields, models
class MrpBom(models.Model):
_inherit = "mrp.bom"
location_id = fields.Many2one(
related='picking_type_id.default_location_dest_id',
store=True,
)
class MrpBomLine(models.Model):
_inherit = "mrp.bom.line"
location_id = fields.Many2one(
related='bom_id.picking_type_id.default_location_src_id',
store=True,
)
## Instruction:
Make the related location readonly
## Code After:
from odoo import fields, models
class MrpBom(models.Model):
_inherit = "mrp.bom"
location_id = fields.Many2one(
related='picking_type_id.default_location_dest_id',
readonly=True,
store=True,
)
class MrpBomLine(models.Model):
_inherit = "mrp.bom.line"
location_id = fields.Many2one(
related='bom_id.picking_type_id.default_location_src_id',
readonly=True,
store=True,
)
|
from odoo import fields, models
class MrpBom(models.Model):
_inherit = "mrp.bom"
location_id = fields.Many2one(
related='picking_type_id.default_location_dest_id',
+ readonly=True,
store=True,
)
class MrpBomLine(models.Model):
_inherit = "mrp.bom.line"
location_id = fields.Many2one(
related='bom_id.picking_type_id.default_location_src_id',
+ readonly=True,
store=True,
)
|
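store=True is what makes the related location usable in search domains and group-bys, while the added readonly=True keeps edits on the BoM form from being written back through the relation to the picking type. A hypothetical Odoo-shell query against the stored column (env and stock_location are assumed to exist in the session):

boms = env["mrp.bom"].search([("location_id", "=", stock_location.id)])
lines = env["mrp.bom.line"].search([("location_id", "=", stock_location.id)])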
22e90cf883fa0b6d4c8acb282ebe28929f6d9487
|
nhs/patents/models.py
|
nhs/patents/models.py
|
from django.db import models
from nhs.prescriptions.models import Product
class Patent(models.Model):
drug = models.ForeignKey(Product)
expiry_date = models.DateField()
start_date = models.DateField(null=True, blank=True)
# Stupid. But you know, they're called patent numbers.
# Except they have letters in them.
number = models.CharField(max_length=200, null=True, blank=True)
|
from django.db import models
from nhs.prescriptions.models import Product
class Patent(models.Model):
drug = models.ForeignKey(Product)
expiry_date = models.DateField()
start_date = models.DateField(null=True, blank=True)
# Stupid. But you know, they're called patent numbers.
# Except they have letters in them.
number = models.CharField(max_length=200, null=True, blank=True)
source = models.CharField(max_length=200, null=True, blank=True)
|
Add the source field to the model
|
Add the source field to the model
|
Python
|
agpl-3.0
|
openhealthcare/open-prescribing,openhealthcare/open-prescribing,openhealthcare/open-prescribing
|
from django.db import models
from nhs.prescriptions.models import Product
class Patent(models.Model):
drug = models.ForeignKey(Product)
expiry_date = models.DateField()
start_date = models.DateField(null=True, blank=True)
# Stupid. But you know, they're called patent numbers.
# Except they have letters in them.
number = models.CharField(max_length=200, null=True, blank=True)
+ source = models.CharField(max_length=200, null=True, blank=True)
|
Add the source field to the model
|
## Code Before:
from django.db import models
from nhs.prescriptions.models import Product
class Patent(models.Model):
drug = models.ForeignKey(Product)
expiry_date = models.DateField()
start_date = models.DateField(null=True, blank=True)
# Stupid. But you know, they're called patent numbers.
# Except they have letters in them.
number = models.CharField(max_length=200, null=True, blank=True)
## Instruction:
Add the source field to the model
## Code After:
from django.db import models
from nhs.prescriptions.models import Product
class Patent(models.Model):
drug = models.ForeignKey(Product)
expiry_date = models.DateField()
start_date = models.DateField(null=True, blank=True)
# Stupid. But you know, they're called patent numbers.
# Except they have letters in them.
number = models.CharField(max_length=200, null=True, blank=True)
source = models.CharField(max_length=200, null=True, blank=True)
|
from django.db import models
from nhs.prescriptions.models import Product
class Patent(models.Model):
drug = models.ForeignKey(Product)
expiry_date = models.DateField()
start_date = models.DateField(null=True, blank=True)
# Stupid. But you know, they're called patent numbers.
# Except they have letters in them.
number = models.CharField(max_length=200, null=True, blank=True)
+ source = models.CharField(max_length=200, null=True, blank=True)
|
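A usage sketch, assuming a schema migration has added the column; the Product lookup and the field values are placeholders:

from datetime import date

from nhs.patents.models import Patent
from nhs.prescriptions.models import Product

product = Product.objects.first()  # placeholder: any existing product
Patent.objects.create(
    drug=product,
    expiry_date=date(2030, 1, 1),
    number="EP1234567B1",      # placeholder patent "number"
    source="EPO register",     # the newly added provenance field
)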
58321c0471f10169ea5c0f705cdb35825036d77f
|
cat_leds.py
|
cat_leds.py
|
"""Display standard input on LEDs."""
import sys
import RPi.GPIO as GPIO
from seven_segment import print_leds
pins = [17, 23, 24, 22, 27, 25, 5]
GPIO.setmode(GPIO.BCM)
GPIO.setup(pins, GPIO.OUT)
pipe_contents = sys.stdin.read()
print_leds(pipe_contents, pins)
GPIO.cleanup()
|
"""Display standard input on LEDs."""
import sys
import RPi.GPIO as GPIO
from seven_segment import print_leds
pins = [6, 19, 5, 13, 20, 12, 16]
GPIO.setmode(GPIO.BCM)
GPIO.setup(pins, GPIO.OUT)
pipe_contents = sys.stdin.read()
print_leds(pipe_contents, pins, 1/2.0)
GPIO.cleanup()
|
Set up for my new 7 seg display.
|
Set up for my new 7 seg display.
|
Python
|
mit
|
zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie
|
"""Display standard input on LEDs."""
import sys
import RPi.GPIO as GPIO
from seven_segment import print_leds
- pins = [17, 23, 24, 22, 27, 25, 5]
+
+ pins = [6, 19, 5, 13, 20, 12, 16]
GPIO.setmode(GPIO.BCM)
GPIO.setup(pins, GPIO.OUT)
pipe_contents = sys.stdin.read()
- print_leds(pipe_contents, pins)
+ print_leds(pipe_contents, pins, 1/2.0)
GPIO.cleanup()
|
Set up for my new 7 seg display.
|
## Code Before:
"""Display standard input on LEDs."""
import sys
import RPi.GPIO as GPIO
from seven_segment import print_leds
pins = [17, 23, 24, 22, 27, 25, 5]
GPIO.setmode(GPIO.BCM)
GPIO.setup(pins, GPIO.OUT)
pipe_contents = sys.stdin.read()
print_leds(pipe_contents, pins)
GPIO.cleanup()
## Instruction:
Set up for my new 7 seg display.
## Code After:
"""Display standard input on LEDs."""
import sys
import RPi.GPIO as GPIO
from seven_segment import print_leds
pins = [6, 19, 5, 13, 20, 12, 16]
GPIO.setmode(GPIO.BCM)
GPIO.setup(pins, GPIO.OUT)
pipe_contents = sys.stdin.read()
print_leds(pipe_contents, pins, 1/2.0)
GPIO.cleanup()
|
"""Display standard input on LEDs."""
import sys
import RPi.GPIO as GPIO
from seven_segment import print_leds
- pins = [17, 23, 24, 22, 27, 25, 5]
+
+ pins = [6, 19, 5, 13, 20, 12, 16]
GPIO.setmode(GPIO.BCM)
GPIO.setup(pins, GPIO.OUT)
pipe_contents = sys.stdin.read()
- print_leds(pipe_contents, pins)
+ print_leds(pipe_contents, pins, 1/2.0)
? +++++++
GPIO.cleanup()
|
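As written, an exception inside print_leds would skip GPIO.cleanup() and leave the pins claimed. A try/finally variant keeps the same happy-path behaviour; the print_leds signature (text, pins, delay) is assumed from the call above:

"""Display standard input on LEDs, releasing the pins even on errors."""
import sys

import RPi.GPIO as GPIO
from seven_segment import print_leds

pins = [6, 19, 5, 13, 20, 12, 16]
GPIO.setmode(GPIO.BCM)
GPIO.setup(pins, GPIO.OUT)
try:
    print_leds(sys.stdin.read(), pins, 1 / 2.0)
finally:
    GPIO.cleanup()  # always runs, so the GPIO pins are never left claimed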
affae124162f03ce8783ced01916c11777cff25f
|
flocker/cli/test/test_deploy_script.py
|
flocker/cli/test/test_deploy_script.py
|
from twisted.trial.unittest import TestCase, SynchronousTestCase
from ...testtools import FlockerScriptTestsMixin, StandardOptionsTestsMixin
from ..script import DeployScript, DeployOptions
class FlockerDeployTests(FlockerScriptTestsMixin, TestCase):
"""Tests for ``flocker-deploy``."""
script = DeployScript
options = DeployOptions
command_name = u'flocker-deploy'
class DeployOptionsTests(StandardOptionsTestsMixin, SynchronousTestCase):
"""Tests for :class:`DeployOptions`."""
options = DeployOptions
class FlockerDeployMainTests(SynchronousTestCase):
"""
Tests for ``DeployScript.main``.
"""
def test_deferred_result(self):
"""
``DeployScript.main`` returns ``True`` on success.
"""
script = DeployScript()
self.assertTrue(script.main(reactor=object(), options={}))
|
from twisted.trial.unittest import TestCase, SynchronousTestCase
from ...testtools import FlockerScriptTestsMixin, StandardOptionsTestsMixin
from ..script import DeployScript, DeployOptions
class FlockerDeployTests(FlockerScriptTestsMixin, TestCase):
"""Tests for ``flocker-deploy``."""
script = DeployScript
options = DeployOptions
command_name = u'flocker-deploy'
class DeployOptionsTests(StandardOptionsTestsMixin, SynchronousTestCase):
"""Tests for :class:`DeployOptions`."""
options = DeployOptions
def test_custom_configs(self):
"""Custom config files can be specified."""
options = self.options()
options.parseOptions([b"/path/somefile.json", b"/path/anotherfile.json"])
self.assertEqual(options, {deploy: b"/path/somefile.json", app: b"/path/anotherfile.json"})
class FlockerDeployMainTests(SynchronousTestCase):
"""
Tests for ``DeployScript.main``.
"""
def test_success(self):
"""
``DeployScript.main`` returns ``True`` on success.
"""
script = DeployScript()
self.assertTrue(script.main(reactor=object(), options={}))
|
Test that DeployOptions sets two options
|
Test that DeployOptions sets two options
|
Python
|
apache-2.0
|
mbrukman/flocker,runcom/flocker,LaynePeng/flocker,w4ngyi/flocker,LaynePeng/flocker,AndyHuu/flocker,hackday-profilers/flocker,runcom/flocker,achanda/flocker,achanda/flocker,Azulinho/flocker,1d4Nf6/flocker,lukemarsden/flocker,runcom/flocker,AndyHuu/flocker,hackday-profilers/flocker,agonzalezro/flocker,Azulinho/flocker,moypray/flocker,hackday-profilers/flocker,agonzalezro/flocker,LaynePeng/flocker,lukemarsden/flocker,jml/flocker,wallnerryan/flocker-profiles,adamtheturtle/flocker,Azulinho/flocker,adamtheturtle/flocker,AndyHuu/flocker,moypray/flocker,beni55/flocker,1d4Nf6/flocker,jml/flocker,mbrukman/flocker,beni55/flocker,achanda/flocker,w4ngyi/flocker,agonzalezro/flocker,mbrukman/flocker,wallnerryan/flocker-profiles,jml/flocker,moypray/flocker,beni55/flocker,1d4Nf6/flocker,adamtheturtle/flocker,w4ngyi/flocker,lukemarsden/flocker,wallnerryan/flocker-profiles
|
from twisted.trial.unittest import TestCase, SynchronousTestCase
from ...testtools import FlockerScriptTestsMixin, StandardOptionsTestsMixin
from ..script import DeployScript, DeployOptions
class FlockerDeployTests(FlockerScriptTestsMixin, TestCase):
"""Tests for ``flocker-deploy``."""
script = DeployScript
options = DeployOptions
command_name = u'flocker-deploy'
class DeployOptionsTests(StandardOptionsTestsMixin, SynchronousTestCase):
"""Tests for :class:`DeployOptions`."""
options = DeployOptions
+ def test_custom_configs(self):
+ """Custom config files can be specified."""
+ options = self.options()
+ options.parseOptions([b"/path/somefile.json", b"/path/anotherfile.json"])
+ self.assertEqual(options, {deploy: b"/path/somefile.json", app: b"/path/anotherfile.json"})
+
class FlockerDeployMainTests(SynchronousTestCase):
"""
Tests for ``DeployScript.main``.
"""
- def test_deferred_result(self):
+ def test_success(self):
"""
``DeployScript.main`` returns ``True`` on success.
"""
script = DeployScript()
self.assertTrue(script.main(reactor=object(), options={}))
+
+
|
Test that DeployOptions sets two options
|
## Code Before:
from twisted.trial.unittest import TestCase, SynchronousTestCase
from ...testtools import FlockerScriptTestsMixin, StandardOptionsTestsMixin
from ..script import DeployScript, DeployOptions
class FlockerDeployTests(FlockerScriptTestsMixin, TestCase):
"""Tests for ``flocker-deploy``."""
script = DeployScript
options = DeployOptions
command_name = u'flocker-deploy'
class DeployOptionsTests(StandardOptionsTestsMixin, SynchronousTestCase):
"""Tests for :class:`DeployOptions`."""
options = DeployOptions
class FlockerDeployMainTests(SynchronousTestCase):
"""
Tests for ``DeployScript.main``.
"""
def test_deferred_result(self):
"""
``DeployScript.main`` returns ``True`` on success.
"""
script = DeployScript()
self.assertTrue(script.main(reactor=object(), options={}))
## Instruction:
Test that DeployOptions sets two options
## Code After:
from twisted.trial.unittest import TestCase, SynchronousTestCase
from ...testtools import FlockerScriptTestsMixin, StandardOptionsTestsMixin
from ..script import DeployScript, DeployOptions
class FlockerDeployTests(FlockerScriptTestsMixin, TestCase):
"""Tests for ``flocker-deploy``."""
script = DeployScript
options = DeployOptions
command_name = u'flocker-deploy'
class DeployOptionsTests(StandardOptionsTestsMixin, SynchronousTestCase):
"""Tests for :class:`DeployOptions`."""
options = DeployOptions
def test_custom_configs(self):
"""Custom config files can be specified."""
options = self.options()
options.parseOptions([b"/path/somefile.json", b"/path/anotherfile.json"])
self.assertEqual(options, {deploy: b"/path/somefile.json", app: b"/path/anotherfile.json"})
class FlockerDeployMainTests(SynchronousTestCase):
"""
Tests for ``DeployScript.main``.
"""
def test_success(self):
"""
``DeployScript.main`` returns ``True`` on success.
"""
script = DeployScript()
self.assertTrue(script.main(reactor=object(), options={}))
|
from twisted.trial.unittest import TestCase, SynchronousTestCase
from ...testtools import FlockerScriptTestsMixin, StandardOptionsTestsMixin
from ..script import DeployScript, DeployOptions
class FlockerDeployTests(FlockerScriptTestsMixin, TestCase):
"""Tests for ``flocker-deploy``."""
script = DeployScript
options = DeployOptions
command_name = u'flocker-deploy'
class DeployOptionsTests(StandardOptionsTestsMixin, SynchronousTestCase):
"""Tests for :class:`DeployOptions`."""
options = DeployOptions
+ def test_custom_configs(self):
+ """Custom config files can be specified."""
+ options = self.options()
+ options.parseOptions([b"/path/somefile.json", b"/path/anotherfile.json"])
+ self.assertEqual(options, {deploy: b"/path/somefile.json", app: b"/path/anotherfile.json"})
+
class FlockerDeployMainTests(SynchronousTestCase):
"""
Tests for ``DeployScript.main``.
"""
- def test_deferred_result(self):
+ def test_success(self):
"""
``DeployScript.main`` returns ``True`` on success.
"""
script = DeployScript()
self.assertTrue(script.main(reactor=object(), options={}))
+
|
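Note that the added test references the bare names deploy and app, which are undefined and would raise NameError before any assertion runs. A corrected sketch, assuming the two positional arguments land under the string keys "deploy" and "app" (the key names are a guess, not confirmed by this record):

def test_custom_configs(self):
    """Custom config files can be specified."""
    options = self.options()
    options.parseOptions([b"/path/somefile.json", b"/path/anotherfile.json"])
    self.assertEqual(options["deploy"], b"/path/somefile.json")
    self.assertEqual(options["app"], b"/path/anotherfile.json")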
3bce013c51c454721de3a868ea6d8e8c6d335112
|
cycli/neo4j.py
|
cycli/neo4j.py
|
import requests
from py2neo import Graph, authenticate
class Neo4j:
def __init__(self, host, port, username=None, password=None):
self.host = host
self.port = port
self.username = username
self.password = password
self.host_port = "{host}:{port}".format(host=host, port=port)
self.url = "http://{host_port}/db/data/".format(host_port=self.host_port)
def connection(self):
if self.username and self.password:
authenticate(self.host_port, self.username, self.password)
graph = Graph(self.url)
return graph
def cypher(self, query):
tx = self.connection().cypher.begin()
try:
tx.append(query)
results = tx.process()
tx.commit()
except Exception as e:
results = e
except KeyboardInterrupt:
tx.rollback()
results = ""
return results
def labels(self):
return sorted(list(self.connection().node_labels))
def relationship_types(self):
return sorted(list(self.connection().relationship_types))
def properties(self):
url = self.url + "propertykeys"
r = requests.get(url, auth=(self.username, self.password))
props = r.json()
return sorted(props)
|
import requests
from py2neo import Graph, authenticate
class Neo4j:
def __init__(self, host, port, username=None, password=None):
self.username = username
self.password = password
self.host_port = "{host}:{port}".format(host=host, port=port)
self.url = "http://{host_port}/db/data/".format(host_port=self.host_port)
def connection(self):
if self.username and self.password:
authenticate(self.host_port, self.username, self.password)
graph = Graph(self.url)
return graph
def cypher(self, query):
tx = self.connection().cypher.begin()
try:
tx.append(query)
results = tx.process()
tx.commit()
except Exception as e:
results = e
except KeyboardInterrupt:
tx.rollback()
results = ""
return results
def labels(self):
return sorted(list(self.connection().node_labels))
def relationship_types(self):
return sorted(list(self.connection().relationship_types))
def properties(self):
url = self.url + "propertykeys"
r = requests.get(url, auth=(self.username, self.password))
props = r.json()
return sorted(props)
|
Remove host and port attributes from Neo4j
|
Remove host and port attributes from Neo4j
|
Python
|
mit
|
nicolewhite/cycli,nicolewhite/cycli
|
import requests
from py2neo import Graph, authenticate
class Neo4j:
def __init__(self, host, port, username=None, password=None):
- self.host = host
- self.port = port
self.username = username
self.password = password
self.host_port = "{host}:{port}".format(host=host, port=port)
self.url = "http://{host_port}/db/data/".format(host_port=self.host_port)
def connection(self):
if self.username and self.password:
authenticate(self.host_port, self.username, self.password)
graph = Graph(self.url)
return graph
def cypher(self, query):
tx = self.connection().cypher.begin()
try:
tx.append(query)
results = tx.process()
tx.commit()
except Exception as e:
results = e
except KeyboardInterrupt:
tx.rollback()
results = ""
return results
def labels(self):
return sorted(list(self.connection().node_labels))
def relationship_types(self):
return sorted(list(self.connection().relationship_types))
def properties(self):
url = self.url + "propertykeys"
r = requests.get(url, auth=(self.username, self.password))
props = r.json()
return sorted(props)
|
Remove host and port attributes from Neo4j
|
## Code Before:
import requests
from py2neo import Graph, authenticate
class Neo4j:
def __init__(self, host, port, username=None, password=None):
self.host = host
self.port = port
self.username = username
self.password = password
self.host_port = "{host}:{port}".format(host=host, port=port)
self.url = "http://{host_port}/db/data/".format(host_port=self.host_port)
def connection(self):
if self.username and self.password:
authenticate(self.host_port, self.username, self.password)
graph = Graph(self.url)
return graph
def cypher(self, query):
tx = self.connection().cypher.begin()
try:
tx.append(query)
results = tx.process()
tx.commit()
except Exception as e:
results = e
except KeyboardInterrupt:
tx.rollback()
results = ""
return results
def labels(self):
return sorted(list(self.connection().node_labels))
def relationship_types(self):
return sorted(list(self.connection().relationship_types))
def properties(self):
url = self.url + "propertykeys"
r = requests.get(url, auth=(self.username, self.password))
props = r.json()
return sorted(props)
## Instruction:
Remove host and port attributes from Neo4j
## Code After:
import requests
from py2neo import Graph, authenticate
class Neo4j:
def __init__(self, host, port, username=None, password=None):
self.username = username
self.password = password
self.host_port = "{host}:{port}".format(host=host, port=port)
self.url = "http://{host_port}/db/data/".format(host_port=self.host_port)
def connection(self):
if self.username and self.password:
authenticate(self.host_port, self.username, self.password)
graph = Graph(self.url)
return graph
def cypher(self, query):
tx = self.connection().cypher.begin()
try:
tx.append(query)
results = tx.process()
tx.commit()
except Exception as e:
results = e
except KeyboardInterrupt:
tx.rollback()
results = ""
return results
def labels(self):
return sorted(list(self.connection().node_labels))
def relationship_types(self):
return sorted(list(self.connection().relationship_types))
def properties(self):
url = self.url + "propertykeys"
r = requests.get(url, auth=(self.username, self.password))
props = r.json()
return sorted(props)
|
import requests
from py2neo import Graph, authenticate
class Neo4j:
def __init__(self, host, port, username=None, password=None):
- self.host = host
- self.port = port
self.username = username
self.password = password
self.host_port = "{host}:{port}".format(host=host, port=port)
self.url = "http://{host_port}/db/data/".format(host_port=self.host_port)
def connection(self):
if self.username and self.password:
authenticate(self.host_port, self.username, self.password)
graph = Graph(self.url)
return graph
def cypher(self, query):
tx = self.connection().cypher.begin()
try:
tx.append(query)
results = tx.process()
tx.commit()
except Exception as e:
results = e
except KeyboardInterrupt:
tx.rollback()
results = ""
return results
def labels(self):
return sorted(list(self.connection().node_labels))
def relationship_types(self):
return sorted(list(self.connection().relationship_types))
def properties(self):
url = self.url + "propertykeys"
r = requests.get(url, auth=(self.username, self.password))
props = r.json()
return sorted(props)
|
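host and port are still accepted by __init__; they are simply folded into host_port rather than kept as attributes, so existing callers keep working. A hypothetical session against a local server (credentials are placeholders):

neo4j = Neo4j("localhost", 7474, username="neo4j", password="secret")
print(neo4j.cypher("MATCH (n) RETURN count(n) AS total"))
print(neo4j.labels())              # sorted node labels
print(neo4j.relationship_types())  # sorted relationship types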
726370913332fd5e27bb04446b75ef59fb711a9c
|
broadgauge/main.py
|
broadgauge/main.py
|
import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
|
import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
'MAIL_SERVER',
'MAIL_USERNAME',
'MAIL_PASSWORD',
'MAIL_TLS',
'FROM_ADDRESS',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
|
Read mail settings from config.
|
Read mail settings from config.
|
Python
|
bsd-3-clause
|
fsmk/fsmkschool,anandology/broadgauge
|
import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
+ 'MAIL_SERVER',
+ 'MAIL_USERNAME',
+ 'MAIL_PASSWORD',
+ 'MAIL_TLS',
+ 'FROM_ADDRESS',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
|
Read mail settings from config.
|
## Code Before:
import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
## Instruction:
Read mail settings from config.
## Code After:
import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
'MAIL_SERVER',
'MAIL_USERNAME',
'MAIL_PASSWORD',
'MAIL_TLS',
'FROM_ADDRESS',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
|
import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
+ 'MAIL_SERVER',
+ 'MAIL_USERNAME',
+ 'MAIL_PASSWORD',
+ 'MAIL_TLS',
+ 'FROM_ADDRESS',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
|
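Each environment name is copied to web.config under a lowercased key, and every value arrives as a string, so a flag such as MAIL_TLS presumably gets coerced downstream. A minimal sketch (placeholder values; web.py's web.config supports attribute access):

import os

os.environ["MAIL_SERVER"] = "smtp.example.com"  # placeholder values
os.environ["MAIL_TLS"] = "true"

load_config_from_env()
print(web.config.mail_server)  # "smtp.example.com" -- note the lowercased key
print(web.config.mail_tls)     # the string "true", not a boolean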
c5001c6f6dab2639fdeb5735f4d4f6f7b8d35395
|
pamqp/body.py
|
pamqp/body.py
|
class ContentBody(object):
"""ContentBody carries the value for an AMQP message body frame"""
name = 'ContentBody'
def __init__(self, value=None):
"""Create a new instance of a ContentBody object, passing in the value
of the message body
:param str|unicode|bytes value: The content body
"""
self.value = value
def __len__(self):
"""Return the length of the content body value
:rtype: int
"""
return len(self.value)
def marshal(self):
"""Return the marshaled content body. This method is here for API
compatibility, there is no special marhsaling for the payload in a
content frame.
:rtype: str|unicode|bytes
"""
return self.value
def unmarshal(self, data):
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
:rtype: str|unicode|bytes
"""
self.value = data
|
import typing
class ContentBody:
"""ContentBody carries the value for an AMQP message body frame"""
def __init__(self, value: typing.Optional[bytes] = None):
"""Create a new instance of a ContentBody object"""
self.value = value
def __len__(self) -> int:
"""Return the length of the content body value"""
return len(self.value) if self.value else 0
def marshal(self) -> bytes:
"""Return the marshaled content body. This method is here for API
compatibility, there is no special marshaling for the payload in a
content frame.
"""
return self.value
def unmarshal(self, data: bytes) -> typing.NoReturn:
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
"""
self.value = data
|
Update to include typing, cleanup docstrings and code
|
Update to include typing, cleanup docstrings and code
|
Python
|
bsd-3-clause
|
gmr/pamqp
|
+ import typing
- class ContentBody(object):
+ class ContentBody:
"""ContentBody carries the value for an AMQP message body frame"""
+ def __init__(self, value: typing.Optional[bytes] = None):
- name = 'ContentBody'
-
- def __init__(self, value=None):
- """Create a new instance of a ContentBody object, passing in the value
+ """Create a new instance of a ContentBody object"""
- of the message body
-
- :param str|unicode|bytes value: The content body
-
- """
self.value = value
- def __len__(self):
+ def __len__(self) -> int:
- """Return the length of the content body value
+ """Return the length of the content body value"""
+ return len(self.value) if self.value else 0
- :rtype: int
-
- """
- return len(self.value)
-
- def marshal(self):
+ def marshal(self) -> bytes:
"""Return the marshaled content body. This method is here for API
- compatibility, there is no special marhsaling for the payload in a
+ compatibility, there is no special marshaling for the payload in a
content frame.
-
- :rtype: str|unicode|bytes
"""
return self.value
- def unmarshal(self, data):
+ def unmarshal(self, data: bytes) -> typing.NoReturn:
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
- :rtype: str|unicode|bytes
-
"""
self.value = data
|
Update to include typing, cleanup docstrings and code
|
## Code Before:
class ContentBody(object):
"""ContentBody carries the value for an AMQP message body frame"""
name = 'ContentBody'
def __init__(self, value=None):
"""Create a new instance of a ContentBody object, passing in the value
of the message body
:param str|unicode|bytes value: The content body
"""
self.value = value
def __len__(self):
"""Return the length of the content body value
:rtype: int
"""
return len(self.value)
def marshal(self):
"""Return the marshaled content body. This method is here for API
compatibility, there is no special marhsaling for the payload in a
content frame.
:rtype: str|unicode|bytes
"""
return self.value
def unmarshal(self, data):
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
:rtype: str|unicode|bytes
"""
self.value = data
## Instruction:
Update to include typing, cleanup docstrings and code
## Code After:
import typing
class ContentBody:
"""ContentBody carries the value for an AMQP message body frame"""
def __init__(self, value: typing.Optional[bytes] = None):
"""Create a new instance of a ContentBody object"""
self.value = value
def __len__(self) -> int:
"""Return the length of the content body value"""
return len(self.value) if self.value else 0
def marshal(self) -> bytes:
"""Return the marshaled content body. This method is here for API
compatibility, there is no special marshaling for the payload in a
content frame.
"""
return self.value
def unmarshal(self, data: bytes) -> typing.NoReturn:
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
"""
self.value = data
|
+ import typing
- class ContentBody(object):
? --------
+ class ContentBody:
"""ContentBody carries the value for an AMQP message body frame"""
+ def __init__(self, value: typing.Optional[bytes] = None):
- name = 'ContentBody'
-
- def __init__(self, value=None):
- """Create a new instance of a ContentBody object, passing in the value
? ^^^^^^^^^^^^^^^^^^^^^^
+ """Create a new instance of a ContentBody object"""
? ^^^
- of the message body
-
- :param str|unicode|bytes value: The content body
-
- """
self.value = value
- def __len__(self):
+ def __len__(self) -> int:
? +++++++
- """Return the length of the content body value
+ """Return the length of the content body value"""
? +++
+ return len(self.value) if self.value else 0
- :rtype: int
-
- """
- return len(self.value)
-
- def marshal(self):
+ def marshal(self) -> bytes:
? +++++++++
"""Return the marshaled content body. This method is here for API
- compatibility, there is no special marhsaling for the payload in a
? -
+ compatibility, there is no special marshaling for the payload in a
? +
content frame.
-
- :rtype: str|unicode|bytes
"""
return self.value
- def unmarshal(self, data):
+ def unmarshal(self, data: bytes) -> typing.NoReturn:
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
- :rtype: str|unicode|bytes
-
"""
self.value = data
|
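A round-trip sketch through the rewritten class, including the new length guard that previously would have raised on an empty body; typing.NoReturn on unmarshal is unusual for a method that simply returns None, and the sketch treats the return value as None:

body = ContentBody(b"hello")
assert len(body) == 5
payload = body.marshal()       # bytes pass through unchanged

received = ContentBody()
assert len(received) == 0      # guarded: no TypeError from len(None)
received.unmarshal(payload)
assert received.value == b"hello"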
f1cc57acd774eace69da7ec0ae9a516207d8ff5c
|
pyrfc3339/__init__.py
|
pyrfc3339/__init__.py
|
from generator import generate
from parser import parse
__all__ = ['generate', 'parse']
|
from pyrfc3339.generator import generate
from pyrfc3339.parser import parse
__all__ = ['generate', 'parse']
|
Fix imports for Python 3
|
Fix imports for Python 3
|
Python
|
mit
|
kurtraschke/pyRFC3339
|
- from generator import generate
+ from pyrfc3339.generator import generate
- from parser import parse
+ from pyrfc3339.parser import parse
__all__ = ['generate', 'parse']
|
Fix imports for Python 3
|
## Code Before:
from generator import generate
from parser import parse
__all__ = ['generate', 'parse']
## Instruction:
Fix imports for Python 3
## Code After:
from pyrfc3339.generator import generate
from pyrfc3339.parser import parse
__all__ = ['generate', 'parse']
|
- from generator import generate
+ from pyrfc3339.generator import generate
? ++++++++++
- from parser import parse
+ from pyrfc3339.parser import parse
? ++++++++++
__all__ = ['generate', 'parse']
|
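With the package-qualified imports the module imports cleanly on both Python 2 and 3. A hypothetical round trip, assuming pyRFC3339's documented parse/generate signatures:

from pyrfc3339 import generate, parse

dt = parse("2009-01-01T10:01:02Z")   # timezone-aware datetime in UTC
assert generate(dt) == "2009-01-01T10:01:02Z"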
a492b0395ff56f150d2fde506b6536f0324f31f6
|
teerace/local_tests.py
|
teerace/local_tests.py
|
from django.test.simple import run_tests as default_run_tests
from django.conf import settings
def run_tests(test_labels, *args, **kwargs):
del test_labels
return default_run_tests(settings.OUR_APPS, *args, **kwargs)
|
from django.test.simple import DjangoTestSuiteRunner
from django.conf import settings
class LocalTestSuiteRunner(DjangoTestSuiteRunner):
def run_tests(self, test_labels, extra_tests=None, **kwargs):
del test_labels
super(LocalTestSuiteRunner, self).run_tests(settings.OUR_APPS, extra_tests, **kwargs)
|
Test runner is now class-based.
|
Test runner is now class-based.
|
Python
|
bsd-3-clause
|
SushiTee/teerace,SushiTee/teerace,SushiTee/teerace
|
- from django.test.simple import run_tests as default_run_tests
+ from django.test.simple import DjangoTestSuiteRunner
- from django.conf import settings
+ from django.conf import settings
+ class LocalTestSuiteRunner(DjangoTestSuiteRunner):
- def run_tests(test_labels, *args, **kwargs):
- del test_labels
- return default_run_tests(settings.OUR_APPS, *args, **kwargs)
+ def run_tests(self, test_labels, extra_tests=None, **kwargs):
+ del test_labels
+ super(LocalTestSuiteRunner, self).run_tests(settings.OUR_APPS, extra_tests, **kwargs)
+
+
|
Test runner is now class-based.
|
## Code Before:
from django.test.simple import run_tests as default_run_tests
from django.conf import settings
def run_tests(test_labels, *args, **kwargs):
del test_labels
return default_run_tests(settings.OUR_APPS, *args, **kwargs)
## Instruction:
Test runner is now class-based.
## Code After:
from django.test.simple import DjangoTestSuiteRunner
from django.conf import settings
class LocalTestSuiteRunner(DjangoTestSuiteRunner):
def run_tests(self, test_labels, extra_tests=None, **kwargs):
del test_labels
super(LocalTestSuiteRunner, self).run_tests(settings.OUR_APPS, extra_tests, **kwargs)
|
- from django.test.simple import run_tests as default_run_tests
+ from django.test.simple import DjangoTestSuiteRunner
- from django.conf import settings
? -
+ from django.conf import settings
+ class LocalTestSuiteRunner(DjangoTestSuiteRunner):
+
- def run_tests(test_labels, *args, **kwargs):
? ^ ^^
+ def run_tests(self, test_labels, extra_tests=None, **kwargs):
? + ++++++ ^^^^ ^^^ +++++++
- del test_labels
+ del test_labels
? +
- return default_run_tests(settings.OUR_APPS, *args, **kwargs)
+ super(LocalTestSuiteRunner, self).run_tests(settings.OUR_APPS, extra_tests, **kwargs)
+
|
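The class-based runner is activated through Django's TEST_RUNNER setting. Note also that the override discards the super call's result, although Django expects run_tests to return the failure count; a sketch with the return restored:

# settings.py (sketch)
TEST_RUNNER = "teerace.local_tests.LocalTestSuiteRunner"

# local_tests.py variant that preserves the failure-count contract
from django.test.simple import DjangoTestSuiteRunner
from django.conf import settings


class LocalTestSuiteRunner(DjangoTestSuiteRunner):
    def run_tests(self, test_labels, extra_tests=None, **kwargs):
        del test_labels
        return super(LocalTestSuiteRunner, self).run_tests(
            settings.OUR_APPS, extra_tests, **kwargs)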
6323084f97ac80a579d9c8ef7d5fec9cd9a3ec4d
|
src/ipf/ipfblock/connection.py
|
src/ipf/ipfblock/connection.py
|
import ioport
import weakref
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = weakref.ref(oport)
self._iport = weakref.ref(iport)
self._oport().increase_binded_count()
self._iport().set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
self._oport().decrease_binded_count()
self._iport().set_free()
def contains_port(self, port):
return self._iport() == port or self._oport() == port
def process(self):
""" Send value from output port to input port """
self._iport().pass_value(self._oport().get_value())
|
import ioport
import weakref
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = weakref.ref(oport)
self._iport = weakref.ref(iport)
self._oport().increase_binded_count()
self._iport().set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
if self._oport() is not None:
self._oport().decrease_binded_count()
if self._iport() is not None:
self._iport().set_free()
def contains_port(self, port):
return self._iport() == port or self._oport() == port
def process(self):
""" Send value from output port to input port """
self._iport().pass_value(self._oport().get_value())
|
Check weakrefs to ports before using in destructor.
|
Check weakrefs to ports before using in destructor.
Prevent an exception from being raised when the connection is deleted after the block.
|
Python
|
lgpl-2.1
|
anton-golubkov/Garland,anton-golubkov/Garland
|
import ioport
import weakref
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = weakref.ref(oport)
self._iport = weakref.ref(iport)
self._oport().increase_binded_count()
self._iport().set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
+ if self._oport() is not None:
- self._oport().decrease_binded_count()
+ self._oport().decrease_binded_count()
+ if self._iport() is not None:
- self._iport().set_free()
+ self._iport().set_free()
def contains_port(self, port):
return self._iport() == port or self._oport() == port
def process(self):
""" Send value from output port to input port """
self._iport().pass_value(self._oport().get_value())
|
Check weakrefs to ports before using in destructor.
|
## Code Before:
import ioport
import weakref
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = weakref.ref(oport)
self._iport = weakref.ref(iport)
self._oport().increase_binded_count()
self._iport().set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
self._oport().decrease_binded_count()
self._iport().set_free()
def contains_port(self, port):
return self._iport() == port or self._oport() == port
def process(self):
""" Send value from output port to input port """
self._iport().pass_value(self._oport().get_value())
## Instruction:
Check weakrefs to ports before using in destructor.
## Code After:
import ioport
import weakref
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = weakref.ref(oport)
self._iport = weakref.ref(iport)
self._oport().increase_binded_count()
self._iport().set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
if self._oport() is not None:
self._oport().decrease_binded_count()
if self._iport() is not None:
self._iport().set_free()
def contains_port(self, port):
return self._iport() == port or self._oport() == port
def process(self):
""" Send value from output port to input port """
self._iport().pass_value(self._oport().get_value())
|
import ioport
import weakref
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = weakref.ref(oport)
self._iport = weakref.ref(iport)
self._oport().increase_binded_count()
self._iport().set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
+ if self._oport() is not None:
- self._oport().decrease_binded_count()
+ self._oport().decrease_binded_count()
? ++++
+ if self._iport() is not None:
- self._iport().set_free()
+ self._iport().set_free()
? ++++
def contains_port(self, port):
return self._iport() == port or self._oport() == port
def process(self):
""" Send value from output port to input port """
self._iport().pass_value(self._oport().get_value())
|
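The failure mode the guards address: if both ports are garbage-collected before the connection, the weak references resolve to None and the old __del__ raised a TypeError during teardown. FakePort is a hypothetical stub, and the sketch assumes ioport.is_connect_allowed accepts it:

class FakePort(object):
    """Hypothetical stand-in implementing just the methods Connection calls."""
    def increase_binded_count(self): pass
    def decrease_binded_count(self): pass
    def set_binded(self): pass
    def set_free(self): pass

oport, iport = FakePort(), FakePort()
conn = Connection(oport, iport)  # assumes is_connect_allowed approves the stubs
del oport, iport                 # the ports die first ...
del conn                         # ... and the guarded __del__ no longer raises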
38ac22c8380e91777c22f7dcb9a5297e9737d522
|
pymatgen/io/cp2k/tests/test_outputs.py
|
pymatgen/io/cp2k/tests/test_outputs.py
|
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
MODULE_DIR = Path(__file__).resolve().parent
class SetTest(PymatgenTest):
def setUp(self):
self.out = Cp2kOutput(filename='../test_files/cp2k.out', auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
|
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
TEST_FILES = Path(__file__).parent.parent.joinpath("test_files").resolve()
class SetTest(PymatgenTest):
def setUp(self):
self.out = Cp2kOutput(filename=TEST_FILES / "cp2k.out", auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
|
Switch output to use TEST_FILES path
|
Switch output to use TEST_FILES path
|
Python
|
mit
|
davidwaroquiers/pymatgen,davidwaroquiers/pymatgen,gmatteo/pymatgen,richardtran415/pymatgen,davidwaroquiers/pymatgen,gVallverdu/pymatgen,fraricci/pymatgen,davidwaroquiers/pymatgen,gmatteo/pymatgen,gVallverdu/pymatgen,fraricci/pymatgen,richardtran415/pymatgen,vorwerkc/pymatgen,gVallverdu/pymatgen,vorwerkc/pymatgen,fraricci/pymatgen,vorwerkc/pymatgen,richardtran415/pymatgen,fraricci/pymatgen,vorwerkc/pymatgen,gVallverdu/pymatgen,richardtran415/pymatgen
|
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
- MODULE_DIR = Path(__file__).resolve().parent
+ TEST_FILES = Path(__file__).parent.parent.joinpath("test_files").resolve()
class SetTest(PymatgenTest):
def setUp(self):
- self.out = Cp2kOutput(filename='../test_files/cp2k.out', auto_load=True)
+ self.out = Cp2kOutput(filename=TEST_FILES / "cp2k.out", auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
|
Switch output to use TEST_FILES path
|
## Code Before:
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
MODULE_DIR = Path(__file__).resolve().parent
class SetTest(PymatgenTest):
def setUp(self):
self.out = Cp2kOutput(filename='../test_files/cp2k.out', auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
## Instruction:
Switch output to use TEST_FILES path
## Code After:
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
TEST_FILES = Path(__file__).parent.parent.joinpath("test_files").resolve()
class SetTest(PymatgenTest):
def setUp(self):
self.out = Cp2kOutput(filename=TEST_FILES / "cp2k.out", auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
|
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
- MODULE_DIR = Path(__file__).resolve().parent
+ TEST_FILES = Path(__file__).parent.parent.joinpath("test_files").resolve()
class SetTest(PymatgenTest):
def setUp(self):
- self.out = Cp2kOutput(filename='../test_files/cp2k.out', auto_load=True)
? ^^^ ^^^^^^^^^^^ ^
+ self.out = Cp2kOutput(filename=TEST_FILES / "cp2k.out", auto_load=True)
? ^^^^^^^^^^^ ^^ ^
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
|
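Anchoring the path to __file__ makes the suite independent of the working directory; the old relative '../test_files/cp2k.out' only resolved when the tests ran from inside their own folder. An equivalent spelling with Path.parents (equivalent for non-symlinked layouts):

from pathlib import Path

TEST_FILES = Path(__file__).resolve().parents[1] / "test_files"
cp2k_out = TEST_FILES / "cp2k.out"   # stable regardless of os.getcwd()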