Column schema: commit (stringlengths 40–40), old_file (stringlengths 4–106), new_file (stringlengths 4–106), old_contents (stringlengths 10–2.94k), new_contents (stringlengths 21–2.95k), subject (stringlengths 16–444), message (stringlengths 17–2.63k), lang (stringclasses, 1 value), license (stringclasses, 13 values), repos (stringlengths 7–43k), ndiff (stringlengths 52–3.31k), instruction (stringlengths 16–444), content (stringlengths 133–4.32k), diff (stringlengths 49–3.61k)

commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos | ndiff | instruction | content | diff
---|---|---|---|---|---|---|---|---|---|---|---|---|---
9b032e06156aa011e5d78d0d9ea297420cb33c2e
|
form_designer/contrib/cms_plugins/form_designer_form/migrations/0001_initial.py
|
form_designer/contrib/cms_plugins/form_designer_form/migrations/0001_initial.py
|
from __future__ import unicode_literals
import cms
from django.db import migrations, models
from pkg_resources import parse_version as V
# Django CMS 3.3.1 is oldest release where the change affects.
# Refs https://github.com/divio/django-cms/commit/871a164
if V(cms.__version__) >= V('3.3.1'):
field_kwargs = {'related_name': 'form_designer_form_cmsformdefinition'}
else:
field_kwargs = {}
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
('form_designer', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CMSFormDefinition',
fields=[
('cmsplugin_ptr',
models.OneToOneField(
serialize=False,
auto_created=True,
primary_key=True,
to='cms.CMSPlugin',
parent_link=True,
**field_kwargs)),
('form_definition',
models.ForeignKey(
verbose_name='form',
to='form_designer.FormDefinition')),
],
options={
'abstract': False,
},
bases=(
'cms.cmsplugin',
),
),
]
|
from __future__ import unicode_literals
import cms
from django.db import migrations, models
from pkg_resources import parse_version as V
# Django CMS 3.3.1 is oldest release where the change affects.
# Refs https://github.com/divio/django-cms/commit/871a164
if V(cms.__version__) >= V('3.3.1'):
field_kwargs = {'related_name': 'form_designer_form_cmsformdefinition'}
else:
field_kwargs = {}
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
('form_designer', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CMSFormDefinition',
fields=[
('cmsplugin_ptr',
models.OneToOneField(
serialize=False,
auto_created=True,
primary_key=True,
to='cms.CMSPlugin',
parent_link=True,
on_delete=models.CASCADE,
**field_kwargs)),
('form_definition',
models.ForeignKey(
verbose_name='form',
to='form_designer.FormDefinition',
on_delete=models.CASCADE)),
],
options={
'abstract': False,
},
bases=(
'cms.cmsplugin',
),
),
]
|
Add on_delete args to CMS plugin migration for Django 2 support
|
Add on_delete args to CMS plugin migration for Django 2 support
|
Python
|
bsd-3-clause
|
kcsry/django-form-designer,kcsry/django-form-designer,andersinno/django-form-designer,andersinno/django-form-designer-ai,andersinno/django-form-designer-ai,andersinno/django-form-designer
|
from __future__ import unicode_literals
import cms
from django.db import migrations, models
from pkg_resources import parse_version as V
# Django CMS 3.3.1 is oldest release where the change affects.
# Refs https://github.com/divio/django-cms/commit/871a164
if V(cms.__version__) >= V('3.3.1'):
field_kwargs = {'related_name': 'form_designer_form_cmsformdefinition'}
else:
field_kwargs = {}
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
('form_designer', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CMSFormDefinition',
fields=[
('cmsplugin_ptr',
models.OneToOneField(
serialize=False,
auto_created=True,
primary_key=True,
to='cms.CMSPlugin',
parent_link=True,
+ on_delete=models.CASCADE,
**field_kwargs)),
('form_definition',
models.ForeignKey(
verbose_name='form',
- to='form_designer.FormDefinition')),
+ to='form_designer.FormDefinition',
+ on_delete=models.CASCADE)),
],
options={
'abstract': False,
},
bases=(
'cms.cmsplugin',
),
),
]
|
Add on_delete args to CMS plugin migration for Django 2 support
|
## Code Before:
from __future__ import unicode_literals
import cms
from django.db import migrations, models
from pkg_resources import parse_version as V
# Django CMS 3.3.1 is oldest release where the change affects.
# Refs https://github.com/divio/django-cms/commit/871a164
if V(cms.__version__) >= V('3.3.1'):
field_kwargs = {'related_name': 'form_designer_form_cmsformdefinition'}
else:
field_kwargs = {}
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
('form_designer', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CMSFormDefinition',
fields=[
('cmsplugin_ptr',
models.OneToOneField(
serialize=False,
auto_created=True,
primary_key=True,
to='cms.CMSPlugin',
parent_link=True,
**field_kwargs)),
('form_definition',
models.ForeignKey(
verbose_name='form',
to='form_designer.FormDefinition')),
],
options={
'abstract': False,
},
bases=(
'cms.cmsplugin',
),
),
]
## Instruction:
Add on_delete args to CMS plugin migration for Django 2 support
## Code After:
from __future__ import unicode_literals
import cms
from django.db import migrations, models
from pkg_resources import parse_version as V
# Django CMS 3.3.1 is oldest release where the change affects.
# Refs https://github.com/divio/django-cms/commit/871a164
if V(cms.__version__) >= V('3.3.1'):
field_kwargs = {'related_name': 'form_designer_form_cmsformdefinition'}
else:
field_kwargs = {}
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
('form_designer', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CMSFormDefinition',
fields=[
('cmsplugin_ptr',
models.OneToOneField(
serialize=False,
auto_created=True,
primary_key=True,
to='cms.CMSPlugin',
parent_link=True,
on_delete=models.CASCADE,
**field_kwargs)),
('form_definition',
models.ForeignKey(
verbose_name='form',
to='form_designer.FormDefinition',
on_delete=models.CASCADE)),
],
options={
'abstract': False,
},
bases=(
'cms.cmsplugin',
),
),
]
|
from __future__ import unicode_literals
import cms
from django.db import migrations, models
from pkg_resources import parse_version as V
# Django CMS 3.3.1 is oldest release where the change affects.
# Refs https://github.com/divio/django-cms/commit/871a164
if V(cms.__version__) >= V('3.3.1'):
field_kwargs = {'related_name': 'form_designer_form_cmsformdefinition'}
else:
field_kwargs = {}
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
('form_designer', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CMSFormDefinition',
fields=[
('cmsplugin_ptr',
models.OneToOneField(
serialize=False,
auto_created=True,
primary_key=True,
to='cms.CMSPlugin',
parent_link=True,
+ on_delete=models.CASCADE,
**field_kwargs)),
('form_definition',
models.ForeignKey(
verbose_name='form',
- to='form_designer.FormDefinition')),
? --
+ to='form_designer.FormDefinition',
+ on_delete=models.CASCADE)),
],
options={
'abstract': False,
},
bases=(
'cms.cmsplugin',
),
),
]
|
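Note on the change above: from Django 2.0 onward, on_delete is a required argument of ForeignKey and OneToOneField, which is why the migration adds it. A minimal sketch of the argument in an ordinary model definition (hypothetical Author/Book models, assumed for illustration only):

from django.db import models

class Author(models.Model):
    name = models.CharField(max_length=100)

class Book(models.Model):
    title = models.CharField(max_length=200)
    # on_delete is mandatory in Django >= 2.0; CASCADE removes books whose author is deleted
    author = models.ForeignKey(Author, on_delete=models.CASCADE)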
6325b0eebbe5c14284df4fa5398ffc678c3e0eca
|
posts/tests.py
|
posts/tests.py
|
from test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
post = PostFactory()
post_list_url = self.reverse('post:list')
self.get_check_200(post_list_url)
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
user = self.make_user('jelly jelly')
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
self.post('post:write', data=data)
|
from test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
# given
post = PostFactory()
post_list_url = self.reverse('post:list')
# when
self.get_check_200(post_list_url)
# then
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
# given
user = self.make_user('jelly jelly')
# when
with self.login(username=user.username):
# then
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
# given
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
# when
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
# then
self.get_check_200(write_post_url)
self.post('post:write', data=data)
|
Add given, when, then comment
|
Add given, when, then comment
|
Python
|
mit
|
9XD/9XD,9XD/9XD,9XD/9XD,9XD/9XD
|
from test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
+ # given
post = PostFactory()
post_list_url = self.reverse('post:list')
+ # when
self.get_check_200(post_list_url)
+ # then
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
-
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
+ # given
user = self.make_user('jelly jelly')
-
+ # when
with self.login(username=user.username):
+ # then
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
+ # given
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
-
+ # when
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
+ # then
self.get_check_200(write_post_url)
self.post('post:write', data=data)
|
Add given, when, then comment
|
## Code Before:
from test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
post = PostFactory()
post_list_url = self.reverse('post:list')
self.get_check_200(post_list_url)
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
user = self.make_user('jelly jelly')
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
self.post('post:write', data=data)
## Instruction:
Add given, when, then comment
## Code After:
from test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
# given
post = PostFactory()
post_list_url = self.reverse('post:list')
# when
self.get_check_200(post_list_url)
# then
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
# given
user = self.make_user('jelly jelly')
# when
with self.login(username=user.username):
# then
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
# given
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
# when
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
# then
self.get_check_200(write_post_url)
self.post('post:write', data=data)
|
from test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
+ # given
post = PostFactory()
post_list_url = self.reverse('post:list')
+ # when
self.get_check_200(post_list_url)
+ # then
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
-
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
+ # given
user = self.make_user('jelly jelly')
-
+ # when
with self.login(username=user.username):
+ # then
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
+ # given
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
-
+ # when
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
+ # then
self.get_check_200(write_post_url)
self.post('post:write', data=data)
|
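The comments added above follow the given/when/then convention (also called arrange/act/assert) for structuring a test body. A minimal standalone illustration with plain unittest (the add function is hypothetical, assumed for the sketch):

import unittest

def add(a, b):
    return a + b

class AddTest(unittest.TestCase):
    def test_add(self):
        # given
        a, b = 2, 3
        # when
        result = add(a, b)
        # then
        self.assertEqual(result, 5)

if __name__ == '__main__':
    unittest.main()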
c0c902cc356d1a3142a6d260e7b768114449013e
|
tutorials/models.py
|
tutorials/models.py
|
from django.db import models
from markdownx.models import MarkdownxField
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
# Category = models.TextField()
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
# Level = models.IntegerField()
|
from django.db import models
from django.urls import reverse
from markdownx.models import MarkdownxField
# add options if needed
CATEGORY_OPTIONS = [('io', 'I/O'), ('intro', 'Introduction')]
LEVEL_OPTIONS = [(1, '1'), (2, '2'), (3, '3')]
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
category = models.CharField(max_length=15, choices=CATEGORY_OPTIONS, blank=True)
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
level = models.IntegerField(choices=LEVEL_OPTIONS, null=True)
def get_absolute_url (self):
return reverse('detail_tutorial', args=[self.id])
|
Add options for choices fields, Add new fields to Tutorial model
|
Add options for choices fields, Add new fields to Tutorial model
|
Python
|
agpl-3.0
|
openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform
|
from django.db import models
+ from django.urls import reverse
+
from markdownx.models import MarkdownxField
+ # add options if needed
+ CATEGORY_OPTIONS = [('io', 'I/O'), ('intro', 'Introduction')]
+ LEVEL_OPTIONS = [(1, '1'), (2, '2'), (3, '3')]
+
# Create your models here.
-
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
- # Category = models.TextField()
+ category = models.CharField(max_length=15, choices=CATEGORY_OPTIONS, blank=True)
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
- # Level = models.IntegerField()
+ level = models.IntegerField(choices=LEVEL_OPTIONS, null=True)
+ def get_absolute_url (self):
+ return reverse('detail_tutorial', args=[self.id])
+
|
Add options for choices fields, Add new fields to Tutorial model
|
## Code Before:
from django.db import models
from markdownx.models import MarkdownxField
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
# Category = models.TextField()
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
# Level = models.IntegerField()
## Instruction:
Add options for choices fields, Add new fields to Tutorial model
## Code After:
from django.db import models
from django.urls import reverse
from markdownx.models import MarkdownxField
# add options if needed
CATEGORY_OPTIONS = [('io', 'I/O'), ('intro', 'Introduction')]
LEVEL_OPTIONS = [(1, '1'), (2, '2'), (3, '3')]
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
category = models.CharField(max_length=15, choices=CATEGORY_OPTIONS, blank=True)
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
level = models.IntegerField(choices=LEVEL_OPTIONS, null=True)
def get_absolute_url (self):
return reverse('detail_tutorial', args=[self.id])
|
from django.db import models
+ from django.urls import reverse
+
from markdownx.models import MarkdownxField
+ # add options if needed
+ CATEGORY_OPTIONS = [('io', 'I/O'), ('intro', 'Introduction')]
+ LEVEL_OPTIONS = [(1, '1'), (2, '2'), (3, '3')]
+
# Create your models here.
-
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
- # Category = models.TextField()
+ category = models.CharField(max_length=15, choices=CATEGORY_OPTIONS, blank=True)
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
- # Level = models.IntegerField()
+ level = models.IntegerField(choices=LEVEL_OPTIONS, null=True)
+
+ def get_absolute_url (self):
+ return reverse('detail_tutorial', args=[self.id])
|
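Two standard Django idioms appear in the diff above: a choices list on a model field and get_absolute_url built with reverse(). A minimal sketch outside this project (hypothetical Article model and 'article_detail' URL name, both assumed):

from django.db import models
from django.urls import reverse

STATUS_OPTIONS = [('draft', 'Draft'), ('published', 'Published')]

class Article(models.Model):
    title = models.TextField()
    # choices limits the values offered by forms and the admin to the listed pairs
    status = models.CharField(max_length=15, choices=STATUS_OPTIONS, blank=True)

    def get_absolute_url(self):
        # reverse() turns the named URL pattern back into a concrete path for this object
        return reverse('article_detail', args=[self.id])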
159006e87cbbb08689284ae2534f556f66c0159b
|
alabaster/__init__.py
|
alabaster/__init__.py
|
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
|
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
return {'version': version.__version__,
'parallel_read_safe': True}
|
Declare extension version and parallel read safety
|
Declare extension version and parallel read safety
This is necessary for Sphinx' parallel read feature to work, since we import alabaster all the time now.
|
Python
|
bsd-3-clause
|
bgeron/alabaster,jupytercalpoly/jupyter-alabaster-theme,ellisonbg/jupyter-alabaster-theme,nikolas/alabaster,charnpreetsingh/jupyter-alabaster-theme,jupytercalpoly/jupyter-alabaster-theme,charnpreetsingh/jupyter-alabaster-theme,bgeron/alabaster,ellisonbg/jupyter-alabaster-theme,nikolas/alabaster,charnpreetsingh/jupyter-alabaster-theme,jupytercalpoly/jupyter-alabaster-theme,ellisonbg/jupyter-alabaster-theme
|
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
+ return {'version': version.__version__,
+ 'parallel_read_safe': True}
|
Declare extension version and parallel read safety
|
## Code Before:
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
## Instruction:
Declare extension version and parallel read safety
## Code After:
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
return {'version': version.__version__,
'parallel_read_safe': True}
|
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
+ return {'version': version.__version__,
+ 'parallel_read_safe': True}
|
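The new return value of setup() above is the standard Sphinx extension metadata dict: 'version' identifies the extension and 'parallel_read_safe' tells Sphinx it may read source files in parallel while the extension is loaded. A minimal sketch of the same convention in a standalone extension (the handler and version string are hypothetical):

def inject_flag(app, pagename, templatename, context, doctree):
    # add a value to the template context of every rendered HTML page
    context['my_flag'] = True

def setup(app):
    app.connect('html-page-context', inject_flag)
    # metadata dict: declare the extension version and opt in to parallel reads
    return {'version': '1.0', 'parallel_read_safe': True}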
1361b5ebb4afd0c3c80df5bf936f3817427cd917
|
apps/pages/views.py
|
apps/pages/views.py
|
from django.views.generic import DetailView
from .models import Page
class PageView(DetailView):
model = Page
def get_object(self, queryset=None):
slug = self.kwargs.get('slug')
if not slug:
slug = 'index'
return self.get_queryset().get(slug=slug)
|
from django.views.generic import DetailView
from django.shortcuts import get_object_or_404
from .models import Page
class PageView(DetailView):
model = Page
def get_object(self, queryset=None):
slug = self.kwargs.get('slug')
if not slug:
slug = 'index'
return get_object_or_404(self.get_queryset(), slug=slug)
|
Return 404 in case page is not found
|
Return 404 in case page is not found
|
Python
|
mit
|
MeirKriheli/debian.org.il,MeirKriheli/debian.org.il
|
from django.views.generic import DetailView
+ from django.shortcuts import get_object_or_404
from .models import Page
class PageView(DetailView):
model = Page
def get_object(self, queryset=None):
slug = self.kwargs.get('slug')
if not slug:
slug = 'index'
- return self.get_queryset().get(slug=slug)
+ return get_object_or_404(self.get_queryset(), slug=slug)
|
Return 404 in case page is not found
|
## Code Before:
from django.views.generic import DetailView
from .models import Page
class PageView(DetailView):
model = Page
def get_object(self, queryset=None):
slug = self.kwargs.get('slug')
if not slug:
slug = 'index'
return self.get_queryset().get(slug=slug)
## Instruction:
Return 404 in case page is not found
## Code After:
from django.views.generic import DetailView
from django.shortcuts import get_object_or_404
from .models import Page
class PageView(DetailView):
model = Page
def get_object(self, queryset=None):
slug = self.kwargs.get('slug')
if not slug:
slug = 'index'
return get_object_or_404(self.get_queryset(), slug=slug)
|
from django.views.generic import DetailView
+ from django.shortcuts import get_object_or_404
from .models import Page
class PageView(DetailView):
model = Page
def get_object(self, queryset=None):
slug = self.kwargs.get('slug')
if not slug:
slug = 'index'
- return self.get_queryset().get(slug=slug)
? ^^^^^
+ return get_object_or_404(self.get_queryset(), slug=slug)
? ++++++++++++++++++ ^^
|
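The substitution above matters because queryset.get() raises Page.DoesNotExist when nothing matches, which Django reports as a server error, while get_object_or_404 raises Http404 and the visitor sees an ordinary 404 page. The same pattern in a plain function-based view (the template name is assumed for illustration):

from django.shortcuts import get_object_or_404, render

from .models import Page

def page_detail(request, slug):
    # raises Http404 (rendered as a 404 response) when no Page matches the slug
    page = get_object_or_404(Page, slug=slug)
    return render(request, 'pages/page_detail.html', {'page': page})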
1160a10a3d78eee3be61d760849e70e645272355
|
analytical/__init__.py
|
analytical/__init__.py
|
__author__ = "Joost Cassee"
__email__ = "[email protected]"
__version__ = "0.9.0"
__copyright__ = "Copyright (C) 2011 Joost Cassee"
__license__ = "MIT License"
|
__author__ = "Joost Cassee"
__email__ = "[email protected]"
__version__ = "0.9.0"
__copyright__ = "Copyright (C) 2011 Joost Cassee and others"
__license__ = "MIT License"
|
Add 'and others' to copyright
|
Add 'and others' to copyright
|
Python
|
mit
|
jcassee/django-analytical,ericdwang/django-analytical,machtfit/django-analytical,apocquet/django-analytical,bittner/django-analytical,ChristosChristofidis/django-analytical,pjdelport/django-analytical
|
__author__ = "Joost Cassee"
__email__ = "[email protected]"
__version__ = "0.9.0"
- __copyright__ = "Copyright (C) 2011 Joost Cassee"
+ __copyright__ = "Copyright (C) 2011 Joost Cassee and others"
__license__ = "MIT License"
|
Add 'and others' to copyright
|
## Code Before:
__author__ = "Joost Cassee"
__email__ = "[email protected]"
__version__ = "0.9.0"
__copyright__ = "Copyright (C) 2011 Joost Cassee"
__license__ = "MIT License"
## Instruction:
Add 'and others' to copyright
## Code After:
__author__ = "Joost Cassee"
__email__ = "[email protected]"
__version__ = "0.9.0"
__copyright__ = "Copyright (C) 2011 Joost Cassee and others"
__license__ = "MIT License"
|
__author__ = "Joost Cassee"
__email__ = "[email protected]"
__version__ = "0.9.0"
- __copyright__ = "Copyright (C) 2011 Joost Cassee"
+ __copyright__ = "Copyright (C) 2011 Joost Cassee and others"
? +++++++++++
__license__ = "MIT License"
|
b77a3f47876d824d2e0f1c009a6d580fc5d41ec6
|
accelerator/migrations/0019_add_deferred_user_role.py
|
accelerator/migrations/0019_add_deferred_user_role.py
|
from django.db import migrations
def add_deferred_user_role(apps, schema_editor):
DEFERRED_MENTOR = 'Deferred Mentor'
UserRole = apps.get_model('accelerator', 'UserRole')
Program = apps.get_model('accelerator', 'Program')
ProgramRole = apps.get_model('accelerator', 'ProgramRole')
user_role = UserRole.objects.create(
name=DEFERRED_MENTOR, sort_order='17')
for program in Program.objects.all():
name = "{} {} Deferred Mentor".format(
program.start_date.year,
program.program_family.name)
return ProgramRole.objects.get_or_create(
program=program,
user_role=user_role,
defaults={'name': name})
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0018_make_location_nonrequired'),
]
operations = [
migrations.RunPython(add_deferred_user_role,
migrations.RunPython.noop)
]
|
from django.db import migrations
def add_deferred_user_role(apps, schema_editor):
DEFERRED_MENTOR = 'Deferred Mentor'
UserRole = apps.get_model('accelerator', 'UserRole')
Program = apps.get_model('accelerator', 'Program')
ProgramRole = apps.get_model('accelerator', 'ProgramRole')
if UserRole.objects.filter(name=DEFERRED_MENTOR).exists():
user_role = UserRole.objects.filter(user=DEFERRED_MENTOR)[0]
else:
user_role = UserRole.objects.create(name=DEFERRED_MENTOR,
sort_order=17)
for program in Program.objects.all():
if not ProgramRole.objects.filter(user_role=user_role,
program=program).exists():
name = "{} {} ({}-{})".format(
(program.end_date.year if program.end_date else ""),
DEFERRED_MENTOR,
program.program_family.url_slug.upper(),
program.pk)
ProgramRole.objects.get_or_create(
program=program,
user_role=user_role,
name=name)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0018_make_location_nonrequired'),
]
operations = [
migrations.RunPython(add_deferred_user_role,
migrations.RunPython.noop)
]
|
Make changes to the migration file
|
[AC-7594] Make changes to the migration file
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
from django.db import migrations
def add_deferred_user_role(apps, schema_editor):
DEFERRED_MENTOR = 'Deferred Mentor'
UserRole = apps.get_model('accelerator', 'UserRole')
Program = apps.get_model('accelerator', 'Program')
ProgramRole = apps.get_model('accelerator', 'ProgramRole')
+ if UserRole.objects.filter(name=DEFERRED_MENTOR).exists():
+ user_role = UserRole.objects.filter(user=DEFERRED_MENTOR)[0]
+ else:
- user_role = UserRole.objects.create(
+ user_role = UserRole.objects.create(name=DEFERRED_MENTOR,
- name=DEFERRED_MENTOR, sort_order='17')
+ sort_order=17)
for program in Program.objects.all():
- name = "{} {} Deferred Mentor".format(
- program.start_date.year,
+ if not ProgramRole.objects.filter(user_role=user_role,
+ program=program).exists():
+ name = "{} {} ({}-{})".format(
+ (program.end_date.year if program.end_date else ""),
+ DEFERRED_MENTOR,
- program.program_family.name)
+ program.program_family.url_slug.upper(),
+ program.pk)
+
- return ProgramRole.objects.get_or_create(
+ ProgramRole.objects.get_or_create(
program=program,
user_role=user_role,
- defaults={'name': name})
+ name=name)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0018_make_location_nonrequired'),
]
operations = [
migrations.RunPython(add_deferred_user_role,
migrations.RunPython.noop)
]
|
Make changes to the migration file
|
## Code Before:
from django.db import migrations
def add_deferred_user_role(apps, schema_editor):
DEFERRED_MENTOR = 'Deferred Mentor'
UserRole = apps.get_model('accelerator', 'UserRole')
Program = apps.get_model('accelerator', 'Program')
ProgramRole = apps.get_model('accelerator', 'ProgramRole')
user_role = UserRole.objects.create(
name=DEFERRED_MENTOR, sort_order='17')
for program in Program.objects.all():
name = "{} {} Deferred Mentor".format(
program.start_date.year,
program.program_family.name)
return ProgramRole.objects.get_or_create(
program=program,
user_role=user_role,
defaults={'name': name})
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0018_make_location_nonrequired'),
]
operations = [
migrations.RunPython(add_deferred_user_role,
migrations.RunPython.noop)
]
## Instruction:
Make changes to the migration file
## Code After:
from django.db import migrations
def add_deferred_user_role(apps, schema_editor):
DEFERRED_MENTOR = 'Deferred Mentor'
UserRole = apps.get_model('accelerator', 'UserRole')
Program = apps.get_model('accelerator', 'Program')
ProgramRole = apps.get_model('accelerator', 'ProgramRole')
if UserRole.objects.filter(name=DEFERRED_MENTOR).exists():
user_role = UserRole.objects.filter(user=DEFERRED_MENTOR)[0]
else:
user_role = UserRole.objects.create(name=DEFERRED_MENTOR,
sort_order=17)
for program in Program.objects.all():
if not ProgramRole.objects.filter(user_role=user_role,
program=program).exists():
name = "{} {} ({}-{})".format(
(program.end_date.year if program.end_date else ""),
DEFERRED_MENTOR,
program.program_family.url_slug.upper(),
program.pk)
ProgramRole.objects.get_or_create(
program=program,
user_role=user_role,
name=name)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0018_make_location_nonrequired'),
]
operations = [
migrations.RunPython(add_deferred_user_role,
migrations.RunPython.noop)
]
|
from django.db import migrations
def add_deferred_user_role(apps, schema_editor):
DEFERRED_MENTOR = 'Deferred Mentor'
UserRole = apps.get_model('accelerator', 'UserRole')
Program = apps.get_model('accelerator', 'Program')
ProgramRole = apps.get_model('accelerator', 'ProgramRole')
+ if UserRole.objects.filter(name=DEFERRED_MENTOR).exists():
+ user_role = UserRole.objects.filter(user=DEFERRED_MENTOR)[0]
+ else:
- user_role = UserRole.objects.create(
+ user_role = UserRole.objects.create(name=DEFERRED_MENTOR,
? ++++ +++++++++++++++++++++
- name=DEFERRED_MENTOR, sort_order='17')
+ sort_order=17)
for program in Program.objects.all():
- name = "{} {} Deferred Mentor".format(
- program.start_date.year,
+ if not ProgramRole.objects.filter(user_role=user_role,
+ program=program).exists():
+ name = "{} {} ({}-{})".format(
+ (program.end_date.year if program.end_date else ""),
+ DEFERRED_MENTOR,
- program.program_family.name)
? ^^^
+ program.program_family.url_slug.upper(),
? ^^^^^^^^^^^^ ++ +
+ program.pk)
+
- return ProgramRole.objects.get_or_create(
? ^^^^^^
+ ProgramRole.objects.get_or_create(
? ^^^
program=program,
user_role=user_role,
- defaults={'name': name})
+ name=name)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0018_make_location_nonrequired'),
]
operations = [
migrations.RunPython(add_deferred_user_role,
migrations.RunPython.noop)
]
|
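The rewritten migration above makes the data creation idempotent by checking exists() before creating rows. Django's get_or_create expresses the same guarantee more compactly; a minimal sketch (a hypothetical standalone use of the UserRole model, not a drop-in replacement for the migration):

def ensure_deferred_mentor_role(UserRole):
    # returns the existing row when one matches name; otherwise creates it,
    # applying the values in defaults only to a newly created row
    role, created = UserRole.objects.get_or_create(
        name='Deferred Mentor',
        defaults={'sort_order': 17},
    )
    return role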
a0a0d120552eeb304ac4b49648a43be5cf83cdcb
|
piper/core.py
|
piper/core.py
|
class Piper(object):
"""
The main runner.
This class loads the configurations, sets up all other components, and
finally executes them in whatever order they are supposed to happen in.
"""
def __init__(self):
pass
|
import logbook
class Piper(object):
"""
The main pipeline runner.
This class loads the configurations, sets up all other components,
executes them in whatever order they are supposed to happen in, collects
data about the state of the pipeline and persists it, and finally tears
down the components that needs tearing down.
The functions are almost executed in the order found in this file. Woo!
"""
def __init__(self):
self.log = logbook.Logger(self.__class__.__name__)
def setup(self):
"""
Performs all setup steps
This is basically an umbrella function that runs setup for all the
things that the class needs to run a fully configured execute().
"""
pass
def load_config(self):
"""
Parses the configuration file and dies in flames if there are errors.
"""
pass
def setup_environment(self):
"""
Load the environment and it's configuration
"""
pass
def setup_steps(self):
"""
Loads the steps and their configuration.
Also determines which collection of steps is to be ran.
"""
pass
def execute(self):
"""
Runs the steps and determines whether to continue or not.
Of all the things to happen in this application, this is probably
the most important part!
"""
pass
def save_state(self):
"""
Collects all data about the pipeline being built and persists it.
"""
pass
def teardown_environment(self):
"""
Execute teardown step of the environment
"""
pass
|
Add more skeletonisms and documentation for Piper()
|
Add more skeletonisms and documentation for Piper()
|
Python
|
mit
|
thiderman/piper
|
+ import logbook
+
+
class Piper(object):
"""
- The main runner.
+ The main pipeline runner.
- This class loads the configurations, sets up all other components, and
+ This class loads the configurations, sets up all other components,
- finally executes them in whatever order they are supposed to happen in.
+ executes them in whatever order they are supposed to happen in, collects
+ data about the state of the pipeline and persists it, and finally tears
+ down the components that needs tearing down.
+
+ The functions are almost executed in the order found in this file. Woo!
"""
def __init__(self):
+ self.log = logbook.Logger(self.__class__.__name__)
+
+ def setup(self):
+ """
+ Performs all setup steps
+
+ This is basically an umbrella function that runs setup for all the
+ things that the class needs to run a fully configured execute().
+
+ """
+
pass
+ def load_config(self):
+ """
+ Parses the configuration file and dies in flames if there are errors.
+
+ """
+
+ pass
+
+ def setup_environment(self):
+ """
+ Load the environment and it's configuration
+
+ """
+
+ pass
+
+ def setup_steps(self):
+ """
+ Loads the steps and their configuration.
+
+ Also determines which collection of steps is to be ran.
+
+ """
+
+ pass
+
+ def execute(self):
+ """
+ Runs the steps and determines whether to continue or not.
+
+ Of all the things to happen in this application, this is probably
+ the most important part!
+
+ """
+
+ pass
+
+ def save_state(self):
+ """
+ Collects all data about the pipeline being built and persists it.
+
+ """
+
+ pass
+
+ def teardown_environment(self):
+ """
+ Execute teardown step of the environment
+
+ """
+
+ pass
+
|
Add more skeletonisms and documentation for Piper()
|
## Code Before:
class Piper(object):
"""
The main runner.
This class loads the configurations, sets up all other components, and
finally executes them in whatever order they are supposed to happen in.
"""
def __init__(self):
pass
## Instruction:
Add more skeletonisms and documentation for Piper()
## Code After:
import logbook
class Piper(object):
"""
The main pipeline runner.
This class loads the configurations, sets up all other components,
executes them in whatever order they are supposed to happen in, collects
data about the state of the pipeline and persists it, and finally tears
down the components that needs tearing down.
The functions are almost executed in the order found in this file. Woo!
"""
def __init__(self):
self.log = logbook.Logger(self.__class__.__name__)
def setup(self):
"""
Performs all setup steps
This is basically an umbrella function that runs setup for all the
things that the class needs to run a fully configured execute().
"""
pass
def load_config(self):
"""
Parses the configuration file and dies in flames if there are errors.
"""
pass
def setup_environment(self):
"""
Load the environment and it's configuration
"""
pass
def setup_steps(self):
"""
Loads the steps and their configuration.
Also determines which collection of steps is to be ran.
"""
pass
def execute(self):
"""
Runs the steps and determines whether to continue or not.
Of all the things to happen in this application, this is probably
the most important part!
"""
pass
def save_state(self):
"""
Collects all data about the pipeline being built and persists it.
"""
pass
def teardown_environment(self):
"""
Execute teardown step of the environment
"""
pass
|
+ import logbook
+
+
class Piper(object):
"""
- The main runner.
+ The main pipeline runner.
? +++++++++
- This class loads the configurations, sets up all other components, and
? ----
+ This class loads the configurations, sets up all other components,
- finally executes them in whatever order they are supposed to happen in.
? -------- ^
+ executes them in whatever order they are supposed to happen in, collects
? ^^^^^^^^^^
+ data about the state of the pipeline and persists it, and finally tears
+ down the components that needs tearing down.
+
+ The functions are almost executed in the order found in this file. Woo!
"""
def __init__(self):
+ self.log = logbook.Logger(self.__class__.__name__)
+
+ def setup(self):
+ """
+ Performs all setup steps
+
+ This is basically an umbrella function that runs setup for all the
+ things that the class needs to run a fully configured execute().
+
+ """
+
pass
+
+ def load_config(self):
+ """
+ Parses the configuration file and dies in flames if there are errors.
+
+ """
+
+ pass
+
+ def setup_environment(self):
+ """
+ Load the environment and it's configuration
+
+ """
+
+ pass
+
+ def setup_steps(self):
+ """
+ Loads the steps and their configuration.
+
+ Also determines which collection of steps is to be ran.
+
+ """
+
+ pass
+
+ def execute(self):
+ """
+ Runs the steps and determines whether to continue or not.
+
+ Of all the things to happen in this application, this is probably
+ the most important part!
+
+ """
+
+ pass
+
+ def save_state(self):
+ """
+ Collects all data about the pipeline being built and persists it.
+
+ """
+
+ pass
+
+ def teardown_environment(self):
+ """
+ Execute teardown step of the environment
+
+ """
+
+ pass
|
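The skeleton above wires in logbook rather than the standard logging module. A minimal sketch of how such a logger is typically driven (the Runner class and handler setup are assumptions for the sketch, not taken from the diff):

import sys

import logbook

class Runner(object):
    def __init__(self):
        self.log = logbook.Logger(self.__class__.__name__)

    def setup(self):
        self.log.info('running setup')

if __name__ == '__main__':
    # route log records to stdout for the lifetime of the application
    logbook.StreamHandler(sys.stdout).push_application()
    Runner().setup()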
a8596fd4a76460bd3e15509825d3cb3f82a3f8c4
|
test/integration/ggrc/converters/test_import_delete.py
|
test/integration/ggrc/converters/test_import_delete.py
|
from ggrc.converters import errors
from integration.ggrc import TestCase
class TestBasicCsvImport(TestCase):
def setUp(self):
TestCase.setUp(self)
self.client.get("/login")
def test_policy_basic_import(self):
filename = "ca_setup_for_deletion.csv"
self.import_file(filename)
filename = "ca_deletion.csv"
response_data_dry = self.import_file(filename, dry_run=True)
response_data = self.import_file(filename)
self.assertEqual(response_data_dry, response_data)
self.assertEqual(response_data[0]["deleted"], 2)
self.assertEqual(response_data[0]["ignored"], 0)
|
from integration.ggrc import TestCase
class TestBasicCsvImport(TestCase):
def setUp(self):
TestCase.setUp(self)
self.client.get("/login")
def test_policy_basic_import(self):
filename = "ca_setup_for_deletion.csv"
self.import_file(filename)
filename = "ca_deletion.csv"
response_data = self.import_file(filename)
self.assertEqual(response_data[0]["deleted"], 2)
self.assertEqual(response_data[0]["ignored"], 0)
|
Optimize basic delete import tests
|
Optimize basic delete import tests
The dry-run check is now automatically performed on each import and we
do not need to duplicate the work in the delete test.
|
Python
|
apache-2.0
|
selahssea/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core
|
- from ggrc.converters import errors
from integration.ggrc import TestCase
class TestBasicCsvImport(TestCase):
def setUp(self):
TestCase.setUp(self)
self.client.get("/login")
def test_policy_basic_import(self):
filename = "ca_setup_for_deletion.csv"
self.import_file(filename)
filename = "ca_deletion.csv"
- response_data_dry = self.import_file(filename, dry_run=True)
response_data = self.import_file(filename)
- self.assertEqual(response_data_dry, response_data)
self.assertEqual(response_data[0]["deleted"], 2)
self.assertEqual(response_data[0]["ignored"], 0)
|
Optimize basic delete import tests
|
## Code Before:
from ggrc.converters import errors
from integration.ggrc import TestCase
class TestBasicCsvImport(TestCase):
def setUp(self):
TestCase.setUp(self)
self.client.get("/login")
def test_policy_basic_import(self):
filename = "ca_setup_for_deletion.csv"
self.import_file(filename)
filename = "ca_deletion.csv"
response_data_dry = self.import_file(filename, dry_run=True)
response_data = self.import_file(filename)
self.assertEqual(response_data_dry, response_data)
self.assertEqual(response_data[0]["deleted"], 2)
self.assertEqual(response_data[0]["ignored"], 0)
## Instruction:
Optimize basic delete import tests
## Code After:
from integration.ggrc import TestCase
class TestBasicCsvImport(TestCase):
def setUp(self):
TestCase.setUp(self)
self.client.get("/login")
def test_policy_basic_import(self):
filename = "ca_setup_for_deletion.csv"
self.import_file(filename)
filename = "ca_deletion.csv"
response_data = self.import_file(filename)
self.assertEqual(response_data[0]["deleted"], 2)
self.assertEqual(response_data[0]["ignored"], 0)
|
- from ggrc.converters import errors
from integration.ggrc import TestCase
class TestBasicCsvImport(TestCase):
def setUp(self):
TestCase.setUp(self)
self.client.get("/login")
def test_policy_basic_import(self):
filename = "ca_setup_for_deletion.csv"
self.import_file(filename)
filename = "ca_deletion.csv"
- response_data_dry = self.import_file(filename, dry_run=True)
response_data = self.import_file(filename)
- self.assertEqual(response_data_dry, response_data)
self.assertEqual(response_data[0]["deleted"], 2)
self.assertEqual(response_data[0]["ignored"], 0)
|
b6e393271971426506557a208be93d8b79d55cc3
|
examples/image_captioning/download.py
|
examples/image_captioning/download.py
|
import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
except FileExistsError:
raise FileExistsError(
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
|
import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
except OSError:
raise OSError(
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
|
Fix error type for Python 2
|
Fix error type for Python 2
|
Python
|
mit
|
chainer/chainer,ktnyt/chainer,hvy/chainer,aonotas/chainer,wkentaro/chainer,tkerola/chainer,chainer/chainer,keisuke-umezawa/chainer,niboshi/chainer,okuta/chainer,niboshi/chainer,ktnyt/chainer,rezoo/chainer,jnishi/chainer,okuta/chainer,hvy/chainer,jnishi/chainer,wkentaro/chainer,hvy/chainer,wkentaro/chainer,jnishi/chainer,niboshi/chainer,chainer/chainer,anaruse/chainer,ktnyt/chainer,wkentaro/chainer,niboshi/chainer,pfnet/chainer,jnishi/chainer,hvy/chainer,keisuke-umezawa/chainer,ktnyt/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,okuta/chainer,chainer/chainer,ronekko/chainer,okuta/chainer
|
import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
- except FileExistsError:
+ except OSError:
- raise FileExistsError(
+ raise OSError(
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
|
Fix error type for Python 2
|
## Code Before:
import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
except FileExistsError:
raise FileExistsError(
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
## Instruction:
Fix error type for Python 2
## Code After:
import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
except OSError:
raise OSError(
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
|
import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
- except FileExistsError:
+ except OSError:
- raise FileExistsError(
? ^^^^^^^^^^
+ raise OSError(
? ^^
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
|
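The fix above works because FileExistsError was introduced in Python 3.3; under Python 2 the same condition surfaces as OSError with errno set to EEXIST, and on Python 3 OSError is the base class of FileExistsError, so catching OSError is portable across both. A sketch of the usual version-agnostic idiom (the helper name is hypothetical):

import errno
import os

def makedirs_if_missing(path):
    try:
        os.makedirs(path)
    except OSError as exc:
        # ignore "already exists"; re-raise any other failure
        if exc.errno != errno.EEXIST:
            raise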
c604ace9394cdc1c0c0a3002cbb3d90dd64695f3
|
examples/mnist-classifier.py
|
examples/mnist-classifier.py
|
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
path = os.path.join(tempfile.gettempdir(), 'mnist-classifier.pkl.gz')
Main().train().save(path)
print 'saved network to', path
|
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
m = Main()
path = os.path.join(tempfile.gettempdir(), 'mnist-classifier-%s.pkl.gz' % m.opts.layers)
if os.path.exists(path):
m.net.load(path)
m.train()
m.net.save(path)
|
Save mnist classifier model in a file named with the network topology.
|
Save mnist classifier model in a file named with the network topology.
|
Python
|
mit
|
lmjohns3/theanets,chrinide/theanets,devdoer/theanets
|
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
+ m = Main()
- path = os.path.join(tempfile.gettempdir(), 'mnist-classifier.pkl.gz')
+ path = os.path.join(tempfile.gettempdir(), 'mnist-classifier-%s.pkl.gz' % m.opts.layers)
- Main().train().save(path)
- print 'saved network to', path
+ if os.path.exists(path):
+ m.net.load(path)
+ m.train()
+ m.net.save(path)
|
Save mnist classifier model in a file named with the network topology.
|
## Code Before:
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
path = os.path.join(tempfile.gettempdir(), 'mnist-classifier.pkl.gz')
Main().train().save(path)
print 'saved network to', path
## Instruction:
Save mnist classifier model in a file named with the network topology.
## Code After:
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
m = Main()
path = os.path.join(tempfile.gettempdir(), 'mnist-classifier-%s.pkl.gz' % m.opts.layers)
if os.path.exists(path):
m.net.load(path)
m.train()
m.net.save(path)
|
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
+ m = Main()
- path = os.path.join(tempfile.gettempdir(), 'mnist-classifier.pkl.gz')
+ path = os.path.join(tempfile.gettempdir(), 'mnist-classifier-%s.pkl.gz' % m.opts.layers)
? +++ ++++++++++++++++
- Main().train().save(path)
- print 'saved network to', path
+ if os.path.exists(path):
+ m.net.load(path)
+ m.train()
+ m.net.save(path)
|
0f54bb7a1a26bb3e7192b30cc426fbaeb92caaed
|
tests/utils/test_settings.py
|
tests/utils/test_settings.py
|
from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
def test_setting_creation(self):
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
|
from app import db, cache
from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
def test_setitem(self):
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
def test_getitem(self):
setting = Setting(name='foo', value='bar')
db.session.add(setting)
db.session.commit()
# We need to delete the Setting dictionary cache manually,
# since we didn't add the setting through the AppSettings interface
cache.delete_memoized(Setting.as_dict)
self.assertEqual(self.app.config['SETTINGS']['foo'], 'bar')
|
Add __getitem__ test for AppSettings
|
Add __getitem__ test for AppSettings
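For readers skimming this record: the AppSettings object under test behaves like a mapping whose __setitem__ and __getitem__ are backed by Setting rows plus a memoized cache. Below is a rough, standalone sketch of that shape; the class name and the plain-dict "database" are illustrative assumptions, not the flask-blogger implementation.

from collections.abc import MutableMapping

class FakeSettingStore(MutableMapping):
    """Dict-backed stand-in for the Setting model and its cache used in the tests."""

    def __init__(self):
        self._rows = {}  # pretend database table

    def __setitem__(self, name, value):
        self._rows[name] = value  # create-or-update the row

    def __getitem__(self, name):
        return self._rows[name]  # the real class reads from a cached dict of rows

    def __delitem__(self, name):
        del self._rows[name]

    def __iter__(self):
        return iter(self._rows)

    def __len__(self):
        return len(self._rows)

settings = FakeSettingStore()
settings['foo'] = 'bar'
print(settings['foo'])  # 'bar'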
|
Python
|
mit
|
Encrylize/flask-blogger,Encrylize/flask-blogger,Encrylize/flask-blogger
|
+ from app import db, cache
from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
- def test_setting_creation(self):
+ def test_setitem(self):
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
+ def test_getitem(self):
+ setting = Setting(name='foo', value='bar')
+ db.session.add(setting)
+ db.session.commit()
+
+ # We need to delete the Setting dictionary cache manually,
+ # since we didn't add the setting through the AppSettings interface
+ cache.delete_memoized(Setting.as_dict)
+
+ self.assertEqual(self.app.config['SETTINGS']['foo'], 'bar')
+
|
Add __getitem__ test for AppSettings
|
## Code Before:
from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
def test_setting_creation(self):
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
## Instruction:
Add __getitem__ test for AppSettings
## Code After:
from app import db, cache
from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
def test_setitem(self):
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
def test_getitem(self):
setting = Setting(name='foo', value='bar')
db.session.add(setting)
db.session.commit()
# We need to delete the Setting dictionary cache manually,
# since we didn't add the setting through the AppSettings interface
cache.delete_memoized(Setting.as_dict)
self.assertEqual(self.app.config['SETTINGS']['foo'], 'bar')
|
+ from app import db, cache
from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
- def test_setting_creation(self):
? ------ ^^^^^
+ def test_setitem(self):
? + ^
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
+
+ def test_getitem(self):
+ setting = Setting(name='foo', value='bar')
+ db.session.add(setting)
+ db.session.commit()
+
+ # We need to delete the Setting dictionary cache manually,
+ # since we didn't add the setting through the AppSettings interface
+ cache.delete_memoized(Setting.as_dict)
+
+ self.assertEqual(self.app.config['SETTINGS']['foo'], 'bar')
|
db960486f223e04fe08a8f2b9619aa887dcafeda
|
yuno.py
|
yuno.py
|
import os
import re
import sys
from yuno.core import cli, config
from yuno.core.util import working_dir
def main(argv=None):
# Figure out where Yuno lives so plugins can cd correctly if they need to.
yuno_home = os.path.abspath(os.path.dirname(__file__))
config.update('YUNO_HOME', yuno_home)
with working_dir(yuno_home):
args, subcommand_args = cli.get_cli_args()
load_settings(args.runtime_settings, args.command)
program = __import__(
'yuno.{command}.{command}'.format(command=args.command),
fromlist=['yuno.' + args.command]
)
program.main(subcommand_args)
def load_settings(runtime_settings, plugin_name):
plugin_name = re.sub('[^a-z0-9_]', '', plugin_name, flags=re.I)
plugin_settings_file = 'yuno/%s/settings/config.json' % plugin_name
config.load_default()
if os.path.isfile(plugin_settings_file):
config.load_json(plugin_settings_file)
for override in runtime_settings or []:
key = override[0]
if isinstance(getattr(config.config, key), list):
value = override[1:]
else:
value = override[1]
config.update(key, value)
if __name__ == '__main__':
main()
|
import os
import re
import sys
from yuno.core import cli, config
from yuno.core.util import working_dir
def main(argv=None):
# Figure out where Yuno lives so plugins can cd correctly if they need to.
yuno_home = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
config.update('YUNO_HOME', yuno_home)
with working_dir(yuno_home):
args, subcommand_args = cli.get_cli_args()
load_settings(args.runtime_settings, args.command)
program = __import__(
'yuno.{command}.{command}'.format(command=args.command),
fromlist=['yuno.' + args.command]
)
program.main(subcommand_args)
def load_settings(runtime_settings, plugin_name):
plugin_name = re.sub('[^a-z0-9_]', '', plugin_name, flags=re.I)
plugin_settings_file = 'yuno/%s/settings/config.json' % plugin_name
config.load_default()
if os.path.isfile(plugin_settings_file):
config.load_json(plugin_settings_file)
for override in runtime_settings or []:
key = override[0]
if isinstance(getattr(config.config, key), list):
value = override[1:]
else:
value = override[1]
config.update(key, value)
if __name__ == '__main__':
main()
|
Resolve symlinks when detecting YUNO_HOME.
|
Resolve symlinks when detecting YUNO_HOME.
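Why realpath and not just abspath: os.path.abspath only normalizes the path string, while os.path.realpath also resolves symlinks, so YUNO_HOME still points at the real install directory when the entry script is launched through a symlink. A small POSIX-only illustration with made-up temporary paths:

import os
import tempfile

# Build a real script and a symlink to it (POSIX-only illustration).
install_dir = tempfile.mkdtemp()
script = os.path.join(install_dir, "yuno.py")
open(script, "w").close()
bin_dir = tempfile.mkdtemp()
link = os.path.join(bin_dir, "yuno.py")
os.symlink(script, link)

# abspath keeps the symlink's own directory; realpath follows the link home.
print(os.path.dirname(os.path.abspath(link)))                    # bin_dir
print(os.path.dirname(os.path.abspath(os.path.realpath(link))))  # install_dir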
|
Python
|
mit
|
bulatb/yuno,bulatb/yuno
|
import os
import re
import sys
from yuno.core import cli, config
from yuno.core.util import working_dir
def main(argv=None):
# Figure out where Yuno lives so plugins can cd correctly if they need to.
- yuno_home = os.path.abspath(os.path.dirname(__file__))
+ yuno_home = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
config.update('YUNO_HOME', yuno_home)
with working_dir(yuno_home):
args, subcommand_args = cli.get_cli_args()
load_settings(args.runtime_settings, args.command)
program = __import__(
'yuno.{command}.{command}'.format(command=args.command),
fromlist=['yuno.' + args.command]
)
program.main(subcommand_args)
def load_settings(runtime_settings, plugin_name):
plugin_name = re.sub('[^a-z0-9_]', '', plugin_name, flags=re.I)
plugin_settings_file = 'yuno/%s/settings/config.json' % plugin_name
config.load_default()
if os.path.isfile(plugin_settings_file):
config.load_json(plugin_settings_file)
for override in runtime_settings or []:
key = override[0]
if isinstance(getattr(config.config, key), list):
value = override[1:]
else:
value = override[1]
config.update(key, value)
if __name__ == '__main__':
main()
|
Resolve symlinks when detecting YUNO_HOME.
|
## Code Before:
import os
import re
import sys
from yuno.core import cli, config
from yuno.core.util import working_dir
def main(argv=None):
# Figure out where Yuno lives so plugins can cd correctly if they need to.
yuno_home = os.path.abspath(os.path.dirname(__file__))
config.update('YUNO_HOME', yuno_home)
with working_dir(yuno_home):
args, subcommand_args = cli.get_cli_args()
load_settings(args.runtime_settings, args.command)
program = __import__(
'yuno.{command}.{command}'.format(command=args.command),
fromlist=['yuno.' + args.command]
)
program.main(subcommand_args)
def load_settings(runtime_settings, plugin_name):
plugin_name = re.sub('[^a-z0-9_]', '', plugin_name, flags=re.I)
plugin_settings_file = 'yuno/%s/settings/config.json' % plugin_name
config.load_default()
if os.path.isfile(plugin_settings_file):
config.load_json(plugin_settings_file)
for override in runtime_settings or []:
key = override[0]
if isinstance(getattr(config.config, key), list):
value = override[1:]
else:
value = override[1]
config.update(key, value)
if __name__ == '__main__':
main()
## Instruction:
Resolve symlinks when detecting YUNO_HOME.
## Code After:
import os
import re
import sys
from yuno.core import cli, config
from yuno.core.util import working_dir
def main(argv=None):
# Figure out where Yuno lives so plugins can cd correctly if they need to.
yuno_home = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
config.update('YUNO_HOME', yuno_home)
with working_dir(yuno_home):
args, subcommand_args = cli.get_cli_args()
load_settings(args.runtime_settings, args.command)
program = __import__(
'yuno.{command}.{command}'.format(command=args.command),
fromlist=['yuno.' + args.command]
)
program.main(subcommand_args)
def load_settings(runtime_settings, plugin_name):
plugin_name = re.sub('[^a-z0-9_]', '', plugin_name, flags=re.I)
plugin_settings_file = 'yuno/%s/settings/config.json' % plugin_name
config.load_default()
if os.path.isfile(plugin_settings_file):
config.load_json(plugin_settings_file)
for override in runtime_settings or []:
key = override[0]
if isinstance(getattr(config.config, key), list):
value = override[1:]
else:
value = override[1]
config.update(key, value)
if __name__ == '__main__':
main()
|
import os
import re
import sys
from yuno.core import cli, config
from yuno.core.util import working_dir
def main(argv=None):
# Figure out where Yuno lives so plugins can cd correctly if they need to.
- yuno_home = os.path.abspath(os.path.dirname(__file__))
+ yuno_home = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
? +++++++++++++++++ +
config.update('YUNO_HOME', yuno_home)
with working_dir(yuno_home):
args, subcommand_args = cli.get_cli_args()
load_settings(args.runtime_settings, args.command)
program = __import__(
'yuno.{command}.{command}'.format(command=args.command),
fromlist=['yuno.' + args.command]
)
program.main(subcommand_args)
def load_settings(runtime_settings, plugin_name):
plugin_name = re.sub('[^a-z0-9_]', '', plugin_name, flags=re.I)
plugin_settings_file = 'yuno/%s/settings/config.json' % plugin_name
config.load_default()
if os.path.isfile(plugin_settings_file):
config.load_json(plugin_settings_file)
for override in runtime_settings or []:
key = override[0]
if isinstance(getattr(config.config, key), list):
value = override[1:]
else:
value = override[1]
config.update(key, value)
if __name__ == '__main__':
main()
|
13ba81df82f2c43838066ec9cd0fa1222324349f
|
srsly/util.py
|
srsly/util.py
|
from __future__ import unicode_literals
from pathlib import Path
import sys
def force_path(location, require_exists=True):
if not isinstance(location, Path):
location = Path(location)
if require_exists and not location.exists():
raise ValueError("Can't read file: {}".format(location))
return location
def force_string(location):
if sys.version_info[0] == 2: # Python 2
return str(location).decode("utf8")
return str(location)
|
from __future__ import unicode_literals
from pathlib import Path
import sys
is_python2 = sys.version_info[0] == 2
is_python3 = sys.version_info[0] == 3
if is_python2:
basestring_ = basestring # noqa: F821
else:
basestring_ = str
def force_path(location, require_exists=True):
if not isinstance(location, Path):
location = Path(location)
if require_exists and not location.exists():
raise ValueError("Can't read file: {}".format(location))
return location
def force_string(location):
if isinstance(location, basestring_):
return location
if sys.version_info[0] == 2: # Python 2
return str(location).decode("utf8")
return str(location)
|
Improve compat handling in force_string
|
Improve compat handling in force_string
If we know we already have a string, no need to force it into a string
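A condensed, standalone sketch of the same compat idea, mirroring the record's code (the demo values are arbitrary): alias basestring/str once, then short-circuit force_string when the argument is already a string.

import sys
from pathlib import Path

# Lazy conditional, so `basestring` is only looked up on Python 2.
basestring_ = basestring if sys.version_info[0] == 2 else str  # noqa: F821

def force_string(location):
    # Already a string: return it untouched instead of round-tripping through str().
    if isinstance(location, basestring_):
        return location
    # Python 2 needs an explicit decode to get unicode text.
    if sys.version_info[0] == 2:
        return str(location).decode("utf8")
    return str(location)

print(force_string(Path("/tmp/model.json")))  # '/tmp/model.json'
print(force_string("already-a-string"))       # returned unchanged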
|
Python
|
mit
|
explosion/srsly,explosion/srsly,explosion/srsly,explosion/srsly
|
from __future__ import unicode_literals
from pathlib import Path
import sys
+
+
+ is_python2 = sys.version_info[0] == 2
+ is_python3 = sys.version_info[0] == 3
+
+ if is_python2:
+ basestring_ = basestring # noqa: F821
+ else:
+ basestring_ = str
def force_path(location, require_exists=True):
if not isinstance(location, Path):
location = Path(location)
if require_exists and not location.exists():
raise ValueError("Can't read file: {}".format(location))
return location
def force_string(location):
+ if isinstance(location, basestring_):
+ return location
if sys.version_info[0] == 2: # Python 2
return str(location).decode("utf8")
return str(location)
|
Improve compat handling in force_string
|
## Code Before:
from __future__ import unicode_literals
from pathlib import Path
import sys
def force_path(location, require_exists=True):
if not isinstance(location, Path):
location = Path(location)
if require_exists and not location.exists():
raise ValueError("Can't read file: {}".format(location))
return location
def force_string(location):
if sys.version_info[0] == 2: # Python 2
return str(location).decode("utf8")
return str(location)
## Instruction:
Improve compat handling in force_string
## Code After:
from __future__ import unicode_literals
from pathlib import Path
import sys
is_python2 = sys.version_info[0] == 2
is_python3 = sys.version_info[0] == 3
if is_python2:
basestring_ = basestring # noqa: F821
else:
basestring_ = str
def force_path(location, require_exists=True):
if not isinstance(location, Path):
location = Path(location)
if require_exists and not location.exists():
raise ValueError("Can't read file: {}".format(location))
return location
def force_string(location):
if isinstance(location, basestring_):
return location
if sys.version_info[0] == 2: # Python 2
return str(location).decode("utf8")
return str(location)
|
from __future__ import unicode_literals
from pathlib import Path
import sys
+
+
+ is_python2 = sys.version_info[0] == 2
+ is_python3 = sys.version_info[0] == 3
+
+ if is_python2:
+ basestring_ = basestring # noqa: F821
+ else:
+ basestring_ = str
def force_path(location, require_exists=True):
if not isinstance(location, Path):
location = Path(location)
if require_exists and not location.exists():
raise ValueError("Can't read file: {}".format(location))
return location
def force_string(location):
+ if isinstance(location, basestring_):
+ return location
if sys.version_info[0] == 2: # Python 2
return str(location).decode("utf8")
return str(location)
|
e0b82cf9ed24870cb313328e5539acc5fe7f6508
|
stock_awesome/levels/chock_a_block.py
|
stock_awesome/levels/chock_a_block.py
|
import time
from stock_awesome.obj import market
def main():
"""
Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask
price.
"""
m = market.StockAPI('WEB29978261', 'NOWUEX', 'BBCM')
#collection of orders placed
orders = {}
filled = 0
upper_limit = 3300
#try to buy 100000
to_send = 1000
while to_send > 0:
quote = m.quote()
ask = quote.get('ask')
if ask and ask < upper_limit:
r = m.buy(quote['askSize'], quote['ask'], order_type='fill-or-kill')
to_send -= 1
orders[r['id']] = r
orders = update_orders(m, orders)
filled += update_filled(orders)
else:
time.sleep(1)
def update_orders(m, orders):
"""
update order status
"""
return {o: m.order_status(o) for o in orders}
def update_filled(orders):
"""
Remove filled orders and update our count.
"""
closed = [o for o in orders if not orders[o]['open']]
#remove and sum filled orders
filled = sum(orders.pop(o)['totalFilled'] for o in closed)
return filled
if __name__ == '__main__':
main()
|
import time
from stock_awesome.obj import market
def main():
"""
Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask
price.
"""
m = market.StockAPI('RAJ40214463', 'SSMCEX', 'IPSO')
#collection of orders placed
orders = {}
filled = 0
upper_limit = 2450
#try to buy 100000
to_buy = 100000
while to_buy > 0:
quote = m.quote()
ask = quote.get('ask', 0)
bid = quote.get('bid')
if ask < upper_limit:
r = m.buy(quote['askSize'], ask, order_type='fill-or-kill')
to_buy -= r['totalFilled']
print("Bought {}, {} remaining".format(r['totalFilled'], to_buy))
else:
time.sleep(1)
print('done')
def update_orders(m, orders):
"""
update order status
"""
return {o: m.order_status(o) for o in orders}
def update_filled(orders):
"""
Remove filled orders and update our count.
"""
closed = [o for o in orders if not orders[o]['open']]
#remove and sum filled orders
filled = sum(orders.pop(o)['totalFilled'] for o in closed)
return filled
if __name__ == '__main__':
main()
|
Add some (ineffective) score maximizing attempts
|
Add some (ineffective) score maximizing attempts
|
Python
|
mit
|
ForeverWintr/stock_awesome
|
import time
from stock_awesome.obj import market
def main():
"""
Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask
price.
"""
- m = market.StockAPI('WEB29978261', 'NOWUEX', 'BBCM')
+ m = market.StockAPI('RAJ40214463', 'SSMCEX', 'IPSO')
#collection of orders placed
orders = {}
filled = 0
- upper_limit = 3300
+ upper_limit = 2450
#try to buy 100000
- to_send = 1000
+ to_buy = 100000
- while to_send > 0:
+ while to_buy > 0:
quote = m.quote()
- ask = quote.get('ask')
+ ask = quote.get('ask', 0)
+ bid = quote.get('bid')
- if ask and ask < upper_limit:
+ if ask < upper_limit:
- r = m.buy(quote['askSize'], quote['ask'], order_type='fill-or-kill')
+ r = m.buy(quote['askSize'], ask, order_type='fill-or-kill')
- to_send -= 1
+ to_buy -= r['totalFilled']
+ print("Bought {}, {} remaining".format(r['totalFilled'], to_buy))
- orders[r['id']] = r
-
- orders = update_orders(m, orders)
- filled += update_filled(orders)
else:
time.sleep(1)
-
+ print('done')
def update_orders(m, orders):
"""
update order status
"""
return {o: m.order_status(o) for o in orders}
def update_filled(orders):
"""
Remove filled orders and update our count.
"""
closed = [o for o in orders if not orders[o]['open']]
#remove and sum filled orders
filled = sum(orders.pop(o)['totalFilled'] for o in closed)
return filled
if __name__ == '__main__':
main()
|
Add some (ineffective) score maximizing attempts
|
## Code Before:
import time
from stock_awesome.obj import market
def main():
"""
Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask
price.
"""
m = market.StockAPI('WEB29978261', 'NOWUEX', 'BBCM')
#collection of orders placed
orders = {}
filled = 0
upper_limit = 3300
#try to buy 100000
to_send = 1000
while to_send > 0:
quote = m.quote()
ask = quote.get('ask')
if ask and ask < upper_limit:
r = m.buy(quote['askSize'], quote['ask'], order_type='fill-or-kill')
to_send -= 1
orders[r['id']] = r
orders = update_orders(m, orders)
filled += update_filled(orders)
else:
time.sleep(1)
def update_orders(m, orders):
"""
update order status
"""
return {o: m.order_status(o) for o in orders}
def update_filled(orders):
"""
Remove filled orders and update our count.
"""
closed = [o for o in orders if not orders[o]['open']]
#remove and sum filled orders
filled = sum(orders.pop(o)['totalFilled'] for o in closed)
return filled
if __name__ == '__main__':
main()
## Instruction:
Add some (ineffective) score maximizing attempts
## Code After:
import time
from stock_awesome.obj import market
def main():
"""
Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask
price.
"""
m = market.StockAPI('RAJ40214463', 'SSMCEX', 'IPSO')
#collection of orders placed
orders = {}
filled = 0
upper_limit = 2450
#try to buy 100000
to_buy = 100000
while to_buy > 0:
quote = m.quote()
ask = quote.get('ask', 0)
bid = quote.get('bid')
if ask < upper_limit:
r = m.buy(quote['askSize'], ask, order_type='fill-or-kill')
to_buy -= r['totalFilled']
print("Bought {}, {} remaining".format(r['totalFilled'], to_buy))
else:
time.sleep(1)
print('done')
def update_orders(m, orders):
"""
update order status
"""
return {o: m.order_status(o) for o in orders}
def update_filled(orders):
"""
Remove filled orders and update our count.
"""
closed = [o for o in orders if not orders[o]['open']]
#remove and sum filled orders
filled = sum(orders.pop(o)['totalFilled'] for o in closed)
return filled
if __name__ == '__main__':
main()
|
import time
from stock_awesome.obj import market
def main():
"""
Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask
price.
"""
- m = market.StockAPI('WEB29978261', 'NOWUEX', 'BBCM')
+ m = market.StockAPI('RAJ40214463', 'SSMCEX', 'IPSO')
#collection of orders placed
orders = {}
filled = 0
- upper_limit = 3300
? ^^ -
+ upper_limit = 2450
? ^^^
#try to buy 100000
- to_send = 1000
? ^^^^
+ to_buy = 100000
? ^^^ ++
- while to_send > 0:
? ^^^^
+ while to_buy > 0:
? ^^^
quote = m.quote()
- ask = quote.get('ask')
+ ask = quote.get('ask', 0)
? +++
+ bid = quote.get('bid')
- if ask and ask < upper_limit:
? --------
+ if ask < upper_limit:
- r = m.buy(quote['askSize'], quote['ask'], order_type='fill-or-kill')
? ------- --
+ r = m.buy(quote['askSize'], ask, order_type='fill-or-kill')
- to_send -= 1
+ to_buy -= r['totalFilled']
+ print("Bought {}, {} remaining".format(r['totalFilled'], to_buy))
- orders[r['id']] = r
-
- orders = update_orders(m, orders)
- filled += update_filled(orders)
else:
time.sleep(1)
-
+ print('done')
def update_orders(m, orders):
"""
update order status
"""
return {o: m.order_status(o) for o in orders}
def update_filled(orders):
"""
Remove filled orders and update our count.
"""
closed = [o for o in orders if not orders[o]['open']]
#remove and sum filled orders
filled = sum(orders.pop(o)['totalFilled'] for o in closed)
return filled
if __name__ == '__main__':
main()
|
aaaaac53d996ff5ed1f39cbed583079e26150443
|
falcom/api/hathi/from_json.py
|
falcom/api/hathi/from_json.py
|
import json
from .data import HathiData
def load_json (json_data):
try:
return json.loads(json_data)
except:
return { }
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
|
import json
from .data import HathiData
EMPTY_JSON_DATA = { }
def load_json (json_data):
try:
return json.loads(json_data)
except:
return EMPTY_JSON_DATA
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
|
Add named constant to explain why { } default
|
Add named constant to explain why { } default
|
Python
|
bsd-3-clause
|
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
|
import json
from .data import HathiData
+
+ EMPTY_JSON_DATA = { }
def load_json (json_data):
try:
return json.loads(json_data)
except:
- return { }
+ return EMPTY_JSON_DATA
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
|
Add named constant to explain why { } default
|
## Code Before:
import json
from .data import HathiData
def load_json (json_data):
try:
return json.loads(json_data)
except:
return { }
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
## Instruction:
Add named constant to explain why { } default
## Code After:
import json
from .data import HathiData
EMPTY_JSON_DATA = { }
def load_json (json_data):
try:
return json.loads(json_data)
except:
return EMPTY_JSON_DATA
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
|
import json
from .data import HathiData
+
+ EMPTY_JSON_DATA = { }
def load_json (json_data):
try:
return json.loads(json_data)
except:
- return { }
+ return EMPTY_JSON_DATA
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
|
bdda5e565981ac26a7e5e1ab8d1486eb91b09e4c
|
views/base.py
|
views/base.py
|
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feincms.models import Page
def handler(request, path):
page = Page.objects.page_for_path_or_404(path)
if page.override_url:
return HttpResponseRedirect(page.override_url)
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
|
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feincms.models import Page
def handler(request, path=None):
if path is None:
path = request.path
page = Page.objects.page_for_path_or_404(path)
if page.override_url:
return HttpResponseRedirect(page.override_url)
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
|
Use request.path if no path was passed to the default view
|
Use request.path if no path was passed to the default view
|
Python
|
bsd-3-clause
|
mjl/feincms,nickburlett/feincms,nickburlett/feincms,pjdelport/feincms,matthiask/django-content-editor,michaelkuty/feincms,matthiask/feincms2-content,mjl/feincms,matthiask/django-content-editor,matthiask/django-content-editor,mjl/feincms,hgrimelid/feincms,hgrimelid/feincms,joshuajonah/feincms,nickburlett/feincms,matthiask/django-content-editor,joshuajonah/feincms,joshuajonah/feincms,pjdelport/feincms,michaelkuty/feincms,matthiask/feincms2-content,feincms/feincms,pjdelport/feincms,feincms/feincms,michaelkuty/feincms,nickburlett/feincms,joshuajonah/feincms,feincms/feincms,matthiask/feincms2-content,michaelkuty/feincms,hgrimelid/feincms
|
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feincms.models import Page
- def handler(request, path):
+ def handler(request, path=None):
+ if path is None:
+ path = request.path
+
page = Page.objects.page_for_path_or_404(path)
if page.override_url:
return HttpResponseRedirect(page.override_url)
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
|
Use request.path if no path was passed to the default view
|
## Code Before:
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feincms.models import Page
def handler(request, path):
page = Page.objects.page_for_path_or_404(path)
if page.override_url:
return HttpResponseRedirect(page.override_url)
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
## Instruction:
Use request.path if no path was passed to the default view
## Code After:
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feincms.models import Page
def handler(request, path=None):
if path is None:
path = request.path
page = Page.objects.page_for_path_or_404(path)
if page.override_url:
return HttpResponseRedirect(page.override_url)
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
|
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feincms.models import Page
- def handler(request, path):
+ def handler(request, path=None):
? +++++
+ if path is None:
+ path = request.path
+
page = Page.objects.page_for_path_or_404(path)
if page.override_url:
return HttpResponseRedirect(page.override_url)
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
|
745c9445e16f72dbc1791abef2b7f52eb5e1f093
|
open_spiel/python/tests/referee_test.py
|
open_spiel/python/tests/referee_test.py
|
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
ref = pyspiel.Referee("kuhn_poker",
[f"{base}/random_bot_py.sh",
f"{base}/random_bot_cpp.sh"])
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
|
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
ref = pyspiel.Referee(
"kuhn_poker",
[f"{base}/random_bot_py.sh", f"{base}/random_bot_cpp.sh"],
settings=pyspiel.TournamentSettings(timeout_ready=2000,
timeout_start=2000)
)
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
|
Increase timeouts for the python test.
|
Increase timeouts for the python test.
|
Python
|
apache-2.0
|
deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel
|
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
- ref = pyspiel.Referee("kuhn_poker",
+ ref = pyspiel.Referee(
- [f"{base}/random_bot_py.sh",
- f"{base}/random_bot_cpp.sh"])
+ "kuhn_poker",
+ [f"{base}/random_bot_py.sh", f"{base}/random_bot_cpp.sh"],
+ settings=pyspiel.TournamentSettings(timeout_ready=2000,
+ timeout_start=2000)
+ )
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
|
Increase timeouts for the python test.
|
## Code Before:
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
ref = pyspiel.Referee("kuhn_poker",
[f"{base}/random_bot_py.sh",
f"{base}/random_bot_cpp.sh"])
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
## Instruction:
Increase timeouts for the python test.
## Code After:
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
ref = pyspiel.Referee(
"kuhn_poker",
[f"{base}/random_bot_py.sh", f"{base}/random_bot_cpp.sh"],
settings=pyspiel.TournamentSettings(timeout_ready=2000,
timeout_start=2000)
)
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
|
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
- ref = pyspiel.Referee("kuhn_poker",
? -------------
+ ref = pyspiel.Referee(
- [f"{base}/random_bot_py.sh",
- f"{base}/random_bot_cpp.sh"])
+ "kuhn_poker",
+ [f"{base}/random_bot_py.sh", f"{base}/random_bot_cpp.sh"],
+ settings=pyspiel.TournamentSettings(timeout_ready=2000,
+ timeout_start=2000)
+ )
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
|
325fed2ef774e708e96d1b123672e1be238d7d21
|
nailgun/nailgun/models.py
|
nailgun/nailgun/models.py
|
from django.db import models
from django.contrib.auth.models import User
from jsonfield import JSONField
class Environment(models.Model):
#user = models.ForeignKey(User, related_name='environments')
name = models.CharField(max_length=100)
class Role(models.Model):
id = models.CharField(max_length=30, primary_key=True)
name = models.CharField(max_length=50)
class Node(models.Model):
NODE_STATUSES = (
('online', 'online'),
('offline', 'offline'),
('busy', 'busy'),
)
environment = models.ForeignKey(Environment, related_name='nodes')
name = models.CharField(max_length=100, primary_key=True)
status = models.CharField(max_length=30, choices=NODE_STATUSES,
default='online')
metadata = JSONField()
roles = models.ManyToManyField(Role, related_name='nodes')
|
from django.db import models
from django.contrib.auth.models import User
from jsonfield import JSONField
class Environment(models.Model):
#user = models.ForeignKey(User, related_name='environments')
name = models.CharField(max_length=100)
class Role(models.Model):
id = models.CharField(max_length=30, primary_key=True)
name = models.CharField(max_length=50)
class Node(models.Model):
NODE_STATUSES = (
('online', 'online'),
('offline', 'offline'),
('busy', 'busy'),
)
environment = models.ForeignKey(Environment, related_name='nodes',
null=True, blank=True, on_delete=models.SET_NULL)
name = models.CharField(max_length=100, primary_key=True)
status = models.CharField(max_length=30, choices=NODE_STATUSES,
default='online')
metadata = JSONField()
roles = models.ManyToManyField(Role, related_name='nodes')
|
Allow nodes not to have related environment
|
Allow nodes not to have related environment
|
Python
|
apache-2.0
|
SmartInfrastructures/fuel-main-dev,nebril/fuel-web,dancn/fuel-main-dev,SergK/fuel-main,zhaochao/fuel-main,nebril/fuel-web,prmtl/fuel-web,zhaochao/fuel-main,eayunstack/fuel-web,nebril/fuel-web,SmartInfrastructures/fuel-main-dev,SmartInfrastructures/fuel-main-dev,teselkin/fuel-main,Fiware/ops.Fuel-main-dev,teselkin/fuel-main,zhaochao/fuel-web,dancn/fuel-main-dev,eayunstack/fuel-web,stackforge/fuel-web,huntxu/fuel-web,SmartInfrastructures/fuel-web-dev,prmtl/fuel-web,eayunstack/fuel-web,zhaochao/fuel-main,teselkin/fuel-main,SmartInfrastructures/fuel-web-dev,SmartInfrastructures/fuel-web-dev,stackforge/fuel-web,Fiware/ops.Fuel-main-dev,AnselZhangGit/fuel-main,nebril/fuel-web,SmartInfrastructures/fuel-web-dev,eayunstack/fuel-web,dancn/fuel-main-dev,zhaochao/fuel-web,dancn/fuel-main-dev,koder-ua/nailgun-fcert,zhaochao/fuel-main,AnselZhangGit/fuel-main,teselkin/fuel-main,prmtl/fuel-web,huntxu/fuel-web,koder-ua/nailgun-fcert,SergK/fuel-main,huntxu/fuel-main,koder-ua/nailgun-fcert,huntxu/fuel-main,huntxu/fuel-main,stackforge/fuel-main,eayunstack/fuel-main,SmartInfrastructures/fuel-main-dev,ddepaoli3/fuel-main-dev,Fiware/ops.Fuel-main-dev,AnselZhangGit/fuel-main,stackforge/fuel-web,SergK/fuel-main,zhaochao/fuel-web,ddepaoli3/fuel-main-dev,SmartInfrastructures/fuel-web-dev,ddepaoli3/fuel-main-dev,AnselZhangGit/fuel-main,huntxu/fuel-web,nebril/fuel-web,prmtl/fuel-web,huntxu/fuel-web,eayunstack/fuel-main,huntxu/fuel-web,zhaochao/fuel-main,zhaochao/fuel-web,stackforge/fuel-main,ddepaoli3/fuel-main-dev,prmtl/fuel-web,zhaochao/fuel-web,stackforge/fuel-main,koder-ua/nailgun-fcert,eayunstack/fuel-web,eayunstack/fuel-main,Fiware/ops.Fuel-main-dev
|
from django.db import models
from django.contrib.auth.models import User
from jsonfield import JSONField
class Environment(models.Model):
#user = models.ForeignKey(User, related_name='environments')
name = models.CharField(max_length=100)
class Role(models.Model):
id = models.CharField(max_length=30, primary_key=True)
name = models.CharField(max_length=50)
class Node(models.Model):
NODE_STATUSES = (
('online', 'online'),
('offline', 'offline'),
('busy', 'busy'),
)
- environment = models.ForeignKey(Environment, related_name='nodes')
+ environment = models.ForeignKey(Environment, related_name='nodes',
+ null=True, blank=True, on_delete=models.SET_NULL)
name = models.CharField(max_length=100, primary_key=True)
status = models.CharField(max_length=30, choices=NODE_STATUSES,
default='online')
metadata = JSONField()
roles = models.ManyToManyField(Role, related_name='nodes')
|
Allow nodes not to have related environment
|
## Code Before:
from django.db import models
from django.contrib.auth.models import User
from jsonfield import JSONField
class Environment(models.Model):
#user = models.ForeignKey(User, related_name='environments')
name = models.CharField(max_length=100)
class Role(models.Model):
id = models.CharField(max_length=30, primary_key=True)
name = models.CharField(max_length=50)
class Node(models.Model):
NODE_STATUSES = (
('online', 'online'),
('offline', 'offline'),
('busy', 'busy'),
)
environment = models.ForeignKey(Environment, related_name='nodes')
name = models.CharField(max_length=100, primary_key=True)
status = models.CharField(max_length=30, choices=NODE_STATUSES,
default='online')
metadata = JSONField()
roles = models.ManyToManyField(Role, related_name='nodes')
## Instruction:
Allow nodes not to have related environment
## Code After:
from django.db import models
from django.contrib.auth.models import User
from jsonfield import JSONField
class Environment(models.Model):
#user = models.ForeignKey(User, related_name='environments')
name = models.CharField(max_length=100)
class Role(models.Model):
id = models.CharField(max_length=30, primary_key=True)
name = models.CharField(max_length=50)
class Node(models.Model):
NODE_STATUSES = (
('online', 'online'),
('offline', 'offline'),
('busy', 'busy'),
)
environment = models.ForeignKey(Environment, related_name='nodes',
null=True, blank=True, on_delete=models.SET_NULL)
name = models.CharField(max_length=100, primary_key=True)
status = models.CharField(max_length=30, choices=NODE_STATUSES,
default='online')
metadata = JSONField()
roles = models.ManyToManyField(Role, related_name='nodes')
|
from django.db import models
from django.contrib.auth.models import User
from jsonfield import JSONField
class Environment(models.Model):
#user = models.ForeignKey(User, related_name='environments')
name = models.CharField(max_length=100)
class Role(models.Model):
id = models.CharField(max_length=30, primary_key=True)
name = models.CharField(max_length=50)
class Node(models.Model):
NODE_STATUSES = (
('online', 'online'),
('offline', 'offline'),
('busy', 'busy'),
)
- environment = models.ForeignKey(Environment, related_name='nodes')
? ^
+ environment = models.ForeignKey(Environment, related_name='nodes',
? ^
+ null=True, blank=True, on_delete=models.SET_NULL)
name = models.CharField(max_length=100, primary_key=True)
status = models.CharField(max_length=30, choices=NODE_STATUSES,
default='online')
metadata = JSONField()
roles = models.ManyToManyField(Role, related_name='nodes')
|
4261aad86b40d052906b8162263e00aa7b12b5e7
|
pritunl_node/call_buffer.py
|
pritunl_node/call_buffer.py
|
from constants import *
import collections
import uuid
class CallBuffer():
def __init__(self):
self.waiter = None
self.queue = collections.deque(maxlen=CALL_QUEUE_MAX)
self.call_waiters = {}
def wait_for_calls(self, callback):
if self.waiter:
self.waiter([])
self.waiter = None
calls = []
while True:
try:
calls.append(self.queue.popleft())
except IndexError:
break
if calls:
callback(calls)
return
self.waiter = callback
def cancel_waiter(self):
self.waiter = None
def return_call(self, id, response):
callback = self.call_waiters.pop(id, None)
if callback:
callback(response)
def create_call(self, command, args, callback=None):
call_id = uuid.uuid4().hex
call = {
'id': call_id,
'command': command,
'args': args,
}
if callback:
self.call_waiters[call_id] = callback
if self.waiter:
self.waiter([call])
self.waiter = None
else:
self.queue.append(call)
|
from constants import *
import collections
import uuid
class CallBuffer():
def __init__(self):
self.waiter = None
self.queue = collections.deque(maxlen=CALL_QUEUE_MAX)
self.call_waiters = {}
def wait_for_calls(self, callback):
self.stop_waiter()
calls = []
while True:
try:
calls.append(self.queue.popleft())
except IndexError:
break
if calls:
callback(calls)
return
self.waiter = callback
def cancel_waiter(self):
self.waiter = None
def stop_waiter(self):
if self.waiter:
self.waiter(None)
self.waiter = None
def return_call(self, id, response):
callback = self.call_waiters.pop(id, None)
if callback:
callback(response)
def create_call(self, command, args, callback=None):
call_id = uuid.uuid4().hex
call = {
'id': call_id,
'command': command,
'args': args,
}
if callback:
self.call_waiters[call_id] = callback
if self.waiter:
self.waiter([call])
self.waiter = None
else:
self.queue.append(call)
|
Add stop waiter to call buffer
|
Add stop waiter to call buffer
|
Python
|
agpl-3.0
|
pritunl/pritunl-node,pritunl/pritunl-node
|
from constants import *
import collections
import uuid
class CallBuffer():
def __init__(self):
self.waiter = None
self.queue = collections.deque(maxlen=CALL_QUEUE_MAX)
self.call_waiters = {}
def wait_for_calls(self, callback):
- if self.waiter:
- self.waiter([])
+ self.stop_waiter()
- self.waiter = None
calls = []
while True:
try:
calls.append(self.queue.popleft())
except IndexError:
break
if calls:
callback(calls)
return
self.waiter = callback
def cancel_waiter(self):
self.waiter = None
+
+ def stop_waiter(self):
+ if self.waiter:
+ self.waiter(None)
+ self.waiter = None
def return_call(self, id, response):
callback = self.call_waiters.pop(id, None)
if callback:
callback(response)
def create_call(self, command, args, callback=None):
call_id = uuid.uuid4().hex
call = {
'id': call_id,
'command': command,
'args': args,
}
if callback:
self.call_waiters[call_id] = callback
if self.waiter:
self.waiter([call])
self.waiter = None
else:
self.queue.append(call)
|
Add stop waiter to call buffer
|
## Code Before:
from constants import *
import collections
import uuid
class CallBuffer():
def __init__(self):
self.waiter = None
self.queue = collections.deque(maxlen=CALL_QUEUE_MAX)
self.call_waiters = {}
def wait_for_calls(self, callback):
if self.waiter:
self.waiter([])
self.waiter = None
calls = []
while True:
try:
calls.append(self.queue.popleft())
except IndexError:
break
if calls:
callback(calls)
return
self.waiter = callback
def cancel_waiter(self):
self.waiter = None
def return_call(self, id, response):
callback = self.call_waiters.pop(id, None)
if callback:
callback(response)
def create_call(self, command, args, callback=None):
call_id = uuid.uuid4().hex
call = {
'id': call_id,
'command': command,
'args': args,
}
if callback:
self.call_waiters[call_id] = callback
if self.waiter:
self.waiter([call])
self.waiter = None
else:
self.queue.append(call)
## Instruction:
Add stop waiter to call buffer
## Code After:
from constants import *
import collections
import uuid
class CallBuffer():
def __init__(self):
self.waiter = None
self.queue = collections.deque(maxlen=CALL_QUEUE_MAX)
self.call_waiters = {}
def wait_for_calls(self, callback):
self.stop_waiter()
calls = []
while True:
try:
calls.append(self.queue.popleft())
except IndexError:
break
if calls:
callback(calls)
return
self.waiter = callback
def cancel_waiter(self):
self.waiter = None
def stop_waiter(self):
if self.waiter:
self.waiter(None)
self.waiter = None
def return_call(self, id, response):
callback = self.call_waiters.pop(id, None)
if callback:
callback(response)
def create_call(self, command, args, callback=None):
call_id = uuid.uuid4().hex
call = {
'id': call_id,
'command': command,
'args': args,
}
if callback:
self.call_waiters[call_id] = callback
if self.waiter:
self.waiter([call])
self.waiter = None
else:
self.queue.append(call)
|
from constants import *
import collections
import uuid
class CallBuffer():
def __init__(self):
self.waiter = None
self.queue = collections.deque(maxlen=CALL_QUEUE_MAX)
self.call_waiters = {}
def wait_for_calls(self, callback):
- if self.waiter:
- self.waiter([])
? ---- --
+ self.stop_waiter()
? +++++
- self.waiter = None
calls = []
while True:
try:
calls.append(self.queue.popleft())
except IndexError:
break
if calls:
callback(calls)
return
self.waiter = callback
def cancel_waiter(self):
self.waiter = None
+
+ def stop_waiter(self):
+ if self.waiter:
+ self.waiter(None)
+ self.waiter = None
def return_call(self, id, response):
callback = self.call_waiters.pop(id, None)
if callback:
callback(response)
def create_call(self, command, args, callback=None):
call_id = uuid.uuid4().hex
call = {
'id': call_id,
'command': command,
'args': args,
}
if callback:
self.call_waiters[call_id] = callback
if self.waiter:
self.waiter([call])
self.waiter = None
else:
self.queue.append(call)
|
6de7d5059d6d5fd2569f108e83fff0ae979aad89
|
train_twitter_data.py
|
train_twitter_data.py
|
from sklearn.datasets import load_files
from sklearn.feature_extraction.text import CountVectorizer
categories = ['neg', 'pos']
twitter_train = load_files('./twitter_data/twitter_data-train', categories=categories, load_content=True, shuffle=True, random_state=42)
count_vect = CountVectorizer()
X_train_counts = count_vect.fit_transform(twitter_train.data)
print(X_train_counts.shape)
|
from sklearn.datasets import load_files
from sklearn.feature_extraction.text import CountVectorizer
categories = ['neg', 'pos']
twitter_train = load_files('./twitter_data/twitter_data-train', categories=categories, load_content=True, shuffle=True, random_state=42)
#Ignoring decode errors may harm our results, but at least it works now
count_vect = CountVectorizer(decode_error='ignore')
X_train_counts = count_vect.fit_transform(twitter_train.data)
print(X_train_counts.shape)
|
Make vectorizer Ignore decode errors
|
Make vectorizer Ignore decode errors
This isn't ideal and could harm our results, but it actually runs now. Figuring out the proper encoding would be better if possible.
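Background on the trade-off being accepted here: decode_error='ignore' makes CountVectorizer silently drop bytes it cannot decode instead of raising UnicodeDecodeError, at the cost of possibly losing characters. A self-contained illustration with synthetic byte strings (not the twitter_data corpus):

from sklearn.feature_extraction.text import CountVectorizer

# One document contains a byte sequence that is invalid UTF-8 (0xff).
docs = [b"great movie, loved it", b"terrible \xff plot and acting"]

strict = CountVectorizer()                         # default decode_error='strict'
lenient = CountVectorizer(decode_error='ignore')   # undecodable bytes are dropped

try:
    strict.fit_transform(docs)
except UnicodeDecodeError as err:
    print("strict vectorizer fails:", err)

counts = lenient.fit_transform(docs)
print(counts.shape)                 # (2, number_of_terms)
print(sorted(lenient.vocabulary_))  # the surviving terms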
|
Python
|
apache-2.0
|
ngrudzinski/sentiment_analysis_437
|
from sklearn.datasets import load_files
from sklearn.feature_extraction.text import CountVectorizer
categories = ['neg', 'pos']
twitter_train = load_files('./twitter_data/twitter_data-train', categories=categories, load_content=True, shuffle=True, random_state=42)
- count_vect = CountVectorizer()
+ #Ignoring decode errors may harm our results, but at least it works now
+ count_vect = CountVectorizer(decode_error='ignore')
X_train_counts = count_vect.fit_transform(twitter_train.data)
print(X_train_counts.shape)
|
Make vectorizer Ignore decode errors
|
## Code Before:
from sklearn.datasets import load_files
from sklearn.feature_extraction.text import CountVectorizer
categories = ['neg', 'pos']
twitter_train = load_files('./twitter_data/twitter_data-train', categories=categories, load_content=True, shuffle=True, random_state=42)
count_vect = CountVectorizer()
X_train_counts = count_vect.fit_transform(twitter_train.data)
print(X_train_counts.shape)
## Instruction:
Make vectorizer Ignore decode errors
## Code After:
from sklearn.datasets import load_files
from sklearn.feature_extraction.text import CountVectorizer
categories = ['neg', 'pos']
twitter_train = load_files('./twitter_data/twitter_data-train', categories=categories, load_content=True, shuffle=True, random_state=42)
#Ignoring decode errors may harm our results, but at least it works now
count_vect = CountVectorizer(decode_error='ignore')
X_train_counts = count_vect.fit_transform(twitter_train.data)
print(X_train_counts.shape)
|
from sklearn.datasets import load_files
from sklearn.feature_extraction.text import CountVectorizer
categories = ['neg', 'pos']
twitter_train = load_files('./twitter_data/twitter_data-train', categories=categories, load_content=True, shuffle=True, random_state=42)
- count_vect = CountVectorizer()
+ #Ignoring decode errors may harm our results, but at least it works now
+ count_vect = CountVectorizer(decode_error='ignore')
X_train_counts = count_vect.fit_transform(twitter_train.data)
print(X_train_counts.shape)
|
4987b2e5a2d5ee208a274702f6b88a9021149c86
|
tests/blueprints/user_message/test_address_formatting.py
|
tests/blueprints/user_message/test_address_formatting.py
|
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(application, params):
screen_name, email_address, expected = params
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', '[email protected]', 'Alice <[email protected]>'),
('<AngleInvestor>', '[email protected]', '"<AngleInvestor>" <[email protected]>'),
('-=]YOLO[=-', '[email protected]', '"-=]YOLO[=-" <[email protected]>'),
])
def params(request):
yield request.param
@pytest.fixture
def application(db):
with app_context():
with database_recreated(db):
yield
|
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(site, params):
screen_name, email_address, expected = params
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', '[email protected]', 'Alice <[email protected]>'),
('<AngleInvestor>', '[email protected]', '"<AngleInvestor>" <[email protected]>'),
('-=]YOLO[=-', '[email protected]', '"-=]YOLO[=-" <[email protected]>'),
])
def params(request):
yield request.param
@pytest.fixture(scope='module')
def site(db):
with app_context():
with database_recreated(db):
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
yield site
|
Speed up user message address formatting test
|
Speed up user message address formatting test
The common set-up is moved to the fixture, then the fixture's scope is
widened so that it is used for all test cases in the module, avoiding
duplicate work.
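The pytest mechanism behind this speed-up: a fixture declared with scope='module' runs its setup and teardown once per test module instead of once per test function, so expensive work is shared across all cases. A minimal generic illustration (plain pytest, no byceps helpers; the names are made up):

import pytest

CALLS = {"expensive_setup": 0}

@pytest.fixture(scope="module")
def shared_resource():
    # Runs once for the whole module instead of once per test function.
    CALLS["expensive_setup"] += 1
    resource = {"site_id": 1}
    yield resource
    # Teardown also runs once, after the last test in the module.

@pytest.mark.parametrize("case", ["a", "b", "c"])
def test_uses_shared_resource(shared_resource, case):
    assert shared_resource["site_id"] == 1

def test_setup_ran_only_once(shared_resource):
    assert CALLS["expensive_setup"] == 1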
|
Python
|
bsd-3-clause
|
m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps
|
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
- def test_recipient_formatting(application, params):
+ def test_recipient_formatting(site, params):
screen_name, email_address, expected = params
-
- create_email_config()
-
- brand = create_brand()
- party = create_party(brand.id)
-
- site = create_site(party.id)
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', '[email protected]', 'Alice <[email protected]>'),
('<AngleInvestor>', '[email protected]', '"<AngleInvestor>" <[email protected]>'),
('-=]YOLO[=-', '[email protected]', '"-=]YOLO[=-" <[email protected]>'),
])
def params(request):
yield request.param
- @pytest.fixture
- def application(db):
+ @pytest.fixture(scope='module')
+ def site(db):
with app_context():
with database_recreated(db):
- yield
+ create_email_config()
+ brand = create_brand()
+ party = create_party(brand.id)
+
+ site = create_site(party.id)
+
+ yield site
+
|
Speed up user message address formatting test
|
## Code Before:
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(application, params):
screen_name, email_address, expected = params
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', '[email protected]', 'Alice <[email protected]>'),
('<AngleInvestor>', '[email protected]', '"<AngleInvestor>" <[email protected]>'),
('-=]YOLO[=-', '[email protected]', '"-=]YOLO[=-" <[email protected]>'),
])
def params(request):
yield request.param
@pytest.fixture
def application(db):
with app_context():
with database_recreated(db):
yield
## Instruction:
Speed up user message address formatting test
## Code After:
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(site, params):
screen_name, email_address, expected = params
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', '[email protected]', 'Alice <[email protected]>'),
('<AngleInvestor>', '[email protected]', '"<AngleInvestor>" <[email protected]>'),
('-=]YOLO[=-', '[email protected]', '"-=]YOLO[=-" <[email protected]>'),
])
def params(request):
yield request.param
@pytest.fixture(scope='module')
def site(db):
with app_context():
with database_recreated(db):
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
yield site
|
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
- def test_recipient_formatting(application, params):
? ^^^^ -- ^^^
+ def test_recipient_formatting(site, params):
? ^ ^
screen_name, email_address, expected = params
-
- create_email_config()
-
- brand = create_brand()
- party = create_party(brand.id)
-
- site = create_site(party.id)
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', '[email protected]', 'Alice <[email protected]>'),
('<AngleInvestor>', '[email protected]', '"<AngleInvestor>" <[email protected]>'),
('-=]YOLO[=-', '[email protected]', '"-=]YOLO[=-" <[email protected]>'),
])
def params(request):
yield request.param
- @pytest.fixture
- def application(db):
+ @pytest.fixture(scope='module')
+ def site(db):
with app_context():
with database_recreated(db):
+ create_email_config()
+
+ brand = create_brand()
+ party = create_party(brand.id)
+
+ site = create_site(party.id)
+
- yield
+ yield site
? +++++
|
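The speed-up above comes from pytest fixture scoping: with scope='module', the email config, brand, party and site are created once for the whole test module instead of once per parametrized case. A minimal, self-contained sketch of that behaviour (generic pytest only, not byceps code; all names below are made up):

import pytest

SETUP_CALLS = []

@pytest.fixture(scope='module')
def expensive_resource():
    # Runs once per module, no matter how many parametrized tests use it.
    SETUP_CALLS.append('setup')
    yield 'resource'

@pytest.fixture(params=[1, 2, 3])
def value(request):
    yield request.param

def test_parametrized(expensive_resource, value):
    assert expensive_resource == 'resource'
    assert len(SETUP_CALLS) == 1   # setup ran exactly once across all params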
38ceb6d04f7b09b3ab29468c2fa9ccc94e1b5dc5
|
casepro/pods/views.py
|
casepro/pods/views.py
|
from __future__ import unicode_literals
import json
from django.http import JsonResponse
from casepro.pods import registry
def read_pod_data(request, index):
"""Delegates to the `read_data` function of the correct pod."""
if request.method != 'GET':
return JsonResponse({'reason': 'Method not allowed'}, status=405)
try:
pod = registry.pods[int(index)]
except IndexError:
return JsonResponse({'reason': 'Pod does not exist'}, status=404)
return JsonResponse(pod.read_data(request.GET))
def perform_pod_action(request, index):
"""Deletegates to the `perform_action` function of the correct pod."""
if request.method != 'POST':
return JsonResponse({'reason': 'Method not allowed'}, status=405)
try:
pod = registry.pods[int(index)]
except IndexError:
return JsonResponse({'reason': 'Pod does not exist'}, status=404)
try:
data = json.loads(request.body)
except ValueError as e:
return JsonResponse({'reason': 'JSON decode error', 'details': e.message}, status=400)
return JsonResponse(pod.perform_action(data))
|
from __future__ import unicode_literals
import json
from django.http import JsonResponse
from casepro.cases.models import Case, CaseAction
from casepro.pods import registry
def read_pod_data(request, index):
"""Delegates to the `read_data` function of the correct pod."""
if request.method != 'GET':
return JsonResponse({'reason': 'Method not allowed'}, status=405)
try:
pod = registry.pods[int(index)]
except IndexError:
return JsonResponse({'reason': 'Pod does not exist'}, status=404)
return JsonResponse(pod.read_data(request.GET))
def perform_pod_action(request, index):
"""Deletegates to the `perform_action` function of the correct pod."""
if request.method != 'POST':
return JsonResponse({'reason': 'Method not allowed'}, status=405)
try:
pod = registry.pods[int(index)]
except IndexError:
return JsonResponse({'reason': 'Pod does not exist'}, status=404)
try:
data = json.loads(request.body)
except ValueError as e:
return JsonResponse({'reason': 'JSON decode error', 'details': e.message}, status=400)
case_id = data.get('case_id')
if case_id is None:
return JsonResponse(
{'reason': 'Request object needs to have a "case_id" field'}, status=400)
action_data = data.get('action', {})
success, payload = pod.perform_action(action_data.get('type'), action_data.get('payload', {}))
if success is True:
case = Case.objects.get(id=case_id)
CaseAction.create(case, request.user, CaseAction.ADD_NOTE, note=payload.get('message'))
return JsonResponse(pod.perform_action(data))
|
Change case field to case_id in error message
|
Change case field to case_id in error message
|
Python
|
bsd-3-clause
|
xkmato/casepro,praekelt/casepro,rapidpro/casepro,rapidpro/casepro,rapidpro/casepro,praekelt/casepro,xkmato/casepro,praekelt/casepro
|
from __future__ import unicode_literals
import json
from django.http import JsonResponse
+ from casepro.cases.models import Case, CaseAction
from casepro.pods import registry
def read_pod_data(request, index):
"""Delegates to the `read_data` function of the correct pod."""
if request.method != 'GET':
return JsonResponse({'reason': 'Method not allowed'}, status=405)
try:
pod = registry.pods[int(index)]
except IndexError:
return JsonResponse({'reason': 'Pod does not exist'}, status=404)
return JsonResponse(pod.read_data(request.GET))
def perform_pod_action(request, index):
"""Deletegates to the `perform_action` function of the correct pod."""
if request.method != 'POST':
return JsonResponse({'reason': 'Method not allowed'}, status=405)
try:
pod = registry.pods[int(index)]
except IndexError:
return JsonResponse({'reason': 'Pod does not exist'}, status=404)
try:
data = json.loads(request.body)
except ValueError as e:
return JsonResponse({'reason': 'JSON decode error', 'details': e.message}, status=400)
+ case_id = data.get('case_id')
+ if case_id is None:
+ return JsonResponse(
+ {'reason': 'Request object needs to have a "case_id" field'}, status=400)
+
+ action_data = data.get('action', {})
+ success, payload = pod.perform_action(action_data.get('type'), action_data.get('payload', {}))
+ if success is True:
+ case = Case.objects.get(id=case_id)
+ CaseAction.create(case, request.user, CaseAction.ADD_NOTE, note=payload.get('message'))
+
return JsonResponse(pod.perform_action(data))
|
Change case field to case_id in error message
|
## Code Before:
from __future__ import unicode_literals
import json
from django.http import JsonResponse
from casepro.pods import registry
def read_pod_data(request, index):
"""Delegates to the `read_data` function of the correct pod."""
if request.method != 'GET':
return JsonResponse({'reason': 'Method not allowed'}, status=405)
try:
pod = registry.pods[int(index)]
except IndexError:
return JsonResponse({'reason': 'Pod does not exist'}, status=404)
return JsonResponse(pod.read_data(request.GET))
def perform_pod_action(request, index):
"""Deletegates to the `perform_action` function of the correct pod."""
if request.method != 'POST':
return JsonResponse({'reason': 'Method not allowed'}, status=405)
try:
pod = registry.pods[int(index)]
except IndexError:
return JsonResponse({'reason': 'Pod does not exist'}, status=404)
try:
data = json.loads(request.body)
except ValueError as e:
return JsonResponse({'reason': 'JSON decode error', 'details': e.message}, status=400)
return JsonResponse(pod.perform_action(data))
## Instruction:
Change case field to case_id in error message
## Code After:
from __future__ import unicode_literals
import json
from django.http import JsonResponse
from casepro.cases.models import Case, CaseAction
from casepro.pods import registry
def read_pod_data(request, index):
"""Delegates to the `read_data` function of the correct pod."""
if request.method != 'GET':
return JsonResponse({'reason': 'Method not allowed'}, status=405)
try:
pod = registry.pods[int(index)]
except IndexError:
return JsonResponse({'reason': 'Pod does not exist'}, status=404)
return JsonResponse(pod.read_data(request.GET))
def perform_pod_action(request, index):
"""Deletegates to the `perform_action` function of the correct pod."""
if request.method != 'POST':
return JsonResponse({'reason': 'Method not allowed'}, status=405)
try:
pod = registry.pods[int(index)]
except IndexError:
return JsonResponse({'reason': 'Pod does not exist'}, status=404)
try:
data = json.loads(request.body)
except ValueError as e:
return JsonResponse({'reason': 'JSON decode error', 'details': e.message}, status=400)
case_id = data.get('case_id')
if case_id is None:
return JsonResponse(
{'reason': 'Request object needs to have a "case_id" field'}, status=400)
action_data = data.get('action', {})
success, payload = pod.perform_action(action_data.get('type'), action_data.get('payload', {}))
if success is True:
case = Case.objects.get(id=case_id)
CaseAction.create(case, request.user, CaseAction.ADD_NOTE, note=payload.get('message'))
return JsonResponse(pod.perform_action(data))
|
from __future__ import unicode_literals
import json
from django.http import JsonResponse
+ from casepro.cases.models import Case, CaseAction
from casepro.pods import registry
def read_pod_data(request, index):
"""Delegates to the `read_data` function of the correct pod."""
if request.method != 'GET':
return JsonResponse({'reason': 'Method not allowed'}, status=405)
try:
pod = registry.pods[int(index)]
except IndexError:
return JsonResponse({'reason': 'Pod does not exist'}, status=404)
return JsonResponse(pod.read_data(request.GET))
def perform_pod_action(request, index):
"""Deletegates to the `perform_action` function of the correct pod."""
if request.method != 'POST':
return JsonResponse({'reason': 'Method not allowed'}, status=405)
try:
pod = registry.pods[int(index)]
except IndexError:
return JsonResponse({'reason': 'Pod does not exist'}, status=404)
try:
data = json.loads(request.body)
except ValueError as e:
return JsonResponse({'reason': 'JSON decode error', 'details': e.message}, status=400)
+ case_id = data.get('case_id')
+ if case_id is None:
+ return JsonResponse(
+ {'reason': 'Request object needs to have a "case_id" field'}, status=400)
+
+ action_data = data.get('action', {})
+ success, payload = pod.perform_action(action_data.get('type'), action_data.get('payload', {}))
+ if success is True:
+ case = Case.objects.get(id=case_id)
+ CaseAction.create(case, request.user, CaseAction.ADD_NOTE, note=payload.get('message'))
+
return JsonResponse(pod.perform_action(data))
|
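After this change, perform_pod_action expects a JSON body carrying a case_id plus a nested action object whose type and payload are forwarded to the pod. A hypothetical request payload assembled from the field names in the diff (the id, action type and URL below are made up, not casepro values):

import json

payload = {
    'case_id': 123,                      # required; missing it yields a 400
    'action': {
        'type': 'note',                  # forwarded to pod.perform_action()
        'payload': {'reason': 'spam'},   # optional, defaults to {}
    },
}
body = json.dumps(payload)
# e.g. client.post('/pods/0/action/', body, content_type='application/json')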
98552a4cb683e25ec9af53024e58644c04b55872
|
molly/external_media/views.py
|
molly/external_media/views.py
|
from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
response = HttpResponse(open(eis.get_filename(), 'rb').read(), mimetype=eis.content_type.encode('ascii'))
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
|
from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
try:
response = HttpResponse(open(eis.get_filename(), 'rb').read(),
mimetype=eis.content_type.encode('ascii'))
except IOError:
eis.delete()
raise Http404()
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
|
Handle missing external files gracefully
|
MOX-182: Handle missing external files gracefully
|
Python
|
apache-2.0
|
mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject
|
from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
- response = HttpResponse(open(eis.get_filename(), 'rb').read(), mimetype=eis.content_type.encode('ascii'))
+ try:
+ response = HttpResponse(open(eis.get_filename(), 'rb').read(),
+ mimetype=eis.content_type.encode('ascii'))
+ except IOError:
+ eis.delete()
+ raise Http404()
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
|
Handle missing external files gracefully
|
## Code Before:
from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
response = HttpResponse(open(eis.get_filename(), 'rb').read(), mimetype=eis.content_type.encode('ascii'))
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
## Instruction:
Handle missing external files gracefully
## Code After:
from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
try:
response = HttpResponse(open(eis.get_filename(), 'rb').read(),
mimetype=eis.content_type.encode('ascii'))
except IOError:
eis.delete()
raise Http404()
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
|
from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
- response = HttpResponse(open(eis.get_filename(), 'rb').read(), mimetype=eis.content_type.encode('ascii'))
+ try:
+ response = HttpResponse(open(eis.get_filename(), 'rb').read(),
+ mimetype=eis.content_type.encode('ascii'))
+ except IOError:
+ eis.delete()
+ raise Http404()
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
|
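The fix wraps the file read so that a cached image whose file has vanished from disk is purged and reported as a 404 instead of crashing with an unhandled IOError. A framework-free sketch of the same pattern (names are illustrative, not molly APIs):

def read_or_purge(path, delete_record):
    """Return the file contents, or purge the stale record and return None."""
    try:
        with open(path, 'rb') as f:
            return f.read()
    except IOError:
        delete_record()   # the backing file is gone, so drop the stale row
        return None       # the caller turns this into a 404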
52077ba667efcf596c9186dbbd8d7cedc95d624d
|
tests/document_test.py
|
tests/document_test.py
|
from document import Document
import unittest
class TestDocument(unittest.TestCase):
def _get_document(self):
spec = {
"author": "John Humphreys",
"content": {
"title": "How to make cookies",
"text": "First start by pre-heating the oven..."
},
"category": "cooking",
"comments": [
{
"commenter": "Julio Cesar",
"email": "[email protected]",
"comment": "Great post dude!"
},
{
"commenter": "Michael Andrews",
"comment": "My wife loves these."
}
],
"tags": ["recipe", "cookies"]
}
return Document(spec)
def test_create_with_invalid_key_names(self):
with self.assertRaises(Exception):
Document({'contains space': 34})
def test_creates_nested_document_tree(self):
document = self._get_document()
self.assertIsInstance(document['content'], Document)
self.assertIsInstance(document['comments'][0], Document)
def test_provides_attribute_getters(self):
document = self._get_document()
self.assertEqual("cooking", document.category)
self.assertEqual("Julio Cesar", document.comments[0].commenter)
def test_provides_attribute_setters(self):
document = self._get_document()
document.category = "baking"
self.assertEqual("baking", document['category'])
|
from document import Document
import unittest
class TestDocument(unittest.TestCase):
def _get_document(self):
spec = {
"author": "John Humphreys",
"content": {
"title": "How to make cookies",
"text": "First start by pre-heating the oven..."
},
"category": "cooking",
"comments": [
{
"commenter": "Julio Cesar",
"email": "[email protected]",
"comment": "Great post dude!"
},
{
"commenter": "Michael Andrews",
"comment": "My wife loves these."
}
],
"tags": ["recipe", "cookies"]
}
return Document(spec)
def test_create_with_invalid_key_names(self):
with self.assertRaises(Exception):
Document({'contains space': 34})
with self.assertRaises(Exception):
Document({'': 45})
def test_creates_nested_document_tree(self):
document = self._get_document()
self.assertIsInstance(document['content'], Document)
self.assertIsInstance(document['comments'][0], Document)
def test_provides_attribute_getters(self):
document = self._get_document()
self.assertEqual("cooking", document.category)
self.assertEqual("Julio Cesar", document.comments[0].commenter)
def test_provides_attribute_setters(self):
document = self._get_document()
document.category = "baking"
self.assertEqual("baking", document['category'])
|
Test blank keys are not allowed.
|
Test blank keys are not allowed.
|
Python
|
mit
|
gamechanger/schemer,gamechanger/mongothon
|
from document import Document
import unittest
class TestDocument(unittest.TestCase):
def _get_document(self):
spec = {
"author": "John Humphreys",
"content": {
"title": "How to make cookies",
"text": "First start by pre-heating the oven..."
},
"category": "cooking",
"comments": [
{
"commenter": "Julio Cesar",
"email": "[email protected]",
"comment": "Great post dude!"
},
{
"commenter": "Michael Andrews",
"comment": "My wife loves these."
}
],
"tags": ["recipe", "cookies"]
}
return Document(spec)
def test_create_with_invalid_key_names(self):
with self.assertRaises(Exception):
Document({'contains space': 34})
+ with self.assertRaises(Exception):
+ Document({'': 45})
+
def test_creates_nested_document_tree(self):
document = self._get_document()
self.assertIsInstance(document['content'], Document)
self.assertIsInstance(document['comments'][0], Document)
def test_provides_attribute_getters(self):
document = self._get_document()
self.assertEqual("cooking", document.category)
self.assertEqual("Julio Cesar", document.comments[0].commenter)
def test_provides_attribute_setters(self):
document = self._get_document()
document.category = "baking"
self.assertEqual("baking", document['category'])
|
Test blank keys are not allowed.
|
## Code Before:
from document import Document
import unittest
class TestDocument(unittest.TestCase):
def _get_document(self):
spec = {
"author": "John Humphreys",
"content": {
"title": "How to make cookies",
"text": "First start by pre-heating the oven..."
},
"category": "cooking",
"comments": [
{
"commenter": "Julio Cesar",
"email": "[email protected]",
"comment": "Great post dude!"
},
{
"commenter": "Michael Andrews",
"comment": "My wife loves these."
}
],
"tags": ["recipe", "cookies"]
}
return Document(spec)
def test_create_with_invalid_key_names(self):
with self.assertRaises(Exception):
Document({'contains space': 34})
def test_creates_nested_document_tree(self):
document = self._get_document()
self.assertIsInstance(document['content'], Document)
self.assertIsInstance(document['comments'][0], Document)
def test_provides_attribute_getters(self):
document = self._get_document()
self.assertEqual("cooking", document.category)
self.assertEqual("Julio Cesar", document.comments[0].commenter)
def test_provides_attribute_setters(self):
document = self._get_document()
document.category = "baking"
self.assertEqual("baking", document['category'])
## Instruction:
Test blank keys are not allowed.
## Code After:
from document import Document
import unittest
class TestDocument(unittest.TestCase):
def _get_document(self):
spec = {
"author": "John Humphreys",
"content": {
"title": "How to make cookies",
"text": "First start by pre-heating the oven..."
},
"category": "cooking",
"comments": [
{
"commenter": "Julio Cesar",
"email": "[email protected]",
"comment": "Great post dude!"
},
{
"commenter": "Michael Andrews",
"comment": "My wife loves these."
}
],
"tags": ["recipe", "cookies"]
}
return Document(spec)
def test_create_with_invalid_key_names(self):
with self.assertRaises(Exception):
Document({'contains space': 34})
with self.assertRaises(Exception):
Document({'': 45})
def test_creates_nested_document_tree(self):
document = self._get_document()
self.assertIsInstance(document['content'], Document)
self.assertIsInstance(document['comments'][0], Document)
def test_provides_attribute_getters(self):
document = self._get_document()
self.assertEqual("cooking", document.category)
self.assertEqual("Julio Cesar", document.comments[0].commenter)
def test_provides_attribute_setters(self):
document = self._get_document()
document.category = "baking"
self.assertEqual("baking", document['category'])
|
from document import Document
import unittest
class TestDocument(unittest.TestCase):
def _get_document(self):
spec = {
"author": "John Humphreys",
"content": {
"title": "How to make cookies",
"text": "First start by pre-heating the oven..."
},
"category": "cooking",
"comments": [
{
"commenter": "Julio Cesar",
"email": "[email protected]",
"comment": "Great post dude!"
},
{
"commenter": "Michael Andrews",
"comment": "My wife loves these."
}
],
"tags": ["recipe", "cookies"]
}
return Document(spec)
def test_create_with_invalid_key_names(self):
with self.assertRaises(Exception):
Document({'contains space': 34})
+ with self.assertRaises(Exception):
+ Document({'': 45})
+
def test_creates_nested_document_tree(self):
document = self._get_document()
self.assertIsInstance(document['content'], Document)
self.assertIsInstance(document['comments'][0], Document)
def test_provides_attribute_getters(self):
document = self._get_document()
self.assertEqual("cooking", document.category)
self.assertEqual("Julio Cesar", document.comments[0].commenter)
def test_provides_attribute_setters(self):
document = self._get_document()
document.category = "baking"
self.assertEqual("baking", document['category'])
|
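The added assertion implies that Document rejects empty key names as well as keys containing spaces. The Document implementation itself is not part of this record, so the validation below is only an assumed sketch of logic that would satisfy both assertions:

import re

VALID_KEY = re.compile(r'^\S+$')   # non-empty, no whitespace anywhere

def validate_key(key):
    if not key or not VALID_KEY.match(key):
        raise Exception('Invalid key name: %r' % (key,))

validate_key('author')              # passes
# validate_key('')                  # raises Exception
# validate_key('contains space')    # raises Exception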
06a70ae323f0eb1fe50c1f01a31ef9548a24b00c
|
tests/test_favicons.py
|
tests/test_favicons.py
|
from django.test import TestCase
from mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
from . import responses
class FaviconTests(TestCase):
@patch("requests.get")
def test_existing_favicon_new_feed(self, get):
get.return_value = responses(304)
FeedFactory.create(url='http://example.com/feed')
self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
# Simulate a 1st call of update_favicon which creates a Favicon entry
Favicon.objects.create(url='http://example.com/feed',
favicon='favicons/example.com.ico')
Favicon.objects.update_favicon('http://example.com/feed')
self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
'favicons/example.com.ico')
|
from mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
from . import responses, TestCase
class FaviconTests(TestCase):
@patch("requests.get")
def test_existing_favicon_new_feed(self, get):
get.return_value = responses(304)
FeedFactory.create(url='http://example.com/feed')
self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
# Simulate a 1st call of update_favicon which creates a Favicon entry
Favicon.objects.create(url='http://example.com/feed',
favicon='favicons/example.com.ico')
Favicon.objects.update_favicon('http://example.com/feed')
self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
'favicons/example.com.ico')
|
Use base TestCase to properly cleanup indices
|
Use base TestCase to properly cleanup indices
|
Python
|
bsd-3-clause
|
rmoorman/feedhq,feedhq/feedhq,rmoorman/feedhq,feedhq/feedhq,rmoorman/feedhq,rmoorman/feedhq,rmoorman/feedhq,vincentbernat/feedhq,feedhq/feedhq,vincentbernat/feedhq,feedhq/feedhq,vincentbernat/feedhq,feedhq/feedhq,vincentbernat/feedhq,vincentbernat/feedhq
|
- from django.test import TestCase
from mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
- from . import responses
+ from . import responses, TestCase
class FaviconTests(TestCase):
@patch("requests.get")
def test_existing_favicon_new_feed(self, get):
get.return_value = responses(304)
FeedFactory.create(url='http://example.com/feed')
self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
# Simulate a 1st call of update_favicon which creates a Favicon entry
Favicon.objects.create(url='http://example.com/feed',
favicon='favicons/example.com.ico')
Favicon.objects.update_favicon('http://example.com/feed')
self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
'favicons/example.com.ico')
|
Use base TestCase to properly cleanup indices
|
## Code Before:
from django.test import TestCase
from mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
from . import responses
class FaviconTests(TestCase):
@patch("requests.get")
def test_existing_favicon_new_feed(self, get):
get.return_value = responses(304)
FeedFactory.create(url='http://example.com/feed')
self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
# Simulate a 1st call of update_favicon which creates a Favicon entry
Favicon.objects.create(url='http://example.com/feed',
favicon='favicons/example.com.ico')
Favicon.objects.update_favicon('http://example.com/feed')
self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
'favicons/example.com.ico')
## Instruction:
Use base TestCase to properly cleanup indices
## Code After:
from mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
from . import responses, TestCase
class FaviconTests(TestCase):
@patch("requests.get")
def test_existing_favicon_new_feed(self, get):
get.return_value = responses(304)
FeedFactory.create(url='http://example.com/feed')
self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
# Simulate a 1st call of update_favicon which creates a Favicon entry
Favicon.objects.create(url='http://example.com/feed',
favicon='favicons/example.com.ico')
Favicon.objects.update_favicon('http://example.com/feed')
self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
'favicons/example.com.ico')
|
- from django.test import TestCase
from mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
- from . import responses
+ from . import responses, TestCase
? ++++++++++
class FaviconTests(TestCase):
@patch("requests.get")
def test_existing_favicon_new_feed(self, get):
get.return_value = responses(304)
FeedFactory.create(url='http://example.com/feed')
self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
# Simulate a 1st call of update_favicon which creates a Favicon entry
Favicon.objects.create(url='http://example.com/feed',
favicon='favicons/example.com.ico')
Favicon.objects.update_favicon('http://example.com/feed')
self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
'favicons/example.com.ico')
|
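The change swaps django.test.TestCase for a TestCase defined in the tests package, whose purpose (per the commit message) is to clean up indices between tests. That class is not shown in this record, so the sketch below only illustrates the general shape such a project-level base class tends to take, with a placeholder cleanup hook rather than feedhq's real code:

from django.test import TestCase as DjangoTestCase

class TestCase(DjangoTestCase):
    def tearDown(self):
        super(TestCase, self).tearDown()
        self.cleanup_indices()

    def cleanup_indices(self):
        # Placeholder: the real base class would reset whatever feed/search
        # indices the project keeps so one test cannot leak into the next.
        pass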
483cf7f91a89e040184bd71a0a1c59c0e0926e34
|
elasticmapping/types.py
|
elasticmapping/types.py
|
class CallableDict(dict):
BASE = None
OVERRIDES = None
def __call__(self, overrides):
new_dict = CallableDict(self)
new_dict.OVERRIDES = overrides
new_dict.BASE = self
return new_dict
BASE_TYPE = {
'store': False,
'doc_values': False
}
STRING = CallableDict({
'type': 'string',
'index': 'analyzed'
})
FLOAT = CallableDict({
'type': 'float'
})
DOUBLE = CallableDict({
'type': 'double'
})
INTEGER = CallableDict({
'type': 'integer'
})
LONG = CallableDict({
'type': 'long'
})
SHORT = CallableDict({
'type': 'short'
})
BYTE = CallableDict({
'type': 'byte'
})
BOOLEAN = CallableDict({
'type': 'boolean'
})
DATE = CallableDict({
'type': 'date',
'format': 'date'
})
DATETIME = CallableDict({
'type': 'date',
'format': 'date_hour_minute_second_fraction'
})
TYPES = {
name: type
for name, type in locals().items()
if isinstance(type, CallableDict)
}
|
class CallableDict(dict):
BASE = None
OVERRIDES = None
def __call__(self, overrides):
new_dict = CallableDict(self)
new_dict.OVERRIDES = overrides
new_dict.BASE = self
return new_dict
BASE_TYPE = {
'store': False,
'doc_values': False
}
STRING = CallableDict({
'type': 'string',
'index': 'analyzed'
})
FLOAT = CallableDict({
'type': 'float'
})
DOUBLE = CallableDict({
'type': 'double'
})
INTEGER = CallableDict({
'type': 'integer'
})
LONG = CallableDict({
'type': 'long'
})
SHORT = CallableDict({
'type': 'short'
})
BYTE = CallableDict({
'type': 'byte'
})
BOOLEAN = CallableDict({
'type': 'boolean'
})
DATE = CallableDict({
'type': 'date',
'format': 'date'
})
TIME = CallableDict({
'type': 'date',
'format': 'time'
})
DATETIME = CallableDict({
'type': 'date',
'format': 'date_optional_time'
})
TYPES = {
name: type
for name, type in locals().items()
if isinstance(type, CallableDict)
}
|
Switch default to actual ES default (date_optional_time) and add TIME type
|
Switch default to actual ES default (date_optional_time) and add TIME type
|
Python
|
mit
|
Fizzadar/ElasticMapping,Fizzadar/ElasticMapping
|
class CallableDict(dict):
BASE = None
OVERRIDES = None
def __call__(self, overrides):
new_dict = CallableDict(self)
new_dict.OVERRIDES = overrides
new_dict.BASE = self
return new_dict
BASE_TYPE = {
'store': False,
'doc_values': False
}
STRING = CallableDict({
'type': 'string',
'index': 'analyzed'
})
FLOAT = CallableDict({
'type': 'float'
})
DOUBLE = CallableDict({
'type': 'double'
})
INTEGER = CallableDict({
'type': 'integer'
})
LONG = CallableDict({
'type': 'long'
})
SHORT = CallableDict({
'type': 'short'
})
BYTE = CallableDict({
'type': 'byte'
})
BOOLEAN = CallableDict({
'type': 'boolean'
})
DATE = CallableDict({
'type': 'date',
'format': 'date'
})
+ TIME = CallableDict({
+ 'type': 'date',
+ 'format': 'time'
+ })
+
DATETIME = CallableDict({
'type': 'date',
- 'format': 'date_hour_minute_second_fraction'
+ 'format': 'date_optional_time'
})
TYPES = {
name: type
for name, type in locals().items()
if isinstance(type, CallableDict)
}
|
Switch default to actual ES default (date_optional_time) and add TIME type
|
## Code Before:
class CallableDict(dict):
BASE = None
OVERRIDES = None
def __call__(self, overrides):
new_dict = CallableDict(self)
new_dict.OVERRIDES = overrides
new_dict.BASE = self
return new_dict
BASE_TYPE = {
'store': False,
'doc_values': False
}
STRING = CallableDict({
'type': 'string',
'index': 'analyzed'
})
FLOAT = CallableDict({
'type': 'float'
})
DOUBLE = CallableDict({
'type': 'double'
})
INTEGER = CallableDict({
'type': 'integer'
})
LONG = CallableDict({
'type': 'long'
})
SHORT = CallableDict({
'type': 'short'
})
BYTE = CallableDict({
'type': 'byte'
})
BOOLEAN = CallableDict({
'type': 'boolean'
})
DATE = CallableDict({
'type': 'date',
'format': 'date'
})
DATETIME = CallableDict({
'type': 'date',
'format': 'date_hour_minute_second_fraction'
})
TYPES = {
name: type
for name, type in locals().items()
if isinstance(type, CallableDict)
}
## Instruction:
Switch default to actual ES default (date_optional_time) and add TIME type
## Code After:
class CallableDict(dict):
BASE = None
OVERRIDES = None
def __call__(self, overrides):
new_dict = CallableDict(self)
new_dict.OVERRIDES = overrides
new_dict.BASE = self
return new_dict
BASE_TYPE = {
'store': False,
'doc_values': False
}
STRING = CallableDict({
'type': 'string',
'index': 'analyzed'
})
FLOAT = CallableDict({
'type': 'float'
})
DOUBLE = CallableDict({
'type': 'double'
})
INTEGER = CallableDict({
'type': 'integer'
})
LONG = CallableDict({
'type': 'long'
})
SHORT = CallableDict({
'type': 'short'
})
BYTE = CallableDict({
'type': 'byte'
})
BOOLEAN = CallableDict({
'type': 'boolean'
})
DATE = CallableDict({
'type': 'date',
'format': 'date'
})
TIME = CallableDict({
'type': 'date',
'format': 'time'
})
DATETIME = CallableDict({
'type': 'date',
'format': 'date_optional_time'
})
TYPES = {
name: type
for name, type in locals().items()
if isinstance(type, CallableDict)
}
|
class CallableDict(dict):
BASE = None
OVERRIDES = None
def __call__(self, overrides):
new_dict = CallableDict(self)
new_dict.OVERRIDES = overrides
new_dict.BASE = self
return new_dict
BASE_TYPE = {
'store': False,
'doc_values': False
}
STRING = CallableDict({
'type': 'string',
'index': 'analyzed'
})
FLOAT = CallableDict({
'type': 'float'
})
DOUBLE = CallableDict({
'type': 'double'
})
INTEGER = CallableDict({
'type': 'integer'
})
LONG = CallableDict({
'type': 'long'
})
SHORT = CallableDict({
'type': 'short'
})
BYTE = CallableDict({
'type': 'byte'
})
BOOLEAN = CallableDict({
'type': 'boolean'
})
DATE = CallableDict({
'type': 'date',
'format': 'date'
})
+ TIME = CallableDict({
+ 'type': 'date',
+ 'format': 'time'
+ })
+
DATETIME = CallableDict({
'type': 'date',
- 'format': 'date_hour_minute_second_fraction'
+ 'format': 'date_optional_time'
})
TYPES = {
name: type
for name, type in locals().items()
if isinstance(type, CallableDict)
}
|
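A usage sketch based only on the CallableDict definition shown above, assuming the module is importable as elasticmapping.types (the file path given in this record). Calling a type with an overrides dict returns a copy that records its BASE and OVERRIDES while leaving the shared constant untouched:

from elasticmapping.types import DATETIME, STRING, TIME

raw_string = STRING({'index': 'not_analyzed'})

assert raw_string['type'] == 'string'                     # copies the base mapping
assert raw_string.OVERRIDES == {'index': 'not_analyzed'}  # remembers the overrides
assert raw_string.BASE is STRING
assert STRING.OVERRIDES is None                           # original left unchanged

assert TIME['format'] == 'time'
assert DATETIME['format'] == 'date_optional_time'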
0c186d8e0fb5bd7170ec55943e546f1e4e335839
|
masters/master.tryserver.chromium/master_site_config.py
|
masters/master.tryserver.chromium/master_site_config.py
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServer(Master.Master4):
project_name = 'Chromium Try Server'
master_port = 8028
slave_port = 8128
master_port_alt = 8228
try_job_port = 8328
# Select tree status urls and codereview location.
reply_to = '[email protected]'
base_app_url = 'https://chromium-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
last_good_blink_url = 'http://blink-status.appspot.com/lkgr'
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try'
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/'
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServer(Master.Master4):
project_name = 'Chromium Try Server'
master_port = 8028
slave_port = 8128
master_port_alt = 8228
try_job_port = 8328
# Select tree status urls and codereview location.
reply_to = '[email protected]'
base_app_url = 'https://chromium-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = None
last_good_blink_url = None
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try'
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/'
|
Remove last good URL for tryserver.chromium
|
Remove last good URL for tryserver.chromium
The expected behavior of this change is that the tryserver master no longer tries
to resolve revisions to LKGR when trying jobs.
BUG=372499, 386667
Review URL: https://codereview.chromium.org/394653002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@283469 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServer(Master.Master4):
project_name = 'Chromium Try Server'
master_port = 8028
slave_port = 8128
master_port_alt = 8228
try_job_port = 8328
# Select tree status urls and codereview location.
reply_to = '[email protected]'
base_app_url = 'https://chromium-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
- last_good_url = base_app_url + '/lkgr'
- last_good_blink_url = 'http://blink-status.appspot.com/lkgr'
+ last_good_url = None
+ last_good_blink_url = None
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try'
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/'
|
Remove last good URL for tryserver.chromium
|
## Code Before:
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServer(Master.Master4):
project_name = 'Chromium Try Server'
master_port = 8028
slave_port = 8128
master_port_alt = 8228
try_job_port = 8328
# Select tree status urls and codereview location.
reply_to = '[email protected]'
base_app_url = 'https://chromium-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
last_good_blink_url = 'http://blink-status.appspot.com/lkgr'
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try'
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/'
## Instruction:
Remove last good URL for tryserver.chromium
## Code After:
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServer(Master.Master4):
project_name = 'Chromium Try Server'
master_port = 8028
slave_port = 8128
master_port_alt = 8228
try_job_port = 8328
# Select tree status urls and codereview location.
reply_to = '[email protected]'
base_app_url = 'https://chromium-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = None
last_good_blink_url = None
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try'
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/'
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServer(Master.Master4):
project_name = 'Chromium Try Server'
master_port = 8028
slave_port = 8128
master_port_alt = 8228
try_job_port = 8328
# Select tree status urls and codereview location.
reply_to = '[email protected]'
base_app_url = 'https://chromium-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
- last_good_url = base_app_url + '/lkgr'
- last_good_blink_url = 'http://blink-status.appspot.com/lkgr'
+ last_good_url = None
+ last_good_blink_url = None
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try'
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/'
|
7c65017fa16632f21eb94896a3d7c8d2cce989dd
|
user/admin.py
|
user/admin.py
|
from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'email',
'get_date_joined',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
def get_date_joined(self, user):
return user.profile.joined
get_date_joined.short_description = 'Joined'
get_date_joined.admin_order_field = (
'profile__joined')
|
from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'get_name',
'email',
'get_date_joined',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
def get_date_joined(self, user):
return user.profile.joined
get_date_joined.short_description = 'Joined'
get_date_joined.admin_order_field = (
'profile__joined')
def get_name(self, user):
return user.profile.name
get_name.short_description = 'Name'
get_name.admin_order_field = 'profile__name'
|
Add Profile name to UserAdmin list.
|
Ch23: Add Profile name to UserAdmin list.
|
Python
|
bsd-2-clause
|
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
|
from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
+ 'get_name',
'email',
'get_date_joined',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
def get_date_joined(self, user):
return user.profile.joined
get_date_joined.short_description = 'Joined'
get_date_joined.admin_order_field = (
'profile__joined')
+ def get_name(self, user):
+ return user.profile.name
+ get_name.short_description = 'Name'
+ get_name.admin_order_field = 'profile__name'
+
|
Add Profile name to UserAdmin list.
|
## Code Before:
from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'email',
'get_date_joined',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
def get_date_joined(self, user):
return user.profile.joined
get_date_joined.short_description = 'Joined'
get_date_joined.admin_order_field = (
'profile__joined')
## Instruction:
Add Profile name to UserAdmin list.
## Code After:
from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'get_name',
'email',
'get_date_joined',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
def get_date_joined(self, user):
return user.profile.joined
get_date_joined.short_description = 'Joined'
get_date_joined.admin_order_field = (
'profile__joined')
def get_name(self, user):
return user.profile.name
get_name.short_description = 'Name'
get_name.admin_order_field = 'profile__name'
|
from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
+ 'get_name',
'email',
'get_date_joined',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
def get_date_joined(self, user):
return user.profile.joined
get_date_joined.short_description = 'Joined'
get_date_joined.admin_order_field = (
'profile__joined')
+
+ def get_name(self, user):
+ return user.profile.name
+ get_name.short_description = 'Name'
+ get_name.admin_order_field = 'profile__name'
|
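The get_name column and the 'profile__name' ordering only work because each User has a related Profile exposing name and joined fields. That model is not included in this record, so the following is a hypothetical reconstruction of its minimal shape, inferred from the lookups used in the admin:

from django.conf import settings
from django.db import models

class Profile(models.Model):
    user = models.OneToOneField(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name='profile')
    name = models.CharField(max_length=255)
    joined = models.DateTimeField(auto_now_add=True)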
cbadf5c564d7f5f701499409e2ae77ff90ba477c
|
tests/test_tensorflow.py
|
tests/test_tensorflow.py
|
import unittest
import numpy as np
import tensorflow as tf
from common import gpu_test
class TestTensorflow(unittest.TestCase):
def test_addition(self):
op = tf.add(2, 3)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(5, result)
@gpu_test
def test_gpu(self):
with tf.device('/gpu:0'):
m1 = tf.constant([2.0, 3.0], shape=[1, 2], name='a')
m2 = tf.constant([3.0, 4.0], shape=[2, 1], name='b')
op = tf.matmul(m1, m2)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(np.array(18, dtype=np.float32, ndmin=2), result)
|
import unittest
import numpy as np
import tensorflow as tf
from common import gpu_test
class TestTensorflow(unittest.TestCase):
def test_addition(self):
op = tf.add(2, 3)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(5, result)
def test_conv2d(self):
input = tf.random_normal([1,2,2,1])
filter = tf.random_normal([1,1,1,1])
op = tf.nn.conv2d(input, filter, strides=[1, 1, 1, 1], padding='SAME')
with tf.Session() as sess:
result = sess.run(op)
self.assertEqual(4, len(result.shape))
@gpu_test
def test_gpu(self):
with tf.device('/gpu:0'):
m1 = tf.constant([2.0, 3.0], shape=[1, 2], name='a')
m2 = tf.constant([3.0, 4.0], shape=[2, 1], name='b')
op = tf.matmul(m1, m2)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(np.array(18, dtype=np.float32, ndmin=2), result)
|
Add conv2d test for tensorflow
|
Add conv2d test for tensorflow
|
Python
|
apache-2.0
|
Kaggle/docker-python,Kaggle/docker-python
|
import unittest
import numpy as np
import tensorflow as tf
from common import gpu_test
class TestTensorflow(unittest.TestCase):
def test_addition(self):
op = tf.add(2, 3)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(5, result)
+
+ def test_conv2d(self):
+ input = tf.random_normal([1,2,2,1])
+ filter = tf.random_normal([1,1,1,1])
+
+ op = tf.nn.conv2d(input, filter, strides=[1, 1, 1, 1], padding='SAME')
+ with tf.Session() as sess:
+ result = sess.run(op)
+ self.assertEqual(4, len(result.shape))
@gpu_test
def test_gpu(self):
with tf.device('/gpu:0'):
m1 = tf.constant([2.0, 3.0], shape=[1, 2], name='a')
m2 = tf.constant([3.0, 4.0], shape=[2, 1], name='b')
op = tf.matmul(m1, m2)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(np.array(18, dtype=np.float32, ndmin=2), result)
|
Add conv2d test for tensorflow
|
## Code Before:
import unittest
import numpy as np
import tensorflow as tf
from common import gpu_test
class TestTensorflow(unittest.TestCase):
def test_addition(self):
op = tf.add(2, 3)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(5, result)
@gpu_test
def test_gpu(self):
with tf.device('/gpu:0'):
m1 = tf.constant([2.0, 3.0], shape=[1, 2], name='a')
m2 = tf.constant([3.0, 4.0], shape=[2, 1], name='b')
op = tf.matmul(m1, m2)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(np.array(18, dtype=np.float32, ndmin=2), result)
## Instruction:
Add conv2d test for tensorflow
## Code After:
import unittest
import numpy as np
import tensorflow as tf
from common import gpu_test
class TestTensorflow(unittest.TestCase):
def test_addition(self):
op = tf.add(2, 3)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(5, result)
def test_conv2d(self):
input = tf.random_normal([1,2,2,1])
filter = tf.random_normal([1,1,1,1])
op = tf.nn.conv2d(input, filter, strides=[1, 1, 1, 1], padding='SAME')
with tf.Session() as sess:
result = sess.run(op)
self.assertEqual(4, len(result.shape))
@gpu_test
def test_gpu(self):
with tf.device('/gpu:0'):
m1 = tf.constant([2.0, 3.0], shape=[1, 2], name='a')
m2 = tf.constant([3.0, 4.0], shape=[2, 1], name='b')
op = tf.matmul(m1, m2)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(np.array(18, dtype=np.float32, ndmin=2), result)
|
import unittest
import numpy as np
import tensorflow as tf
from common import gpu_test
class TestTensorflow(unittest.TestCase):
def test_addition(self):
op = tf.add(2, 3)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(5, result)
+
+ def test_conv2d(self):
+ input = tf.random_normal([1,2,2,1])
+ filter = tf.random_normal([1,1,1,1])
+
+ op = tf.nn.conv2d(input, filter, strides=[1, 1, 1, 1], padding='SAME')
+ with tf.Session() as sess:
+ result = sess.run(op)
+ self.assertEqual(4, len(result.shape))
@gpu_test
def test_gpu(self):
with tf.device('/gpu:0'):
m1 = tf.constant([2.0, 3.0], shape=[1, 2], name='a')
m2 = tf.constant([3.0, 4.0], shape=[2, 1], name='b')
op = tf.matmul(m1, m2)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(np.array(18, dtype=np.float32, ndmin=2), result)
|
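The new test only asserts that the result is 4-dimensional, but the shape can be worked out exactly: a [1, 2, 2, 1] NHWC input convolved with a [1, 1, 1, 1] filter at stride 1 and 'SAME' padding keeps the spatial size, so the output is [1, 2, 2, 1]. A standalone check using the same TF 1.x session API as the record:

import tensorflow as tf

x = tf.random_normal([1, 2, 2, 1])     # NHWC input
k = tf.random_normal([1, 1, 1, 1])     # 1x1 filter, 1 input / 1 output channel
y = tf.nn.conv2d(x, k, strides=[1, 1, 1, 1], padding='SAME')

with tf.Session() as sess:
    result = sess.run(y)
print(result.shape)   # (1, 2, 2, 1)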
ea180a007c1a5bfaeb56e6b223610876b0619e63
|
webmaster_verification/views.py
|
webmaster_verification/views.py
|
import logging
logger = logging.getLogger(__name__)
from django.http import Http404
from django.views.generic import TemplateView
import settings
class VerificationView(TemplateView):
"""
This simply adds the verification key to the view context and makes sure
we return a 404 if the key wasn't set for the provider
"""
def get_context_data(self, **kwargs):
context = super(VerificationView, self).get_context_data(**kwargs)
try:
context['%s_verification' % self.provider] = settings.WEBMASTER_VERIFICATION[self.provider]
except KeyError:
raise Http404
except AttributeError:
logger.info("WEBMASTER_VERIFICATION not defined in settings")
raise Http404
return context
class GoogleVerificationView(VerificationView):
template_name = 'webmaster_verification/google_verify_template.html'
provider = 'google'
class BingVerificationView(VerificationView):
template_name = 'webmaster_verification/bing_verify_template.xml'
provider = 'bing'
class MajesticVerificationView(VerificationView):
template_name = 'webmaster_verification/majestic_verify_template.txt'
provider = 'majestic'
|
import logging
logger = logging.getLogger(__name__)
from django.http import Http404
from django.views.generic import TemplateView
import settings
class VerificationView(TemplateView):
"""
This simply adds the verification key to the view context and makes sure
we return a 404 if the key wasn't set for the provider
"""
def get_context_data(self, **kwargs):
context = super(VerificationView, self).get_context_data(**kwargs)
try:
context['%s_verification' % self.provider] = settings.WEBMASTER_VERIFICATION[self.provider]
except KeyError:
raise Http404
except AttributeError:
logger.info("WEBMASTER_VERIFICATION not defined in settings")
raise Http404
return context
class VerificationTextView(VerificationView):
"""
Return proper content type
"""
def render_to_response(self, context, **kwargs):
return super(VerificationTextView, self).render_to_response(
context,
content_type='text/plain',
**kwargs
)
class VerificationXMLView(VerificationView):
"""
Return proper content type
"""
def render_to_response(self, context, **kwargs):
return super(VerificationXMLView, self).render_to_response(
context,
content_type='text/xml',
**kwargs
)
class GoogleVerificationView(VerificationView):
template_name = 'webmaster_verification/google_verify_template.html'
provider = 'google'
class BingVerificationView(VerificationXMLView):
template_name = 'webmaster_verification/bing_verify_template.xml'
provider = 'bing'
class MajesticVerificationView(VerificationTextView):
template_name = 'webmaster_verification/majestic_verify_template.txt'
provider = 'majestic'
|
Use proper content-type for all files
|
Use proper content-type for all files
|
Python
|
bsd-3-clause
|
nkuttler/django-webmaster-verification,nkuttler/django-webmaster-verification
|
import logging
logger = logging.getLogger(__name__)
from django.http import Http404
from django.views.generic import TemplateView
import settings
class VerificationView(TemplateView):
"""
This simply adds the verification key to the view context and makes sure
we return a 404 if the key wasn't set for the provider
"""
def get_context_data(self, **kwargs):
context = super(VerificationView, self).get_context_data(**kwargs)
try:
context['%s_verification' % self.provider] = settings.WEBMASTER_VERIFICATION[self.provider]
except KeyError:
raise Http404
except AttributeError:
logger.info("WEBMASTER_VERIFICATION not defined in settings")
raise Http404
return context
+ class VerificationTextView(VerificationView):
+ """
+ Return proper content type
+ """
+ def render_to_response(self, context, **kwargs):
+ return super(VerificationTextView, self).render_to_response(
+ context,
+ content_type='text/plain',
+ **kwargs
+ )
+
+
+ class VerificationXMLView(VerificationView):
+ """
+ Return proper content type
+ """
+ def render_to_response(self, context, **kwargs):
+ return super(VerificationXMLView, self).render_to_response(
+ context,
+ content_type='text/xml',
+ **kwargs
+ )
+
class GoogleVerificationView(VerificationView):
template_name = 'webmaster_verification/google_verify_template.html'
provider = 'google'
- class BingVerificationView(VerificationView):
+ class BingVerificationView(VerificationXMLView):
template_name = 'webmaster_verification/bing_verify_template.xml'
provider = 'bing'
- class MajesticVerificationView(VerificationView):
+ class MajesticVerificationView(VerificationTextView):
template_name = 'webmaster_verification/majestic_verify_template.txt'
provider = 'majestic'
|
Use proper content-type for all files
|
## Code Before:
import logging
logger = logging.getLogger(__name__)
from django.http import Http404
from django.views.generic import TemplateView
import settings
class VerificationView(TemplateView):
"""
This simply adds the verification key to the view context and makes sure
we return a 404 if the key wasn't set for the provider
"""
def get_context_data(self, **kwargs):
context = super(VerificationView, self).get_context_data(**kwargs)
try:
context['%s_verification' % self.provider] = settings.WEBMASTER_VERIFICATION[self.provider]
except KeyError:
raise Http404
except AttributeError:
logger.info("WEBMASTER_VERIFICATION not defined in settings")
raise Http404
return context
class GoogleVerificationView(VerificationView):
template_name = 'webmaster_verification/google_verify_template.html'
provider = 'google'
class BingVerificationView(VerificationView):
template_name = 'webmaster_verification/bing_verify_template.xml'
provider = 'bing'
class MajesticVerificationView(VerificationView):
template_name = 'webmaster_verification/majestic_verify_template.txt'
provider = 'majestic'
## Instruction:
Use proper content-type for all files
## Code After:
import logging
logger = logging.getLogger(__name__)
from django.http import Http404
from django.views.generic import TemplateView
import settings
class VerificationView(TemplateView):
"""
This simply adds the verification key to the view context and makes sure
we return a 404 if the key wasn't set for the provider
"""
def get_context_data(self, **kwargs):
context = super(VerificationView, self).get_context_data(**kwargs)
try:
context['%s_verification' % self.provider] = settings.WEBMASTER_VERIFICATION[self.provider]
except KeyError:
raise Http404
except AttributeError:
logger.info("WEBMASTER_VERIFICATION not defined in settings")
raise Http404
return context
class VerificationTextView(VerificationView):
"""
Return proper content type
"""
def render_to_response(self, context, **kwargs):
return super(VerificationTextView, self).render_to_response(
context,
content_type='text/plain',
**kwargs
)
class VerificationXMLView(VerificationView):
"""
Return proper content type
"""
def render_to_response(self, context, **kwargs):
return super(VerificationXMLView, self).render_to_response(
context,
content_type='text/xml',
**kwargs
)
class GoogleVerificationView(VerificationView):
template_name = 'webmaster_verification/google_verify_template.html'
provider = 'google'
class BingVerificationView(VerificationXMLView):
template_name = 'webmaster_verification/bing_verify_template.xml'
provider = 'bing'
class MajesticVerificationView(VerificationTextView):
template_name = 'webmaster_verification/majestic_verify_template.txt'
provider = 'majestic'
|
import logging
logger = logging.getLogger(__name__)
from django.http import Http404
from django.views.generic import TemplateView
import settings
class VerificationView(TemplateView):
"""
This simply adds the verification key to the view context and makes sure
we return a 404 if the key wasn't set for the provider
"""
def get_context_data(self, **kwargs):
context = super(VerificationView, self).get_context_data(**kwargs)
try:
context['%s_verification' % self.provider] = settings.WEBMASTER_VERIFICATION[self.provider]
except KeyError:
raise Http404
except AttributeError:
logger.info("WEBMASTER_VERIFICATION not defined in settings")
raise Http404
return context
+ class VerificationTextView(VerificationView):
+ """
+ Return proper content type
+ """
+ def render_to_response(self, context, **kwargs):
+ return super(VerificationTextView, self).render_to_response(
+ context,
+ content_type='text/plain',
+ **kwargs
+ )
+
+
+ class VerificationXMLView(VerificationView):
+ """
+ Return proper content type
+ """
+ def render_to_response(self, context, **kwargs):
+ return super(VerificationXMLView, self).render_to_response(
+ context,
+ content_type='text/xml',
+ **kwargs
+ )
+
class GoogleVerificationView(VerificationView):
template_name = 'webmaster_verification/google_verify_template.html'
provider = 'google'
- class BingVerificationView(VerificationView):
+ class BingVerificationView(VerificationXMLView):
? +++
template_name = 'webmaster_verification/bing_verify_template.xml'
provider = 'bing'
- class MajesticVerificationView(VerificationView):
+ class MajesticVerificationView(VerificationTextView):
? ++++
template_name = 'webmaster_verification/majestic_verify_template.txt'
provider = 'majestic'
|
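This record sets the content type by overriding render_to_response on mixin classes. For comparison only, and not part of the package: newer Django versions let TemplateResponseMixin subclasses declare a content_type attribute, so the same effect can be expressed without an override:

from django.views.generic import TemplateView

class PlainTextTemplateView(TemplateView):
    # TemplateResponseMixin passes content_type through to the TemplateResponse.
    content_type = 'text/plain'
    template_name = 'webmaster_verification/majestic_verify_template.txt'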
a7af81244972ae6ac30bd55260af46b7ce25a6e1
|
pre_commit_hooks/no_commit_to_branch.py
|
pre_commit_hooks/no_commit_to_branch.py
|
from __future__ import print_function
import argparse
import re
from typing import Optional
from typing import Sequence
from typing import Set
from pre_commit_hooks.util import CalledProcessError
from pre_commit_hooks.util import cmd_output
def is_on_branch(protected, patterns=set()):
# type: (Set[str], Set[str]) -> bool
try:
ref_name = cmd_output('git', 'symbolic-ref', 'HEAD')
except CalledProcessError:
return False
chunks = ref_name.strip().split('/')
branch_name = '/'.join(chunks[2:])
return branch_name in protected or any(
re.match(p, branch_name) for p in patterns
)
def main(argv=None): # type: (Optional[Sequence[str]]) -> int
parser = argparse.ArgumentParser()
parser.add_argument(
'-b', '--branch', action='append',
help='branch to disallow commits to, may be specified multiple times',
)
parser.add_argument(
'-p', '--pattern', action='append',
help=(
'regex pattern for branch name to disallow commits to, '
'May be specified multiple times'
),
)
args = parser.parse_args(argv)
protected = set(args.branch or ('master',))
patterns = set(args.pattern or ())
return int(is_on_branch(protected, patterns))
if __name__ == '__main__':
exit(main())
|
from __future__ import print_function
import argparse
import re
from typing import FrozenSet
from typing import Optional
from typing import Sequence
from pre_commit_hooks.util import CalledProcessError
from pre_commit_hooks.util import cmd_output
def is_on_branch(protected, patterns=frozenset()):
# type: (FrozenSet[str], FrozenSet[str]) -> bool
try:
ref_name = cmd_output('git', 'symbolic-ref', 'HEAD')
except CalledProcessError:
return False
chunks = ref_name.strip().split('/')
branch_name = '/'.join(chunks[2:])
return branch_name in protected or any(
re.match(p, branch_name) for p in patterns
)
def main(argv=None): # type: (Optional[Sequence[str]]) -> int
parser = argparse.ArgumentParser()
parser.add_argument(
'-b', '--branch', action='append',
help='branch to disallow commits to, may be specified multiple times',
)
parser.add_argument(
'-p', '--pattern', action='append',
help=(
'regex pattern for branch name to disallow commits to, '
'may be specified multiple times'
),
)
args = parser.parse_args(argv)
protected = frozenset(args.branch or ('master',))
patterns = frozenset(args.pattern or ())
return int(is_on_branch(protected, patterns))
if __name__ == '__main__':
exit(main())
|
Make optional argument use an immutable set for the default value in no-commit-to-branch. Make other sets immutable to satisfy type-checking and be consistent
|
Make optional argument use an immutable set for the default value
in no-commit-to-branch. Make other sets immutable to satisfy type-checking
and be consistent
|
Python
|
mit
|
pre-commit/pre-commit-hooks
|
from __future__ import print_function
import argparse
import re
+ from typing import FrozenSet
from typing import Optional
from typing import Sequence
- from typing import Set
from pre_commit_hooks.util import CalledProcessError
from pre_commit_hooks.util import cmd_output
- def is_on_branch(protected, patterns=set()):
+ def is_on_branch(protected, patterns=frozenset()):
- # type: (Set[str], Set[str]) -> bool
+ # type: (FrozenSet[str], FrozenSet[str]) -> bool
try:
ref_name = cmd_output('git', 'symbolic-ref', 'HEAD')
except CalledProcessError:
return False
chunks = ref_name.strip().split('/')
branch_name = '/'.join(chunks[2:])
return branch_name in protected or any(
re.match(p, branch_name) for p in patterns
)
def main(argv=None): # type: (Optional[Sequence[str]]) -> int
parser = argparse.ArgumentParser()
parser.add_argument(
'-b', '--branch', action='append',
help='branch to disallow commits to, may be specified multiple times',
)
parser.add_argument(
'-p', '--pattern', action='append',
help=(
'regex pattern for branch name to disallow commits to, '
- 'May be specified multiple times'
+ 'may be specified multiple times'
),
)
args = parser.parse_args(argv)
- protected = set(args.branch or ('master',))
+ protected = frozenset(args.branch or ('master',))
- patterns = set(args.pattern or ())
+ patterns = frozenset(args.pattern or ())
return int(is_on_branch(protected, patterns))
if __name__ == '__main__':
exit(main())
|
Make optional argument use an immutable set for the default value in no-commit-to-branch. Make other sets immutable to satisfy type-checking and be consistent
|
## Code Before:
from __future__ import print_function
import argparse
import re
from typing import Optional
from typing import Sequence
from typing import Set
from pre_commit_hooks.util import CalledProcessError
from pre_commit_hooks.util import cmd_output
def is_on_branch(protected, patterns=set()):
# type: (Set[str], Set[str]) -> bool
try:
ref_name = cmd_output('git', 'symbolic-ref', 'HEAD')
except CalledProcessError:
return False
chunks = ref_name.strip().split('/')
branch_name = '/'.join(chunks[2:])
return branch_name in protected or any(
re.match(p, branch_name) for p in patterns
)
def main(argv=None): # type: (Optional[Sequence[str]]) -> int
parser = argparse.ArgumentParser()
parser.add_argument(
'-b', '--branch', action='append',
help='branch to disallow commits to, may be specified multiple times',
)
parser.add_argument(
'-p', '--pattern', action='append',
help=(
'regex pattern for branch name to disallow commits to, '
'May be specified multiple times'
),
)
args = parser.parse_args(argv)
protected = set(args.branch or ('master',))
patterns = set(args.pattern or ())
return int(is_on_branch(protected, patterns))
if __name__ == '__main__':
exit(main())
## Instruction:
Make optional argument use an immutable set for the default value in no-commit-to-branch. Make other sets immutable to satisfy type-checking and be consistent
## Code After:
from __future__ import print_function
import argparse
import re
from typing import FrozenSet
from typing import Optional
from typing import Sequence
from pre_commit_hooks.util import CalledProcessError
from pre_commit_hooks.util import cmd_output
def is_on_branch(protected, patterns=frozenset()):
# type: (FrozenSet[str], FrozenSet[str]) -> bool
try:
ref_name = cmd_output('git', 'symbolic-ref', 'HEAD')
except CalledProcessError:
return False
chunks = ref_name.strip().split('/')
branch_name = '/'.join(chunks[2:])
return branch_name in protected or any(
re.match(p, branch_name) for p in patterns
)
def main(argv=None): # type: (Optional[Sequence[str]]) -> int
parser = argparse.ArgumentParser()
parser.add_argument(
'-b', '--branch', action='append',
help='branch to disallow commits to, may be specified multiple times',
)
parser.add_argument(
'-p', '--pattern', action='append',
help=(
'regex pattern for branch name to disallow commits to, '
'may be specified multiple times'
),
)
args = parser.parse_args(argv)
protected = frozenset(args.branch or ('master',))
patterns = frozenset(args.pattern or ())
return int(is_on_branch(protected, patterns))
if __name__ == '__main__':
exit(main())
|
from __future__ import print_function
import argparse
import re
+ from typing import FrozenSet
from typing import Optional
from typing import Sequence
- from typing import Set
from pre_commit_hooks.util import CalledProcessError
from pre_commit_hooks.util import cmd_output
- def is_on_branch(protected, patterns=set()):
+ def is_on_branch(protected, patterns=frozenset()):
? ++++++
- # type: (Set[str], Set[str]) -> bool
+ # type: (FrozenSet[str], FrozenSet[str]) -> bool
? ++++++ ++++++
try:
ref_name = cmd_output('git', 'symbolic-ref', 'HEAD')
except CalledProcessError:
return False
chunks = ref_name.strip().split('/')
branch_name = '/'.join(chunks[2:])
return branch_name in protected or any(
re.match(p, branch_name) for p in patterns
)
def main(argv=None): # type: (Optional[Sequence[str]]) -> int
parser = argparse.ArgumentParser()
parser.add_argument(
'-b', '--branch', action='append',
help='branch to disallow commits to, may be specified multiple times',
)
parser.add_argument(
'-p', '--pattern', action='append',
help=(
'regex pattern for branch name to disallow commits to, '
- 'May be specified multiple times'
? ^
+ 'may be specified multiple times'
? ^
),
)
args = parser.parse_args(argv)
- protected = set(args.branch or ('master',))
+ protected = frozenset(args.branch or ('master',))
? ++++++
- patterns = set(args.pattern or ())
+ patterns = frozenset(args.pattern or ())
? ++++++
return int(is_on_branch(protected, patterns))
if __name__ == '__main__':
exit(main())
|
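The change above swaps a mutable set() default for frozenset(). A minimal, self-contained illustration (not from the record) of why the mutable default is a trap: the default object is created once at definition time and shared across calls.
def remember_bad(item, seen=set()):
    seen.add(item)                 # mutates the single shared default set
    return sorted(seen)
def remember_good(item, seen=frozenset()):
    return sorted(seen | {item})   # builds a new set; the default stays empty
print(remember_bad('a'))     # ['a']
print(remember_bad('b'))     # ['a', 'b']  <- 'a' leaked from the first call
print(remember_good('a'))    # ['a']
print(remember_good('b'))    # ['b']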
6c3f869150e5797c06b5f63758280b60e296d658
|
core/admin.py
|
core/admin.py
|
from django.contrib import admin
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
class NavigatorLoginForm(AdminAuthenticationForm):
username = forms.CharField(widget=forms.TextInput(attrs={'autocomplete': 'off'}))
admin.site.login_form = NavigatorLoginForm
|
from django.contrib import admin
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
class NavigatorLoginForm(AdminAuthenticationForm):
username = forms.CharField(widget=forms.TextInput(attrs={'autocomplete': 'off'}))
admin.site.login_form = NavigatorLoginForm
def get_actions_replacer(orig_func):
def fixed_get_actions(self, request):
"""
Remove the delete action (if present) if user does not have the
necessary permission
"""
# Get the base actions
actions = orig_func(self, request)
# Get the app label and model name to form the permission name
app_label = self.model._meta.app_label
model_name = self.model._meta.model_name
perm = "%s.delete_%s" % (app_label, model_name)
# If the user does not have the specific delete perm, remove the action
if not request.user.has_perm(perm):
if 'delete_selected' in actions:
del actions['delete_selected']
return actions
return fixed_get_actions
admin.ModelAdmin.get_actions = get_actions_replacer(admin.ModelAdmin.get_actions)
|
Remove the bulk delete action if the user does not have delete permissions on the model being viewed
|
Remove the bulk delete action if the user does not have delete permissions on the model being viewed
|
Python
|
mit
|
uktrade/navigator,uktrade/navigator,uktrade/navigator,uktrade/navigator
|
from django.contrib import admin
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
class NavigatorLoginForm(AdminAuthenticationForm):
username = forms.CharField(widget=forms.TextInput(attrs={'autocomplete': 'off'}))
admin.site.login_form = NavigatorLoginForm
+
+ def get_actions_replacer(orig_func):
+ def fixed_get_actions(self, request):
+ """
+ Remove the delete action (if present) if user does not have the
+ necessary permission
+ """
+
+ # Get the base actions
+ actions = orig_func(self, request)
+ # Get the app label and model name to form the permission name
+ app_label = self.model._meta.app_label
+ model_name = self.model._meta.model_name
+ perm = "%s.delete_%s" % (app_label, model_name)
+ # If the user does not have the specific delete perm, remove the action
+ if not request.user.has_perm(perm):
+ if 'delete_selected' in actions:
+ del actions['delete_selected']
+
+ return actions
+ return fixed_get_actions
+
+ admin.ModelAdmin.get_actions = get_actions_replacer(admin.ModelAdmin.get_actions)
+
|
Remove the bulk delete action if the user does not have delete permissions on the model being viewed
|
## Code Before:
from django.contrib import admin
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
class NavigatorLoginForm(AdminAuthenticationForm):
username = forms.CharField(widget=forms.TextInput(attrs={'autocomplete': 'off'}))
admin.site.login_form = NavigatorLoginForm
## Instruction:
Remove the bulk delete action if the user does not have delete permissions on the model being viewed
## Code After:
from django.contrib import admin
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
class NavigatorLoginForm(AdminAuthenticationForm):
username = forms.CharField(widget=forms.TextInput(attrs={'autocomplete': 'off'}))
admin.site.login_form = NavigatorLoginForm
def get_actions_replacer(orig_func):
def fixed_get_actions(self, request):
"""
Remove the delete action (if present) if user does not have the
necessary permission
"""
# Get the base actions
actions = orig_func(self, request)
# Get the app label and model name to form the permission name
app_label = self.model._meta.app_label
model_name = self.model._meta.model_name
perm = "%s.delete_%s" % (app_label, model_name)
# If the user does not have the specific delete perm, remove the action
if not request.user.has_perm(perm):
if 'delete_selected' in actions:
del actions['delete_selected']
return actions
return fixed_get_actions
admin.ModelAdmin.get_actions = get_actions_replacer(admin.ModelAdmin.get_actions)
|
from django.contrib import admin
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
class NavigatorLoginForm(AdminAuthenticationForm):
username = forms.CharField(widget=forms.TextInput(attrs={'autocomplete': 'off'}))
admin.site.login_form = NavigatorLoginForm
+
+
+ def get_actions_replacer(orig_func):
+ def fixed_get_actions(self, request):
+ """
+ Remove the delete action (if present) if user does not have the
+ necessary permission
+ """
+
+ # Get the base actions
+ actions = orig_func(self, request)
+ # Get the app label and model name to form the permission name
+ app_label = self.model._meta.app_label
+ model_name = self.model._meta.model_name
+ perm = "%s.delete_%s" % (app_label, model_name)
+ # If the user does not have the specific delete perm, remove the action
+ if not request.user.has_perm(perm):
+ if 'delete_selected' in actions:
+ del actions['delete_selected']
+
+ return actions
+ return fixed_get_actions
+
+ admin.ModelAdmin.get_actions = get_actions_replacer(admin.ModelAdmin.get_actions)
|
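The record above patches ModelAdmin.get_actions globally. When the check is only needed for one admin, the same idea can be scoped to a single ModelAdmin subclass instead; a hedged sketch, with the app and model names ('reports', 'report') invented for illustration:
from django.contrib import admin
class ReportAdmin(admin.ModelAdmin):
    def get_actions(self, request):
        actions = super(ReportAdmin, self).get_actions(request)
        # drop the bulk delete action unless the user holds the delete permission
        if not request.user.has_perm('reports.delete_report'):
            actions.pop('delete_selected', None)
        return actions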
57b4d39749021305a2d5850e642537224d30611f
|
requests/hooks.py
|
requests/hooks.py
|
import traceback
HOOKS = ('args', 'pre_request', 'pre_send', 'post_request', 'response')
def dispatch_hook(key, hooks, hook_data):
"""Dispatches a hook dictionary on a given piece of data."""
hooks = hooks or dict()
if key in hooks:
hooks = hooks.get(key)
if hasattr(hooks, '__call__'):
hooks = [hooks]
for hook in hooks:
try:
_hook_data = hook(hook_data)
if _hook_data is not None:
hook_data = _hook_data
except Exception:
traceback.print_exc()
return hook_data
|
HOOKS = ('args', 'pre_request', 'pre_send', 'post_request', 'response')
def dispatch_hook(key, hooks, hook_data):
"""Dispatches a hook dictionary on a given piece of data."""
hooks = hooks or dict()
if key in hooks:
hooks = hooks.get(key)
if hasattr(hooks, '__call__'):
hooks = [hooks]
for hook in hooks:
_hook_data = hook(hook_data)
if _hook_data is not None:
hook_data = _hook_data
return hook_data
|
Remove exception eating from dispatch_hook.
|
Remove exception eating from dispatch_hook.
|
Python
|
isc
|
Bluehorn/requests,revolunet/requests,revolunet/requests,psf/requests
|
-
- import traceback
HOOKS = ('args', 'pre_request', 'pre_send', 'post_request', 'response')
def dispatch_hook(key, hooks, hook_data):
"""Dispatches a hook dictionary on a given piece of data."""
hooks = hooks or dict()
if key in hooks:
hooks = hooks.get(key)
if hasattr(hooks, '__call__'):
hooks = [hooks]
for hook in hooks:
- try:
- _hook_data = hook(hook_data)
+ _hook_data = hook(hook_data)
- if _hook_data is not None:
+ if _hook_data is not None:
- hook_data = _hook_data
+ hook_data = _hook_data
- except Exception:
- traceback.print_exc()
return hook_data
|
Remove exception eating from dispatch_hook.
|
## Code Before:
import traceback
HOOKS = ('args', 'pre_request', 'pre_send', 'post_request', 'response')
def dispatch_hook(key, hooks, hook_data):
"""Dispatches a hook dictionary on a given piece of data."""
hooks = hooks or dict()
if key in hooks:
hooks = hooks.get(key)
if hasattr(hooks, '__call__'):
hooks = [hooks]
for hook in hooks:
try:
_hook_data = hook(hook_data)
if _hook_data is not None:
hook_data = _hook_data
except Exception:
traceback.print_exc()
return hook_data
## Instruction:
Remove exception eating from dispatch_hook.
## Code After:
HOOKS = ('args', 'pre_request', 'pre_send', 'post_request', 'response')
def dispatch_hook(key, hooks, hook_data):
"""Dispatches a hook dictionary on a given piece of data."""
hooks = hooks or dict()
if key in hooks:
hooks = hooks.get(key)
if hasattr(hooks, '__call__'):
hooks = [hooks]
for hook in hooks:
_hook_data = hook(hook_data)
if _hook_data is not None:
hook_data = _hook_data
return hook_data
|
-
- import traceback
HOOKS = ('args', 'pre_request', 'pre_send', 'post_request', 'response')
def dispatch_hook(key, hooks, hook_data):
"""Dispatches a hook dictionary on a given piece of data."""
hooks = hooks or dict()
if key in hooks:
hooks = hooks.get(key)
if hasattr(hooks, '__call__'):
hooks = [hooks]
for hook in hooks:
- try:
- _hook_data = hook(hook_data)
? ----
+ _hook_data = hook(hook_data)
- if _hook_data is not None:
? ----
+ if _hook_data is not None:
- hook_data = _hook_data
? ----
+ hook_data = _hook_data
- except Exception:
- traceback.print_exc()
return hook_data
|
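A short usage sketch of the dispatch pattern above, assuming dispatch_hook from the record is importable; the hook function and payload are made up for illustration. After the change, any exception raised inside a hook propagates to the caller instead of being printed and swallowed.
def add_header(request_data):
    request_data['headers']['X-Example'] = '1'
    return request_data
hooks = {'pre_request': add_header}        # a single callable is also accepted
data = dispatch_hook('pre_request', hooks, {'headers': {}})
print(data['headers'])                     # {'X-Example': '1'}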
5fbf5bac84a7ededde99e109d206681af99c112c
|
sort/bubble_sort_optimized/python/bubble_sort_optimized.py
|
sort/bubble_sort_optimized/python/bubble_sort_optimized.py
|
class Solution:
def bubbleSortOptimized(self, nums: List[int]) -> None:
if len(nums) == 1:
return nums
else:
swapped = False
while not swapped:
swapped = True
for i in range(0, len(nums)-1):
if nums[i] > nums[i+1]:
swapped = False
nums[i], nums[i+1] = nums[i+1], nums[i]
|
class Solution:
def bubbleSortOptimized(self, nums):
if len(nums) == 1:
return nums
else:
swapped = False
while not swapped:
swapped = True
for i in range(0, len(nums)-1):
if nums[i] > nums[i+1]:
swapped = False
nums[i], nums[i+1] = nums[i+1], nums[i]
|
Add Dijkstra's algorithm and depth-first search in Python

|
Add Dijkstra's algorithm and depth-first search in Python
|
Python
|
cc0-1.0
|
ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms
|
class Solution:
- def bubbleSortOptimized(self, nums: List[int]) -> None:
+ def bubbleSortOptimized(self, nums):
if len(nums) == 1:
return nums
else:
swapped = False
while not swapped:
swapped = True
for i in range(0, len(nums)-1):
if nums[i] > nums[i+1]:
swapped = False
nums[i], nums[i+1] = nums[i+1], nums[i]
|
Add Dijkstra's algorithm and depth-first search in Python
|
## Code Before:
class Solution:
def bubbleSortOptimized(self, nums: List[int]) -> None:
if len(nums) == 1:
return nums
else:
swapped = False
while not swapped:
swapped = True
for i in range(0, len(nums)-1):
if nums[i] > nums[i+1]:
swapped = False
nums[i], nums[i+1] = nums[i+1], nums[i]
## Instruction:
Add Dijkstra's algorithm and depth-first search in Python
## Code After:
class Solution:
def bubbleSortOptimized(self, nums):
if len(nums) == 1:
return nums
else:
swapped = False
while not swapped:
swapped = True
for i in range(0, len(nums)-1):
if nums[i] > nums[i+1]:
swapped = False
nums[i], nums[i+1] = nums[i+1], nums[i]
|
class Solution:
- def bubbleSortOptimized(self, nums: List[int]) -> None:
? --------------------
+ def bubbleSortOptimized(self, nums):
? +
if len(nums) == 1:
return nums
else:
swapped = False
while not swapped:
swapped = True
for i in range(0, len(nums)-1):
if nums[i] > nums[i+1]:
swapped = False
nums[i], nums[i+1] = nums[i+1], nums[i]
|
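A quick usage sketch for the method above (the sample list is made up): for inputs longer than one element it sorts in place and returns None, so callers read the mutated list rather than the return value.
nums = [5, 2, 4, 1, 3]
Solution().bubbleSortOptimized(nums)   # sorts in place
print(nums)                            # [1, 2, 3, 4, 5]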
b5bf3a92309dac40deefbaea555e4d96aa6bca62
|
jesusmtnez/python/kata/tests/test_game.py
|
jesusmtnez/python/kata/tests/test_game.py
|
import unittest
from game import Game
class BowlingGameTest(unittest.TestCase):
def setUp(self):
self.g = Game()
def tearDown(self):
self.g = None
def test_gutter_game(self):
for i in range(20):
self.g.roll(0);
self.assertEqual(0, self.g.score())
def test_all_ones(self):
for i in range(20):
self.g.roll(1)
self.assertEqual(20, self.g.score())
if __name__ == '__main__':
unittest.main()
|
import unittest
from game import Game
class BowlingGameTest(unittest.TestCase):
def setUp(self):
self.g = Game()
def tearDown(self):
self.g = None
def _roll_many(self, n, pins):
"Roll 'n' times a roll of 'pins' pins"
for i in range(n):
self.g.roll(pins)
def test_gutter_game(self):
self._roll_many(20, 0)
self.assertEqual(0, self.g.score())
def test_all_ones(self):
self._roll_many(20, 1)
self.assertEqual(20, self.g.score())
if __name__ == '__main__':
unittest.main()
|
Refactor code to roll many times
|
[Python] Refactor code to roll many times
|
Python
|
mit
|
JesusMtnez/devexperto-challenge,JesusMtnez/devexperto-challenge
|
import unittest
from game import Game
class BowlingGameTest(unittest.TestCase):
def setUp(self):
self.g = Game()
def tearDown(self):
self.g = None
+ def _roll_many(self, n, pins):
+ "Roll 'n' times a roll of 'pins' pins"
+ for i in range(n):
+ self.g.roll(pins)
+
def test_gutter_game(self):
+ self._roll_many(20, 0)
- for i in range(20):
- self.g.roll(0);
-
self.assertEqual(0, self.g.score())
def test_all_ones(self):
+ self._roll_many(20, 1)
- for i in range(20):
- self.g.roll(1)
-
self.assertEqual(20, self.g.score())
if __name__ == '__main__':
unittest.main()
|
Refactor code to roll many times
|
## Code Before:
import unittest
from game import Game
class BowlingGameTest(unittest.TestCase):
def setUp(self):
self.g = Game()
def tearDown(self):
self.g = None
def test_gutter_game(self):
for i in range(20):
self.g.roll(0);
self.assertEqual(0, self.g.score())
def test_all_ones(self):
for i in range(20):
self.g.roll(1)
self.assertEqual(20, self.g.score())
if __name__ == '__main__':
unittest.main()
## Instruction:
Refactor code to roll many times
## Code After:
import unittest
from game import Game
class BowlingGameTest(unittest.TestCase):
def setUp(self):
self.g = Game()
def tearDown(self):
self.g = None
def _roll_many(self, n, pins):
"Roll 'n' times a roll of 'pins' pins"
for i in range(n):
self.g.roll(pins)
def test_gutter_game(self):
self._roll_many(20, 0)
self.assertEqual(0, self.g.score())
def test_all_ones(self):
self._roll_many(20, 1)
self.assertEqual(20, self.g.score())
if __name__ == '__main__':
unittest.main()
|
import unittest
from game import Game
class BowlingGameTest(unittest.TestCase):
def setUp(self):
self.g = Game()
def tearDown(self):
self.g = None
+ def _roll_many(self, n, pins):
+ "Roll 'n' times a roll of 'pins' pins"
+ for i in range(n):
+ self.g.roll(pins)
+
def test_gutter_game(self):
+ self._roll_many(20, 0)
- for i in range(20):
- self.g.roll(0);
-
self.assertEqual(0, self.g.score())
def test_all_ones(self):
+ self._roll_many(20, 1)
- for i in range(20):
- self.g.roll(1)
-
self.assertEqual(20, self.g.score())
if __name__ == '__main__':
unittest.main()
|
2bdbcf41cc99f9c7430f8d429cc8d2a5e2ee6701
|
pyrho/NEURON/minimal.py
|
pyrho/NEURON/minimal.py
|
cell = h.SectionList()
soma = h.Section(name='soma') #create soma
soma.push()
#h.topology()
# Geometry
soma.nseg = 1
soma.L = 20
soma.diam = 20
# Biophysics
for sec in h.allsec():
sec.Ra = 100
sec.cm = 1
sec.insert('pas')
#sec.insert('hh') # insert hh
cell.append(sec)
#h('objref rho')
#h('rho = new ChR(0.5)')
#h.rho.Er = Prot.phis[0]
#setattr(h.rho, 'del', Prot.pulses[0][0]) # rho.del will not work because del is reserved word in python
#h.rho.ton = Prot.onDs[0]
#h.rho.toff = Prot.offDs[0]
#h.rho.num = Prot.nPulses
#h.rho.gbar = RhO.g/20000
# Pick a rhodopsin to record from
#rhoRec = h.ChR_apic.o(7)
h.pop_section()
return cell
|
from neuron import h
cell = h.SectionList()
soma = h.Section(name='soma') #create soma
soma.push()
#h.topology()
# Geometry
soma.nseg = 1
soma.L = 20
soma.diam = 20
# Biophysics
for sec in h.allsec():
sec.Ra = 100
sec.cm = 1
sec.insert('pas')
#sec.insert('hh') # insert hh
cell.append(sec)
#h('objref rho')
#h('rho = new ChR(0.5)')
#h.rho.Er = Prot.phis[0]
#setattr(h.rho, 'del', Prot.pulses[0][0]) # rho.del will not work because del is reserved word in python
#h.rho.ton = Prot.onDs[0]
#h.rho.toff = Prot.offDs[0]
#h.rho.num = Prot.nPulses
#h.rho.gbar = RhO.g/20000
# Pick a rhodopsin to record from
#rhoRec = h.ChR_apic.o(7)
h.pop_section()
#return cell
|
Add import to stop warnings
|
Add import to stop warnings
|
Python
|
bsd-3-clause
|
ProjectPyRhO/PyRhO,ProjectPyRhO/PyRhO
|
+ from neuron import h
cell = h.SectionList()
soma = h.Section(name='soma') #create soma
soma.push()
#h.topology()
# Geometry
soma.nseg = 1
soma.L = 20
soma.diam = 20
# Biophysics
for sec in h.allsec():
sec.Ra = 100
sec.cm = 1
sec.insert('pas')
#sec.insert('hh') # insert hh
cell.append(sec)
#h('objref rho')
#h('rho = new ChR(0.5)')
#h.rho.Er = Prot.phis[0]
#setattr(h.rho, 'del', Prot.pulses[0][0]) # rho.del will not work because del is reserved word in python
#h.rho.ton = Prot.onDs[0]
#h.rho.toff = Prot.offDs[0]
#h.rho.num = Prot.nPulses
#h.rho.gbar = RhO.g/20000
# Pick a rhodopsin to record from
#rhoRec = h.ChR_apic.o(7)
h.pop_section()
- return cell
+ #return cell
|
Add import to stop warnings
|
## Code Before:
cell = h.SectionList()
soma = h.Section(name='soma') #create soma
soma.push()
#h.topology()
# Geometry
soma.nseg = 1
soma.L = 20
soma.diam = 20
# Biophysics
for sec in h.allsec():
sec.Ra = 100
sec.cm = 1
sec.insert('pas')
#sec.insert('hh') # insert hh
cell.append(sec)
#h('objref rho')
#h('rho = new ChR(0.5)')
#h.rho.Er = Prot.phis[0]
#setattr(h.rho, 'del', Prot.pulses[0][0]) # rho.del will not work because del is reserved word in python
#h.rho.ton = Prot.onDs[0]
#h.rho.toff = Prot.offDs[0]
#h.rho.num = Prot.nPulses
#h.rho.gbar = RhO.g/20000
# Pick a rhodopsin to record from
#rhoRec = h.ChR_apic.o(7)
h.pop_section()
return cell
## Instruction:
Add import to stop warnings
## Code After:
from neuron import h
cell = h.SectionList()
soma = h.Section(name='soma') #create soma
soma.push()
#h.topology()
# Geometry
soma.nseg = 1
soma.L = 20
soma.diam = 20
# Biophysics
for sec in h.allsec():
sec.Ra = 100
sec.cm = 1
sec.insert('pas')
#sec.insert('hh') # insert hh
cell.append(sec)
#h('objref rho')
#h('rho = new ChR(0.5)')
#h.rho.Er = Prot.phis[0]
#setattr(h.rho, 'del', Prot.pulses[0][0]) # rho.del will not work because del is reserved word in python
#h.rho.ton = Prot.onDs[0]
#h.rho.toff = Prot.offDs[0]
#h.rho.num = Prot.nPulses
#h.rho.gbar = RhO.g/20000
# Pick a rhodopsin to record from
#rhoRec = h.ChR_apic.o(7)
h.pop_section()
#return cell
|
+ from neuron import h
cell = h.SectionList()
soma = h.Section(name='soma') #create soma
soma.push()
#h.topology()
# Geometry
soma.nseg = 1
soma.L = 20
soma.diam = 20
# Biophysics
for sec in h.allsec():
sec.Ra = 100
sec.cm = 1
sec.insert('pas')
#sec.insert('hh') # insert hh
cell.append(sec)
#h('objref rho')
#h('rho = new ChR(0.5)')
#h.rho.Er = Prot.phis[0]
#setattr(h.rho, 'del', Prot.pulses[0][0]) # rho.del will not work because del is reserved word in python
#h.rho.ton = Prot.onDs[0]
#h.rho.toff = Prot.offDs[0]
#h.rho.num = Prot.nPulses
#h.rho.gbar = RhO.g/20000
# Pick a rhodopsin to record from
#rhoRec = h.ChR_apic.o(7)
h.pop_section()
- return cell
+ #return cell
? +
|
c7723ff6d7f43330786e84802ef0bacf70d4ba67
|
instatrace/commands.py
|
instatrace/commands.py
|
import logging
import os
import sys
import time
from .stats import Histogram, Statistics
log = logging.getLogger("instatrace")
class HistogramsCommand:
@classmethod
def add_subparser(cls, parser):
subparser = parser.add_parser("histograms", help="Stat histograms")
subparser.add_argument("file", nargs="+")
subparser.set_defaults(run=cls.run)
@staticmethod
def run(args):
stats = Statistics()
for filename in args.file:
count = 0
fd = open(filename)
for line in fd.xreadlines():
line = line.strip()
stat = line.split(" ", 2)
stats.add_sample(stat[0], int(stat[1]))
fd.close()
names = stats.statistics.keys()
names.sort()
for name in names:
histogram = stats.statistics.get(name)
histogram.text(sys.stdout)
|
import logging
import os
import sys
import time
from .stats import Histogram, Statistics
log = logging.getLogger("instatrace")
class HistogramsCommand:
@classmethod
def add_subparser(cls, parser):
subparser = parser.add_parser("histograms", help="Stat histograms")
subparser.add_argument("--filter", action="store_true",
help="Filter out any lines that don't contain INSTATRACE")
subparser.add_argument("file", nargs="+")
subparser.set_defaults(run=cls.run,
filter_marker="INSTATRACE: ")
@staticmethod
def run(args):
stats = Statistics()
for filename in args.file:
count = 0
fd = open(filename)
for line in fd.xreadlines():
if args.filter:
pos = line.find(args.filter_marker)
if pos == -1:
continue
line = line[pos+len(args.filter_marker):]
line = line.strip()
stat = line.split(" ", 2)
stats.add_sample(stat[0], int(stat[1]))
fd.close()
names = stats.statistics.keys()
names.sort()
for name in names:
histogram = stats.statistics.get(name)
histogram.text(sys.stdout)
|
Add a --filter flag to histograms
|
Add a --filter flag to histograms
This ignores any lines in the input that don't contain "INSTATRACE: "
and removes anything preceding that string before handling the sample.
|
Python
|
mit
|
pteichman/instatrace
|
import logging
import os
import sys
import time
from .stats import Histogram, Statistics
log = logging.getLogger("instatrace")
class HistogramsCommand:
@classmethod
def add_subparser(cls, parser):
subparser = parser.add_parser("histograms", help="Stat histograms")
+ subparser.add_argument("--filter", action="store_true",
+ help="Filter out any lines that don't contain INSTATRACE")
subparser.add_argument("file", nargs="+")
- subparser.set_defaults(run=cls.run)
+ subparser.set_defaults(run=cls.run,
+ filter_marker="INSTATRACE: ")
@staticmethod
def run(args):
stats = Statistics()
for filename in args.file:
count = 0
fd = open(filename)
for line in fd.xreadlines():
+ if args.filter:
+ pos = line.find(args.filter_marker)
+ if pos == -1:
+ continue
+ line = line[pos+len(args.filter_marker):]
+
line = line.strip()
stat = line.split(" ", 2)
stats.add_sample(stat[0], int(stat[1]))
fd.close()
names = stats.statistics.keys()
names.sort()
for name in names:
histogram = stats.statistics.get(name)
histogram.text(sys.stdout)
|
Add a --filter flag to histograms
|
## Code Before:
import logging
import os
import sys
import time
from .stats import Histogram, Statistics
log = logging.getLogger("instatrace")
class HistogramsCommand:
@classmethod
def add_subparser(cls, parser):
subparser = parser.add_parser("histograms", help="Stat histograms")
subparser.add_argument("file", nargs="+")
subparser.set_defaults(run=cls.run)
@staticmethod
def run(args):
stats = Statistics()
for filename in args.file:
count = 0
fd = open(filename)
for line in fd.xreadlines():
line = line.strip()
stat = line.split(" ", 2)
stats.add_sample(stat[0], int(stat[1]))
fd.close()
names = stats.statistics.keys()
names.sort()
for name in names:
histogram = stats.statistics.get(name)
histogram.text(sys.stdout)
## Instruction:
Add a --filter flag to histograms
## Code After:
import logging
import os
import sys
import time
from .stats import Histogram, Statistics
log = logging.getLogger("instatrace")
class HistogramsCommand:
@classmethod
def add_subparser(cls, parser):
subparser = parser.add_parser("histograms", help="Stat histograms")
subparser.add_argument("--filter", action="store_true",
help="Filter out any lines that don't contain INSTATRACE")
subparser.add_argument("file", nargs="+")
subparser.set_defaults(run=cls.run,
filter_marker="INSTATRACE: ")
@staticmethod
def run(args):
stats = Statistics()
for filename in args.file:
count = 0
fd = open(filename)
for line in fd.xreadlines():
if args.filter:
pos = line.find(args.filter_marker)
if pos == -1:
continue
line = line[pos+len(args.filter_marker):]
line = line.strip()
stat = line.split(" ", 2)
stats.add_sample(stat[0], int(stat[1]))
fd.close()
names = stats.statistics.keys()
names.sort()
for name in names:
histogram = stats.statistics.get(name)
histogram.text(sys.stdout)
|
import logging
import os
import sys
import time
from .stats import Histogram, Statistics
log = logging.getLogger("instatrace")
class HistogramsCommand:
@classmethod
def add_subparser(cls, parser):
subparser = parser.add_parser("histograms", help="Stat histograms")
+ subparser.add_argument("--filter", action="store_true",
+ help="Filter out any lines that don't contain INSTATRACE")
subparser.add_argument("file", nargs="+")
- subparser.set_defaults(run=cls.run)
? ^
+ subparser.set_defaults(run=cls.run,
? ^
+ filter_marker="INSTATRACE: ")
@staticmethod
def run(args):
stats = Statistics()
for filename in args.file:
count = 0
fd = open(filename)
for line in fd.xreadlines():
+ if args.filter:
+ pos = line.find(args.filter_marker)
+ if pos == -1:
+ continue
+ line = line[pos+len(args.filter_marker):]
+
line = line.strip()
stat = line.split(" ", 2)
stats.add_sample(stat[0], int(stat[1]))
fd.close()
names = stats.statistics.keys()
names.sort()
for name in names:
histogram = stats.statistics.get(name)
histogram.text(sys.stdout)
|
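The core of the --filter change above is marker-based line filtering; a standalone sketch of just that step, with made-up sample lines:
marker = "INSTATRACE: "
lines = [
    "noise without the marker",
    "2021-01-01 12:00:00 INSTATRACE: db_query_ms 42",
]
for line in lines:
    pos = line.find(marker)
    if pos == -1:
        continue                              # skip lines without the marker
    payload = line[pos + len(marker):].strip()
    name, value = payload.split(" ", 1)
    print(name, int(value))                   # db_query_ms 42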
5768d1ebcfec46e564c8b420773d911c243327ff
|
dddp/msg.py
|
dddp/msg.py
|
"""Django DDP utils for DDP messaging."""
import collections
from django.core.serializers import get_serializer
_SERIALIZER = None
def obj_change_as_msg(obj, msg):
"""Generate a DDP msg for obj with specified msg type."""
global _SERIALIZER
if _SERIALIZER is None:
_SERIALIZER = get_serializer('ddp')()
data = _SERIALIZER.serialize([obj])[0]
name = data['model']
# cast ID as string
if not isinstance(data['pk'], basestring):
data['pk'] = '%d' % data['pk']
payload = {
'msg': msg,
'collection': name,
'id': data['pk'],
}
if msg != 'removed':
payload['fields'] = data['fields']
return (name, payload)
|
"""Django DDP utils for DDP messaging."""
from dddp import THREAD_LOCAL as this
from django.core.serializers import get_serializer
def serializer_factory():
"""Make a new DDP serializer."""
return get_serializer('ddp')()
def obj_change_as_msg(obj, msg):
"""Generate a DDP msg for obj with specified msg type."""
serializer = this.get('serializer', serializer_factory)
data = serializer.serialize([obj])[0]
name = data['model']
# cast ID as string
if not isinstance(data['pk'], basestring):
data['pk'] = '%d' % data['pk']
payload = {
'msg': msg,
'collection': name,
'id': data['pk'],
}
if msg != 'removed':
payload['fields'] = data['fields']
return (name, payload)
|
Fix non-threadsafe failure in serializer - now using thread local serializer instance.
|
Fix non-threadsafe failure in serializer - now using thread local serializer instance.
|
Python
|
mit
|
commoncode/django-ddp,commoncode/django-ddp,commoncode/django-ddp,django-ddp/django-ddp,django-ddp/django-ddp,django-ddp/django-ddp,commoncode/django-ddp,django-ddp/django-ddp
|
"""Django DDP utils for DDP messaging."""
- import collections
+ from dddp import THREAD_LOCAL as this
from django.core.serializers import get_serializer
- _SERIALIZER = None
+
+ def serializer_factory():
+ """Make a new DDP serializer."""
+ return get_serializer('ddp')()
+
def obj_change_as_msg(obj, msg):
"""Generate a DDP msg for obj with specified msg type."""
+ serializer = this.get('serializer', serializer_factory)
+ data = serializer.serialize([obj])[0]
- global _SERIALIZER
- if _SERIALIZER is None:
- _SERIALIZER = get_serializer('ddp')()
- data = _SERIALIZER.serialize([obj])[0]
name = data['model']
# cast ID as string
if not isinstance(data['pk'], basestring):
data['pk'] = '%d' % data['pk']
payload = {
'msg': msg,
'collection': name,
'id': data['pk'],
}
if msg != 'removed':
payload['fields'] = data['fields']
return (name, payload)
|
Fix non-threadsafe failure in serializer - now using thread local serializer instance.
|
## Code Before:
"""Django DDP utils for DDP messaging."""
import collections
from django.core.serializers import get_serializer
_SERIALIZER = None
def obj_change_as_msg(obj, msg):
"""Generate a DDP msg for obj with specified msg type."""
global _SERIALIZER
if _SERIALIZER is None:
_SERIALIZER = get_serializer('ddp')()
data = _SERIALIZER.serialize([obj])[0]
name = data['model']
# cast ID as string
if not isinstance(data['pk'], basestring):
data['pk'] = '%d' % data['pk']
payload = {
'msg': msg,
'collection': name,
'id': data['pk'],
}
if msg != 'removed':
payload['fields'] = data['fields']
return (name, payload)
## Instruction:
Fix non-threadsafe failure in serializer - now using thread local serializer instance.
## Code After:
"""Django DDP utils for DDP messaging."""
from dddp import THREAD_LOCAL as this
from django.core.serializers import get_serializer
def serializer_factory():
"""Make a new DDP serializer."""
return get_serializer('ddp')()
def obj_change_as_msg(obj, msg):
"""Generate a DDP msg for obj with specified msg type."""
serializer = this.get('serializer', serializer_factory)
data = serializer.serialize([obj])[0]
name = data['model']
# cast ID as string
if not isinstance(data['pk'], basestring):
data['pk'] = '%d' % data['pk']
payload = {
'msg': msg,
'collection': name,
'id': data['pk'],
}
if msg != 'removed':
payload['fields'] = data['fields']
return (name, payload)
|
"""Django DDP utils for DDP messaging."""
- import collections
+ from dddp import THREAD_LOCAL as this
from django.core.serializers import get_serializer
- _SERIALIZER = None
+
+ def serializer_factory():
+ """Make a new DDP serializer."""
+ return get_serializer('ddp')()
+
def obj_change_as_msg(obj, msg):
"""Generate a DDP msg for obj with specified msg type."""
+ serializer = this.get('serializer', serializer_factory)
+ data = serializer.serialize([obj])[0]
- global _SERIALIZER
- if _SERIALIZER is None:
- _SERIALIZER = get_serializer('ddp')()
- data = _SERIALIZER.serialize([obj])[0]
name = data['model']
# cast ID as string
if not isinstance(data['pk'], basestring):
data['pk'] = '%d' % data['pk']
payload = {
'msg': msg,
'collection': name,
'id': data['pk'],
}
if msg != 'removed':
payload['fields'] = data['fields']
return (name, payload)
|
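The record above relies on dddp's THREAD_LOCAL helper; the underlying idea is a lazily built, per-thread cache. A generic, hedged version using only the standard library (expensive_factory here stands in for building the real serializer):
import threading
_local = threading.local()
def expensive_factory():
    return object()           # stand-in for get_serializer('ddp')()
def get_cached():
    try:
        return _local.value
    except AttributeError:    # first use on this thread
        _local.value = expensive_factory()
        return _local.value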
ee070606be405b86bfcc6e6796bbe322a78511ed
|
ui/assetmanager.py
|
ui/assetmanager.py
|
"""Loads and manages art assets"""
import pyglet
_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
"black_key_down.png",
"black_key_up.png",
"white_key_down.png",
"white_key_up.png",
"staff_line.png",
]
class Assets(object):
_loadedAssets = None
@staticmethod
def loadAssets():
Assets._loadedAssets = dict()
Assets._updateResourcePath()
for f in _ASSET_FILE_NAMES:
Assets.loadAsset(f)
@staticmethod
def loadAsset(filename):
Assets._loadedAssets[filename] = pyglet.resource.image(filename)
@staticmethod
def _updateResourcePath():
for p in _ASSET_PATHS:
pyglet.resource.path.append(p)
pyglet.resource.reindex()
@staticmethod
def get(filename):
if Assets._loadedAssets is None:
raise RuntimeError("You must initialize the asset manager before "
"retrieving assets")
return Assets._loadedAssets[filename]
|
"""Loads and manages art assets"""
import pyglet
import os
_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
"black_key_down.png",
"black_key_up.png",
"white_key_down.png",
"white_key_up.png",
"staff_line.png",
]
class Assets(object):
_loadedAssets = None
@staticmethod
def loadAssets():
Assets._loadedAssets = dict()
Assets._updateResourcePath()
for f in _ASSET_FILE_NAMES:
Assets.loadAsset(f)
@staticmethod
def loadAsset(filename):
Assets._loadedAssets[filename] = pyglet.resource.image(filename)
@staticmethod
def _updateResourcePath():
for p in _ASSET_PATHS:
pyglet.resource.path.append(os.path.join(os.getcwd(), p))
pyglet.resource.reindex()
@staticmethod
def get(filename):
if Assets._loadedAssets is None:
raise RuntimeError("You must initialize the asset manager before "
"retrieving assets")
return Assets._loadedAssets[filename]
|
Use absolute resource path in Pyglet
|
Use absolute resource path in Pyglet
It appears that a recent change in Pyglet causes relative paths to fail here.
|
Python
|
bsd-2-clause
|
aschmied/keyzer
|
"""Loads and manages art assets"""
import pyglet
+ import os
_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
"black_key_down.png",
"black_key_up.png",
"white_key_down.png",
"white_key_up.png",
"staff_line.png",
]
class Assets(object):
_loadedAssets = None
@staticmethod
def loadAssets():
Assets._loadedAssets = dict()
Assets._updateResourcePath()
for f in _ASSET_FILE_NAMES:
Assets.loadAsset(f)
@staticmethod
def loadAsset(filename):
Assets._loadedAssets[filename] = pyglet.resource.image(filename)
@staticmethod
def _updateResourcePath():
for p in _ASSET_PATHS:
- pyglet.resource.path.append(p)
+ pyglet.resource.path.append(os.path.join(os.getcwd(), p))
pyglet.resource.reindex()
@staticmethod
def get(filename):
if Assets._loadedAssets is None:
raise RuntimeError("You must initialize the asset manager before "
"retrieving assets")
return Assets._loadedAssets[filename]
|
Use absolute resource path in Pyglet
|
## Code Before:
"""Loads and manages art assets"""
import pyglet
_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
"black_key_down.png",
"black_key_up.png",
"white_key_down.png",
"white_key_up.png",
"staff_line.png",
]
class Assets(object):
_loadedAssets = None
@staticmethod
def loadAssets():
Assets._loadedAssets = dict()
Assets._updateResourcePath()
for f in _ASSET_FILE_NAMES:
Assets.loadAsset(f)
@staticmethod
def loadAsset(filename):
Assets._loadedAssets[filename] = pyglet.resource.image(filename)
@staticmethod
def _updateResourcePath():
for p in _ASSET_PATHS:
pyglet.resource.path.append(p)
pyglet.resource.reindex()
@staticmethod
def get(filename):
if Assets._loadedAssets is None:
raise RuntimeError("You must initialize the asset manager before "
"retrieving assets")
return Assets._loadedAssets[filename]
## Instruction:
Use absolute resource path in Pyglet
## Code After:
"""Loads and manages art assets"""
import pyglet
import os
_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
"black_key_down.png",
"black_key_up.png",
"white_key_down.png",
"white_key_up.png",
"staff_line.png",
]
class Assets(object):
_loadedAssets = None
@staticmethod
def loadAssets():
Assets._loadedAssets = dict()
Assets._updateResourcePath()
for f in _ASSET_FILE_NAMES:
Assets.loadAsset(f)
@staticmethod
def loadAsset(filename):
Assets._loadedAssets[filename] = pyglet.resource.image(filename)
@staticmethod
def _updateResourcePath():
for p in _ASSET_PATHS:
pyglet.resource.path.append(os.path.join(os.getcwd(), p))
pyglet.resource.reindex()
@staticmethod
def get(filename):
if Assets._loadedAssets is None:
raise RuntimeError("You must initialize the asset manager before "
"retrieving assets")
return Assets._loadedAssets[filename]
|
"""Loads and manages art assets"""
import pyglet
+ import os
_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
"black_key_down.png",
"black_key_up.png",
"white_key_down.png",
"white_key_up.png",
"staff_line.png",
]
class Assets(object):
_loadedAssets = None
@staticmethod
def loadAssets():
Assets._loadedAssets = dict()
Assets._updateResourcePath()
for f in _ASSET_FILE_NAMES:
Assets.loadAsset(f)
@staticmethod
def loadAsset(filename):
Assets._loadedAssets[filename] = pyglet.resource.image(filename)
@staticmethod
def _updateResourcePath():
for p in _ASSET_PATHS:
- pyglet.resource.path.append(p)
+ pyglet.resource.path.append(os.path.join(os.getcwd(), p))
? ++++++++++++++++++++++++++ +
pyglet.resource.reindex()
@staticmethod
def get(filename):
if Assets._loadedAssets is None:
raise RuntimeError("You must initialize the asset manager before "
"retrieving assets")
return Assets._loadedAssets[filename]
|
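The fix above anchors the resource directory on the current working directory. A tiny sketch of the two common anchoring choices (the directory name is illustrative); anchoring on the module's own file is the usual alternative when the working directory can vary at runtime:
import os
RES_DIR = "res"
cwd_based = os.path.join(os.getcwd(), RES_DIR)
module_based = os.path.join(os.path.dirname(os.path.abspath(__file__)), RES_DIR)
print(cwd_based)
print(module_based)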
b964c71509d1c562d4080a39bf5fc7333da39608
|
fedora/__init__.py
|
fedora/__init__.py
|
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
from fedora import release
__version__ = release.VERSION
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
|
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
|
Rearrange imports of gettext and release because of dependencies in circular import.
|
Rearrange imports of gettext and release because of dependencies in
circular import.
|
Python
|
lgpl-2.1
|
fedora-infra/python-fedora
|
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
- from fedora import release
- __version__ = release.VERSION
-
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
+ from fedora import release
+ __version__ = release.VERSION
+
|
Rearrange imports of gettext and release because of dependencies in circular import.
|
## Code Before:
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
from fedora import release
__version__ = release.VERSION
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
## Instruction:
Rearrange imports of gettext and release because of dependencies in circular import.
## Code After:
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
|
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
- from fedora import release
- __version__ = release.VERSION
-
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
+
+ from fedora import release
+ __version__ = release.VERSION
|
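The reorder above matters because of the circular-import shape involved: the package imports release, and release imports _ back from the package, so _ must exist before release is imported. A hedged two-file sketch of that shape (module names hypothetical, shown inline rather than as a single runnable script):
# pkg/__init__.py
import gettext
_ = gettext.translation('example', fallback=True).gettext
from pkg import release           # safe: `_` is already defined above
__version__ = release.VERSION
# pkg/release.py
from pkg import _                 # fails if pkg imported release before defining `_`
VERSION = _('1.0')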
25ebc324c0af6e1ce74535cc75227071637a7a18
|
areaScraper.py
|
areaScraper.py
|
from bs4 import BeautifulSoup
import re
fh = open("sites.htm", "r")
soup = BeautifulSoup(fh, "html.parser")
for columnDiv in soup.h1.next_sibling.next_sibling:
for state in columnDiv:
for city in state:
print(city)
#print(soup.text)
print("\n----Done----\n\n")
|
from bs4 import BeautifulSoup
import re
def getCities():
fh = open("sites.htm", "r")
soup = BeautifulSoup(fh, "html.parser")
placesDict = {}
for columnDiv in soup.h1.next_sibling.next_sibling:
for state in columnDiv:
for city in state:
m = (re.search('<li><a href="(.+)">(.+)</a>', str(city)))
if m:
placesDict[m.group(2)] = m.group(1)
return(placesDict)
getCities()
|
Complete site scraper for all American cities
|
Complete site scraper for all American cities
areaScraper.py contains the getCities() function, which will
return a dictionary of 'city name string' : 'url string'
for each Craigslist "site", corresponding to American cities,
regions, etc.
|
Python
|
mit
|
MuSystemsAnalysis/craigslist_area_search,MuSystemsAnalysis/craigslist_area_search
|
from bs4 import BeautifulSoup
import re
+ def getCities():
- fh = open("sites.htm", "r")
- soup = BeautifulSoup(fh, "html.parser")
+ fh = open("sites.htm", "r")
+ soup = BeautifulSoup(fh, "html.parser")
+ placesDict = {}
- for columnDiv in soup.h1.next_sibling.next_sibling:
- for state in columnDiv:
- for city in state:
- print(city)
- #print(soup.text)
- print("\n----Done----\n\n")
+ for columnDiv in soup.h1.next_sibling.next_sibling:
+ for state in columnDiv:
+ for city in state:
+ m = (re.search('<li><a href="(.+)">(.+)</a>', str(city)))
+ if m:
+ placesDict[m.group(2)] = m.group(1)
+ return(placesDict)
+
+ getCities()
+
|
Complete site scraper for all American cities
|
## Code Before:
from bs4 import BeautifulSoup
import re
fh = open("sites.htm", "r")
soup = BeautifulSoup(fh, "html.parser")
for columnDiv in soup.h1.next_sibling.next_sibling:
for state in columnDiv:
for city in state:
print(city)
#print(soup.text)
print("\n----Done----\n\n")
## Instruction:
Complete site scraper for all American cities
## Code After:
from bs4 import BeautifulSoup
import re
def getCities():
fh = open("sites.htm", "r")
soup = BeautifulSoup(fh, "html.parser")
placesDict = {}
for columnDiv in soup.h1.next_sibling.next_sibling:
for state in columnDiv:
for city in state:
m = (re.search('<li><a href="(.+)">(.+)</a>', str(city)))
if m:
placesDict[m.group(2)] = m.group(1)
return(placesDict)
getCities()
|
from bs4 import BeautifulSoup
import re
+ def getCities():
- fh = open("sites.htm", "r")
- soup = BeautifulSoup(fh, "html.parser")
+ fh = open("sites.htm", "r")
+ soup = BeautifulSoup(fh, "html.parser")
+ placesDict = {}
- for columnDiv in soup.h1.next_sibling.next_sibling:
- for state in columnDiv:
- for city in state:
- print(city)
- #print(soup.text)
- print("\n----Done----\n\n")
+ for columnDiv in soup.h1.next_sibling.next_sibling:
+ for state in columnDiv:
+ for city in state:
+ m = (re.search('<li><a href="(.+)">(.+)</a>', str(city)))
+ if m:
+ placesDict[m.group(2)] = m.group(1)
+
+ return(placesDict)
+
+ getCities()
|
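The scraper above regex-matches str(city); BeautifulSoup tags expose their attributes directly, which avoids re-parsing the markup with a regular expression. A hedged alternative sketch with a made-up HTML snippet:
from bs4 import BeautifulSoup
html = '<ul><li><a href="https://auburn.craigslist.org/">auburn</a></li></ul>'
soup = BeautifulSoup(html, "html.parser")
places = {a.get_text(): a["href"] for a in soup.find_all("a", href=True)}
print(places)    # {'auburn': 'https://auburn.craigslist.org/'}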
d017ca19b6d810387424e388656d5ff63244a1f7
|
tests/engine/file_loader_test.py
|
tests/engine/file_loader_test.py
|
import unittest
from engine import file_loader
class FileLoaderTest(unittest.TestCase):
def test_load_units(self):
dicts = file_loader.read_and_parse_json('units')
self.assertIsInstance(dicts, list)
self.assertGreater(len(dicts), 0)
for dict_ in dicts:
self.assertIsInstance(dict_, dict)
if __name__ == '__main__':
unittest.main()
|
import unittest
from engine import file_loader
class FileLoaderTest(unittest.TestCase):
def test_load_units(self):
dicts = file_loader.read_and_parse_json('units')
self.assertIsInstance(dicts, list)
self.assertGreater(len(dicts), 0)
for dict_ in dicts:
self.assertIsInstance(dict_, dict)
def testLoadStruct(self):
unit_map = file_loader.load_struct('units')
for unit_name, unit_args in unit_map.items():
self.assertIsInstance(unit_name, str)
self.assertIsInstance(unit_args, dict)
def testLoadEnum(self):
unit_map = file_loader.load_enum('attack_types')
self.assertIsInstance(unit_map, dict)
for unit_name, unit_enum in unit_map.items():
self.assertIsInstance(unit_name, str)
self.assertIsInstance(unit_enum, int)
if __name__ == '__main__':
unittest.main()
|
Include tests for file loading helpers
|
Include tests for file loading helpers
|
Python
|
mit
|
Tactique/game_engine,Tactique/game_engine
|
import unittest
from engine import file_loader
class FileLoaderTest(unittest.TestCase):
def test_load_units(self):
dicts = file_loader.read_and_parse_json('units')
self.assertIsInstance(dicts, list)
self.assertGreater(len(dicts), 0)
for dict_ in dicts:
self.assertIsInstance(dict_, dict)
+ def testLoadStruct(self):
+ unit_map = file_loader.load_struct('units')
+ for unit_name, unit_args in unit_map.items():
+ self.assertIsInstance(unit_name, str)
+ self.assertIsInstance(unit_args, dict)
+
+ def testLoadEnum(self):
+ unit_map = file_loader.load_enum('attack_types')
+ self.assertIsInstance(unit_map, dict)
+ for unit_name, unit_enum in unit_map.items():
+ self.assertIsInstance(unit_name, str)
+ self.assertIsInstance(unit_enum, int)
+
if __name__ == '__main__':
unittest.main()
|
Include tests for file loading helpers
|
## Code Before:
import unittest
from engine import file_loader
class FileLoaderTest(unittest.TestCase):
def test_load_units(self):
dicts = file_loader.read_and_parse_json('units')
self.assertIsInstance(dicts, list)
self.assertGreater(len(dicts), 0)
for dict_ in dicts:
self.assertIsInstance(dict_, dict)
if __name__ == '__main__':
unittest.main()
## Instruction:
Include tests for file loading helpers
## Code After:
import unittest
from engine import file_loader
class FileLoaderTest(unittest.TestCase):
def test_load_units(self):
dicts = file_loader.read_and_parse_json('units')
self.assertIsInstance(dicts, list)
self.assertGreater(len(dicts), 0)
for dict_ in dicts:
self.assertIsInstance(dict_, dict)
def testLoadStruct(self):
unit_map = file_loader.load_struct('units')
for unit_name, unit_args in unit_map.items():
self.assertIsInstance(unit_name, str)
self.assertIsInstance(unit_args, dict)
def testLoadEnum(self):
unit_map = file_loader.load_enum('attack_types')
self.assertIsInstance(unit_map, dict)
for unit_name, unit_enum in unit_map.items():
self.assertIsInstance(unit_name, str)
self.assertIsInstance(unit_enum, int)
if __name__ == '__main__':
unittest.main()
|
import unittest
from engine import file_loader
class FileLoaderTest(unittest.TestCase):
def test_load_units(self):
dicts = file_loader.read_and_parse_json('units')
self.assertIsInstance(dicts, list)
self.assertGreater(len(dicts), 0)
for dict_ in dicts:
self.assertIsInstance(dict_, dict)
+ def testLoadStruct(self):
+ unit_map = file_loader.load_struct('units')
+ for unit_name, unit_args in unit_map.items():
+ self.assertIsInstance(unit_name, str)
+ self.assertIsInstance(unit_args, dict)
+
+ def testLoadEnum(self):
+ unit_map = file_loader.load_enum('attack_types')
+ self.assertIsInstance(unit_map, dict)
+ for unit_name, unit_enum in unit_map.items():
+ self.assertIsInstance(unit_name, str)
+ self.assertIsInstance(unit_enum, int)
+
if __name__ == '__main__':
unittest.main()
|
1290bc59774aac7756658c3480d6a5293c7a3467
|
planner/models.py
|
planner/models.py
|
from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
origin = models.CharField(max_length=63)
destination = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.origin,
self.destination
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
|
from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
start = models.CharField(max_length=63)
end = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.start,
self.end
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
|
Rename Route model's start and end fields to be consistent with front end identification
|
Rename Route model's start and end fields to be consistent with front end identification
|
Python
|
apache-2.0
|
jwarren116/RoadTrip,jwarren116/RoadTrip,jwarren116/RoadTrip
|
from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
+ start = models.CharField(max_length=63)
- origin = models.CharField(max_length=63)
+ end = models.CharField(max_length=63)
- destination = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
- self.origin,
- self.destination
+ self.start,
+ self.end
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
|
Rename Route model's start and end fields to be consistent with front end identification
|
## Code Before:
from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
origin = models.CharField(max_length=63)
destination = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.origin,
self.destination
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
## Instruction:
Rename Route model's start and end fields to be consistent with front end identification
## Code After:
from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
start = models.CharField(max_length=63)
end = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.start,
self.end
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
|
from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
+ start = models.CharField(max_length=63)
- origin = models.CharField(max_length=63)
? ^^^^^
+ end = models.CharField(max_length=63)
? ^ +
- destination = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
- self.origin,
- self.destination
? -- -- ^^^
+ self.start,
? + ^
+ self.end
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
|
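The commit above only touches models.py; in a real Django project a rename like this also needs a schema migration so existing rows keep their data. A minimal sketch of such a migration, assuming an app label of 'routes' and a parent migration named '0001_initial' (both hypothetical, not taken from the repository):

from django.db import migrations

class Migration(migrations.Migration):
    dependencies = [
        ('routes', '0001_initial'),  # assumed app label and parent migration
    ]
    operations = [
        migrations.RenameField(model_name='route', old_name='origin', new_name='start'),
        migrations.RenameField(model_name='route', old_name='destination', new_name='end'),
    ]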
57461a7ebd35544c506e6b5021ff11c3b6dd943e
|
normandy/studies/models.py
|
normandy/studies/models.py
|
from django.db import models
from django.template.loader import render_to_string
from normandy.recipes.models import Recipe
class Extension(models.Model):
name = models.CharField(max_length=255)
xpi = models.FileField(upload_to='extensions')
@property
def recipes_used_by(self):
"""Set of enabled recipes that are using this extension."""
return Recipe.objects.filter(
latest_revision__arguments_json__contains=self.xpi.url,
)
def recipes_used_by_html(self):
return render_to_string('admin/field_recipe_list.html', {
'recipes': self.recipes_used_by.order_by('latest_revision__name'),
})
recipes_used_by_html.short_description = 'Used in Recipes'
|
from django.db import models
from django.template.loader import render_to_string
from normandy.recipes.models import Recipe
class Extension(models.Model):
name = models.CharField(max_length=255)
xpi = models.FileField(upload_to='extensions')
class Meta:
ordering = ('-id',)
@property
def recipes_used_by(self):
"""Set of enabled recipes that are using this extension."""
return Recipe.objects.filter(
latest_revision__arguments_json__contains=self.xpi.url,
)
def recipes_used_by_html(self):
return render_to_string('admin/field_recipe_list.html', {
'recipes': self.recipes_used_by.order_by('latest_revision__name'),
})
recipes_used_by_html.short_description = 'Used in Recipes'
|
Add ordering to Extension model
|
Add ordering to Extension model
|
Python
|
mpl-2.0
|
mozilla/normandy,mozilla/normandy,mozilla/normandy,mozilla/normandy
|
from django.db import models
from django.template.loader import render_to_string
from normandy.recipes.models import Recipe
class Extension(models.Model):
name = models.CharField(max_length=255)
xpi = models.FileField(upload_to='extensions')
+
+ class Meta:
+ ordering = ('-id',)
@property
def recipes_used_by(self):
"""Set of enabled recipes that are using this extension."""
return Recipe.objects.filter(
latest_revision__arguments_json__contains=self.xpi.url,
)
def recipes_used_by_html(self):
return render_to_string('admin/field_recipe_list.html', {
'recipes': self.recipes_used_by.order_by('latest_revision__name'),
})
recipes_used_by_html.short_description = 'Used in Recipes'
|
Add ordering to Extension model
|
## Code Before:
from django.db import models
from django.template.loader import render_to_string
from normandy.recipes.models import Recipe
class Extension(models.Model):
name = models.CharField(max_length=255)
xpi = models.FileField(upload_to='extensions')
@property
def recipes_used_by(self):
"""Set of enabled recipes that are using this extension."""
return Recipe.objects.filter(
latest_revision__arguments_json__contains=self.xpi.url,
)
def recipes_used_by_html(self):
return render_to_string('admin/field_recipe_list.html', {
'recipes': self.recipes_used_by.order_by('latest_revision__name'),
})
recipes_used_by_html.short_description = 'Used in Recipes'
## Instruction:
Add ordering to Extension model
## Code After:
from django.db import models
from django.template.loader import render_to_string
from normandy.recipes.models import Recipe
class Extension(models.Model):
name = models.CharField(max_length=255)
xpi = models.FileField(upload_to='extensions')
class Meta:
ordering = ('-id',)
@property
def recipes_used_by(self):
"""Set of enabled recipes that are using this extension."""
return Recipe.objects.filter(
latest_revision__arguments_json__contains=self.xpi.url,
)
def recipes_used_by_html(self):
return render_to_string('admin/field_recipe_list.html', {
'recipes': self.recipes_used_by.order_by('latest_revision__name'),
})
recipes_used_by_html.short_description = 'Used in Recipes'
|
from django.db import models
from django.template.loader import render_to_string
from normandy.recipes.models import Recipe
class Extension(models.Model):
name = models.CharField(max_length=255)
xpi = models.FileField(upload_to='extensions')
+
+ class Meta:
+ ordering = ('-id',)
@property
def recipes_used_by(self):
"""Set of enabled recipes that are using this extension."""
return Recipe.objects.filter(
latest_revision__arguments_json__contains=self.xpi.url,
)
def recipes_used_by_html(self):
return render_to_string('admin/field_recipe_list.html', {
'recipes': self.recipes_used_by.order_by('latest_revision__name'),
})
recipes_used_by_html.short_description = 'Used in Recipes'
|
ed3c03ac4f213f3882e28f25ae0596a7021928cd
|
test/ParseableInterface/Inputs/make-unreadable.py
|
test/ParseableInterface/Inputs/make-unreadable.py
|
import platform
import subprocess
import sys
if platform.system() == 'Windows':
import ctypes
AdvAPI32 = ctypes.windll.Advapi32
from ctypes.wintypes import POINTER
UNLEN = 256
GetUserNameW = AdvAPI32.GetUserNameW
GetUserNameW.argtypes = (
ctypes.c_wchar_p, # _In_Out_ lpBuffer
POINTER(ctypes.c_uint) # _In_out_ pcBuffer
)
GetUserNameW.restype = ctypes.c_uint
buffer = ctypes.create_unicode_buffer(UNLEN + 1)
size = ctypes.c_uint(len(buffer))
GetUserNameW(buffer, ctypes.byref(size))
for path in sys.argv[1:]:
subprocess.call(['icacls', path, '/deny',
'{}:(R)'.format(buffer.value)])
else:
for path in sys.argv[1:]:
subprocess.call(['chmod', 'a-r', path])
|
import platform
import subprocess
import sys
if platform.system() == 'Windows':
import ctypes
AdvAPI32 = ctypes.windll.Advapi32
from ctypes.wintypes import POINTER
UNLEN = 256
GetUserNameW = AdvAPI32.GetUserNameW
GetUserNameW.argtypes = (
ctypes.c_wchar_p, # _In_Out_ lpBuffer
POINTER(ctypes.c_uint) # _In_out_ pcBuffer
)
GetUserNameW.restype = ctypes.c_uint
buffer = ctypes.create_unicode_buffer(UNLEN + 1)
size = ctypes.c_uint(len(buffer))
GetUserNameW(buffer, ctypes.byref(size))
# For NetworkService, Host$ is returned, so we have to turn it back
# into something that icacls understands.
if not buffer.value.endswith('$'):
user_name = buffer.value
else:
user_name = 'NT AUTHORITY\\NetworkService'
for path in sys.argv[1:]:
subprocess.call(['icacls', path, '/deny',
'{}:(R)'.format(user_name)])
else:
for path in sys.argv[1:]:
subprocess.call(['chmod', 'a-r', path])
|
Fix handling of Network Service username.
|
[windows] Fix handling of Network Service username.
In Windows Server 2016 at least, the Network Service user (the one being
used by the CI machine) is returned as Host$, which icacls doesn't
understand. Turn the name into something that icacls understands if we get a name
that ends with a dollar.
|
Python
|
apache-2.0
|
atrick/swift,hooman/swift,harlanhaskins/swift,shahmishal/swift,stephentyrone/swift,jmgc/swift,devincoughlin/swift,ahoppen/swift,tkremenek/swift,xedin/swift,shahmishal/swift,xwu/swift,xedin/swift,harlanhaskins/swift,harlanhaskins/swift,sschiau/swift,shajrawi/swift,karwa/swift,gribozavr/swift,apple/swift,CodaFi/swift,ahoppen/swift,lorentey/swift,nathawes/swift,JGiola/swift,allevato/swift,airspeedswift/swift,harlanhaskins/swift,hooman/swift,karwa/swift,rudkx/swift,CodaFi/swift,gregomni/swift,lorentey/swift,sschiau/swift,shajrawi/swift,karwa/swift,parkera/swift,tkremenek/swift,sschiau/swift,devincoughlin/swift,xedin/swift,aschwaighofer/swift,airspeedswift/swift,jmgc/swift,nathawes/swift,lorentey/swift,tkremenek/swift,allevato/swift,jmgc/swift,xwu/swift,JGiola/swift,ahoppen/swift,shahmishal/swift,tkremenek/swift,roambotics/swift,benlangmuir/swift,roambotics/swift,hooman/swift,atrick/swift,gribozavr/swift,gregomni/swift,glessard/swift,xedin/swift,apple/swift,jckarter/swift,gregomni/swift,karwa/swift,benlangmuir/swift,sschiau/swift,xedin/swift,stephentyrone/swift,aschwaighofer/swift,jckarter/swift,CodaFi/swift,lorentey/swift,CodaFi/swift,lorentey/swift,harlanhaskins/swift,tkremenek/swift,karwa/swift,gribozavr/swift,nathawes/swift,gregomni/swift,tkremenek/swift,JGiola/swift,nathawes/swift,JGiola/swift,parkera/swift,gregomni/swift,aschwaighofer/swift,airspeedswift/swift,CodaFi/swift,karwa/swift,apple/swift,shajrawi/swift,atrick/swift,stephentyrone/swift,hooman/swift,apple/swift,xwu/swift,parkera/swift,CodaFi/swift,glessard/swift,devincoughlin/swift,glessard/swift,hooman/swift,rudkx/swift,ahoppen/swift,harlanhaskins/swift,sschiau/swift,hooman/swift,allevato/swift,shajrawi/swift,ahoppen/swift,allevato/swift,devincoughlin/swift,shajrawi/swift,devincoughlin/swift,nathawes/swift,JGiola/swift,rudkx/swift,devincoughlin/swift,benlangmuir/swift,parkera/swift,roambotics/swift,rudkx/swift,jmgc/swift,xwu/swift,xedin/swift,roambotics/swift,aschwaighofer/swift,jmgc/swift,airspeedswift/swift,shahmishal/swift,stephentyrone/swift,gribozavr/swift,karwa/swift,devincoughlin/swift,gribozavr/swift,nathawes/swift,roambotics/swift,benlangmuir/swift,rudkx/swift,shahmishal/swift,xwu/swift,glessard/swift,karwa/swift,aschwaighofer/swift,allevato/swift,parkera/swift,glessard/swift,atrick/swift,tkremenek/swift,gribozavr/swift,sschiau/swift,jmgc/swift,benlangmuir/swift,jckarter/swift,jckarter/swift,shahmishal/swift,allevato/swift,gregomni/swift,airspeedswift/swift,parkera/swift,shahmishal/swift,gribozavr/swift,sschiau/swift,jckarter/swift,JGiola/swift,harlanhaskins/swift,xedin/swift,allevato/swift,jckarter/swift,sschiau/swift,nathawes/swift,airspeedswift/swift,xedin/swift,lorentey/swift,glessard/swift,devincoughlin/swift,atrick/swift,lorentey/swift,aschwaighofer/swift,CodaFi/swift,gribozavr/swift,roambotics/swift,shajrawi/swift,rudkx/swift,airspeedswift/swift,lorentey/swift,stephentyrone/swift,apple/swift,aschwaighofer/swift,xwu/swift,xwu/swift,benlangmuir/swift,ahoppen/swift,atrick/swift,jckarter/swift,parkera/swift,parkera/swift,shajrawi/swift,hooman/swift,shahmishal/swift,stephentyrone/swift,shajrawi/swift,jmgc/swift,stephentyrone/swift,apple/swift
|
import platform
import subprocess
import sys
if platform.system() == 'Windows':
import ctypes
AdvAPI32 = ctypes.windll.Advapi32
from ctypes.wintypes import POINTER
UNLEN = 256
GetUserNameW = AdvAPI32.GetUserNameW
GetUserNameW.argtypes = (
ctypes.c_wchar_p, # _In_Out_ lpBuffer
POINTER(ctypes.c_uint) # _In_out_ pcBuffer
)
GetUserNameW.restype = ctypes.c_uint
buffer = ctypes.create_unicode_buffer(UNLEN + 1)
size = ctypes.c_uint(len(buffer))
GetUserNameW(buffer, ctypes.byref(size))
+ # For NetworkService, Host$ is returned, so we have to turn it back
+ # into something that icacls understands.
+ if not buffer.value.endswith('$'):
+ user_name = buffer.value
+ else:
+ user_name = 'NT AUTHORITY\\NetworkService'
for path in sys.argv[1:]:
subprocess.call(['icacls', path, '/deny',
- '{}:(R)'.format(buffer.value)])
+ '{}:(R)'.format(user_name)])
else:
for path in sys.argv[1:]:
subprocess.call(['chmod', 'a-r', path])
|
Fix handling of Network Service username.
|
## Code Before:
import platform
import subprocess
import sys
if platform.system() == 'Windows':
import ctypes
AdvAPI32 = ctypes.windll.Advapi32
from ctypes.wintypes import POINTER
UNLEN = 256
GetUserNameW = AdvAPI32.GetUserNameW
GetUserNameW.argtypes = (
ctypes.c_wchar_p, # _In_Out_ lpBuffer
POINTER(ctypes.c_uint) # _In_out_ pcBuffer
)
GetUserNameW.restype = ctypes.c_uint
buffer = ctypes.create_unicode_buffer(UNLEN + 1)
size = ctypes.c_uint(len(buffer))
GetUserNameW(buffer, ctypes.byref(size))
for path in sys.argv[1:]:
subprocess.call(['icacls', path, '/deny',
'{}:(R)'.format(buffer.value)])
else:
for path in sys.argv[1:]:
subprocess.call(['chmod', 'a-r', path])
## Instruction:
Fix handling of Network Service username.
## Code After:
import platform
import subprocess
import sys
if platform.system() == 'Windows':
import ctypes
AdvAPI32 = ctypes.windll.Advapi32
from ctypes.wintypes import POINTER
UNLEN = 256
GetUserNameW = AdvAPI32.GetUserNameW
GetUserNameW.argtypes = (
ctypes.c_wchar_p, # _In_Out_ lpBuffer
POINTER(ctypes.c_uint) # _In_out_ pcBuffer
)
GetUserNameW.restype = ctypes.c_uint
buffer = ctypes.create_unicode_buffer(UNLEN + 1)
size = ctypes.c_uint(len(buffer))
GetUserNameW(buffer, ctypes.byref(size))
# For NetworkService, Host$ is returned, so we have to turn it back
# into something that icacls understands.
if not buffer.value.endswith('$'):
user_name = buffer.value
else:
user_name = 'NT AUTHORITY\\NetworkService'
for path in sys.argv[1:]:
subprocess.call(['icacls', path, '/deny',
'{}:(R)'.format(user_name)])
else:
for path in sys.argv[1:]:
subprocess.call(['chmod', 'a-r', path])
|
import platform
import subprocess
import sys
if platform.system() == 'Windows':
import ctypes
AdvAPI32 = ctypes.windll.Advapi32
from ctypes.wintypes import POINTER
UNLEN = 256
GetUserNameW = AdvAPI32.GetUserNameW
GetUserNameW.argtypes = (
ctypes.c_wchar_p, # _In_Out_ lpBuffer
POINTER(ctypes.c_uint) # _In_out_ pcBuffer
)
GetUserNameW.restype = ctypes.c_uint
buffer = ctypes.create_unicode_buffer(UNLEN + 1)
size = ctypes.c_uint(len(buffer))
GetUserNameW(buffer, ctypes.byref(size))
+ # For NetworkService, Host$ is returned, so we have to turn it back
+ # into something that icacls understands.
+ if not buffer.value.endswith('$'):
+ user_name = buffer.value
+ else:
+ user_name = 'NT AUTHORITY\\NetworkService'
for path in sys.argv[1:]:
subprocess.call(['icacls', path, '/deny',
- '{}:(R)'.format(buffer.value)])
? - ^^ ^^ ^^
+ '{}:(R)'.format(user_name)])
? ^ ^^ ^
else:
for path in sys.argv[1:]:
subprocess.call(['chmod', 'a-r', path])
|
f769360dbb6da83fc8bf9c244c04b3d2f7c49ffa
|
lab/runnerctl.py
|
lab/runnerctl.py
|
import pytest
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
|
import pytest
import string
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
@pytest.fixture(scope='class')
def testname(request):
"""Pytest test node name with all unfriendly characters transformed
into underscores. The lifetime is class scoped since this name is
often used to provision remote sw profiles which live for the entirety
of a test suite.
"""
return request.node.name.translate(
string.maketrans('\[', '__')).strip(']')
|
Move some fixtures into better places
|
Move some fixtures into better places
Move datadir into the sipsecmon plugin and testname into
lab.runnerctl.
|
Python
|
mpl-2.0
|
sangoma/pytestlab
|
import pytest
+ import string
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
+
+ @pytest.fixture(scope='class')
+ def testname(request):
+ """Pytest test node name with all unfriendly characters transformed
+ into underscores. The lifetime is class scoped since this name is
+ often used to provision remote sw profiles which live for the entirety
+ of a test suite.
+ """
+ return request.node.name.translate(
+ string.maketrans('\[', '__')).strip(']')
+
|
Move some fixtures into better places
|
## Code Before:
import pytest
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
## Instruction:
Move some fixtures into better places
## Code After:
import pytest
import string
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
@pytest.fixture(scope='class')
def testname(request):
"""Pytest test node name with all unfriendly characters transformed
into underscores. The lifetime is class scoped since this name is
often used to provision remote sw profiles which live for the entirety
of a test suite.
"""
return request.node.name.translate(
string.maketrans('\[', '__')).strip(']')
|
import pytest
+ import string
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
+
+
+ @pytest.fixture(scope='class')
+ def testname(request):
+ """Pytest test node name with all unfriendly characters transformed
+ into underscores. The lifetime is class scoped since this name is
+ often used to provision remote sw profiles which live for the entirety
+ of a test suite.
+ """
+ return request.node.name.translate(
+ string.maketrans('\[', '__')).strip(']')
|
c476cb5cf1bead63f19871fa1db9769e236fbe09
|
siren_files.py
|
siren_files.py
|
source_files = ['check_siren.py', 'colours', 'credits', 'dataview', 'dijkstra_4',
'displayobject', 'displaytable', 'editini', 'flexiplot', 'floaters',
'getmap', 'getmerra2', 'getmodels', 'grid', 'indexweather', 'inisyntax',
'makegrid', 'makeweatherfiles',
'newstation', 'plotweather', 'powerclasses',
'powermatch', 'powermodel', 'powerplot', 'sammodels', 'samrun',
'senutils', 'siren', 'sirenicons', 'sirenm', 'sirenupd', 'ssc',
'station', 'superpower', 'towns', 'turbine', 'updateswis',
'viewresource', 'visualise', 'wascene', 'worldwindow', 'zoompan',
'getfiles.ini', 'about.html', 'credits.html', 'help.html',
'makeweatherfiles.html', 'SIREN_notes.html', 'siren_versions.csv',
'siren_files.py', 'compare_to_siren.git.py']
version_files = ['flexiplot', 'getmap', 'getmerra2', 'indexweather', 'makegrid',
'makeweatherfiles',
'powermatch', 'powerplot', 'siren', 'sirenm', 'sirenupd',
'updateswis']
|
source_files = ['check_siren.py', 'colours', 'credits', 'dataview', 'dijkstra_4',
'displayobject', 'displaytable', 'editini', 'flexiplot', 'floaters',
'getmap', 'getmerra2', 'getmodels', 'grid', 'indexweather', 'inisyntax',
'makegrid', 'makeweatherfiles',
'newstation', 'plotweather', 'powerclasses',
'powermatch', 'powermodel', 'powerplot', 'sammodels', 'samrun',
'senutils', 'siren', 'sirenicons', 'sirenm', 'sirenupd', 'ssc',
'station', 'superpower', 'towns', 'turbine', 'updateswis',
'viewresource', 'visualise', 'wascene', 'worldwindow', 'zoompan',
'getfiles.ini', 'about.html', 'credits.html', 'help.html',
'SIREN_notes.html', 'siren_versions.csv',
'siren_files.py', 'compare_to_siren.git.py']
version_files = ['flexiplot', 'getmap', 'getmerra2', 'indexweather', 'makegrid',
'makeweatherfiles',
'powermatch', 'powerplot', 'siren', 'sirenm', 'sirenupd',
'template', 'updateswis']
|
Remove makeweatherfiles, add template for Windows version file
|
Remove makeweatherfiles, add template for Windows version file
|
Python
|
agpl-3.0
|
ozsolarwind/siren,ozsolarwind/siren
|
source_files = ['check_siren.py', 'colours', 'credits', 'dataview', 'dijkstra_4',
'displayobject', 'displaytable', 'editini', 'flexiplot', 'floaters',
'getmap', 'getmerra2', 'getmodels', 'grid', 'indexweather', 'inisyntax',
'makegrid', 'makeweatherfiles',
'newstation', 'plotweather', 'powerclasses',
'powermatch', 'powermodel', 'powerplot', 'sammodels', 'samrun',
'senutils', 'siren', 'sirenicons', 'sirenm', 'sirenupd', 'ssc',
'station', 'superpower', 'towns', 'turbine', 'updateswis',
'viewresource', 'visualise', 'wascene', 'worldwindow', 'zoompan',
'getfiles.ini', 'about.html', 'credits.html', 'help.html',
- 'makeweatherfiles.html', 'SIREN_notes.html', 'siren_versions.csv',
+ 'SIREN_notes.html', 'siren_versions.csv',
'siren_files.py', 'compare_to_siren.git.py']
version_files = ['flexiplot', 'getmap', 'getmerra2', 'indexweather', 'makegrid',
'makeweatherfiles',
'powermatch', 'powerplot', 'siren', 'sirenm', 'sirenupd',
- 'updateswis']
+ 'template', 'updateswis']
|
Remove makeweatherfiles, add template for Windows version file
|
## Code Before:
source_files = ['check_siren.py', 'colours', 'credits', 'dataview', 'dijkstra_4',
'displayobject', 'displaytable', 'editini', 'flexiplot', 'floaters',
'getmap', 'getmerra2', 'getmodels', 'grid', 'indexweather', 'inisyntax',
'makegrid', 'makeweatherfiles',
'newstation', 'plotweather', 'powerclasses',
'powermatch', 'powermodel', 'powerplot', 'sammodels', 'samrun',
'senutils', 'siren', 'sirenicons', 'sirenm', 'sirenupd', 'ssc',
'station', 'superpower', 'towns', 'turbine', 'updateswis',
'viewresource', 'visualise', 'wascene', 'worldwindow', 'zoompan',
'getfiles.ini', 'about.html', 'credits.html', 'help.html',
'makeweatherfiles.html', 'SIREN_notes.html', 'siren_versions.csv',
'siren_files.py', 'compare_to_siren.git.py']
version_files = ['flexiplot', 'getmap', 'getmerra2', 'indexweather', 'makegrid',
'makeweatherfiles',
'powermatch', 'powerplot', 'siren', 'sirenm', 'sirenupd',
'updateswis']
## Instruction:
Remove makeweatherfiles, add template for Windows version file
## Code After:
source_files = ['check_siren.py', 'colours', 'credits', 'dataview', 'dijkstra_4',
'displayobject', 'displaytable', 'editini', 'flexiplot', 'floaters',
'getmap', 'getmerra2', 'getmodels', 'grid', 'indexweather', 'inisyntax',
'makegrid', 'makeweatherfiles',
'newstation', 'plotweather', 'powerclasses',
'powermatch', 'powermodel', 'powerplot', 'sammodels', 'samrun',
'senutils', 'siren', 'sirenicons', 'sirenm', 'sirenupd', 'ssc',
'station', 'superpower', 'towns', 'turbine', 'updateswis',
'viewresource', 'visualise', 'wascene', 'worldwindow', 'zoompan',
'getfiles.ini', 'about.html', 'credits.html', 'help.html',
'SIREN_notes.html', 'siren_versions.csv',
'siren_files.py', 'compare_to_siren.git.py']
version_files = ['flexiplot', 'getmap', 'getmerra2', 'indexweather', 'makegrid',
'makeweatherfiles',
'powermatch', 'powerplot', 'siren', 'sirenm', 'sirenupd',
'template', 'updateswis']
|
source_files = ['check_siren.py', 'colours', 'credits', 'dataview', 'dijkstra_4',
'displayobject', 'displaytable', 'editini', 'flexiplot', 'floaters',
'getmap', 'getmerra2', 'getmodels', 'grid', 'indexweather', 'inisyntax',
'makegrid', 'makeweatherfiles',
'newstation', 'plotweather', 'powerclasses',
'powermatch', 'powermodel', 'powerplot', 'sammodels', 'samrun',
'senutils', 'siren', 'sirenicons', 'sirenm', 'sirenupd', 'ssc',
'station', 'superpower', 'towns', 'turbine', 'updateswis',
'viewresource', 'visualise', 'wascene', 'worldwindow', 'zoompan',
'getfiles.ini', 'about.html', 'credits.html', 'help.html',
- 'makeweatherfiles.html', 'SIREN_notes.html', 'siren_versions.csv',
? -------------------------
+ 'SIREN_notes.html', 'siren_versions.csv',
'siren_files.py', 'compare_to_siren.git.py']
version_files = ['flexiplot', 'getmap', 'getmerra2', 'indexweather', 'makegrid',
'makeweatherfiles',
'powermatch', 'powerplot', 'siren', 'sirenm', 'sirenupd',
- 'updateswis']
+ 'template', 'updateswis']
? ++++++++++++
|
5c3900e12216164712c9e7fe7ea064e70fae8d1b
|
enumfields/enums.py
|
enumfields/enums.py
|
import inspect
from django.utils.encoding import force_bytes, python_2_unicode_compatible
from enum import Enum as BaseEnum, EnumMeta as BaseEnumMeta
import six
class EnumMeta(BaseEnumMeta):
def __new__(cls, name, bases, attrs):
Labels = attrs.get('Labels')
if Labels is not None and inspect.isclass(Labels):
del attrs['Labels']
obj = BaseEnumMeta.__new__(cls, name, bases, attrs)
for m in obj:
try:
m.label = getattr(Labels, m.name)
except AttributeError:
m.label = m.name.replace('_', ' ').title()
return obj
@python_2_unicode_compatible
class Enum(six.with_metaclass(EnumMeta, BaseEnum)):
@classmethod
def choices(cls):
"""
Returns a list formatted for use as field choices.
(See https://docs.djangoproject.com/en/dev/ref/models/fields/#choices)
"""
return tuple((m.value, m.label) for m in cls)
def __str__(self):
"""
Show our label when Django uses the Enum for displaying in a view
"""
return force_bytes(self.label)
|
import inspect
from django.utils.encoding import force_bytes, python_2_unicode_compatible
from enum import Enum as BaseEnum, EnumMeta as BaseEnumMeta
import six
class EnumMeta(BaseEnumMeta):
def __new__(cls, name, bases, attrs):
Labels = attrs.get('Labels')
if Labels is not None and inspect.isclass(Labels):
del attrs['Labels']
if hasattr(attrs, '_member_names'):
attrs._member_names.remove('Labels')
obj = BaseEnumMeta.__new__(cls, name, bases, attrs)
for m in obj:
try:
m.label = getattr(Labels, m.name)
except AttributeError:
m.label = m.name.replace('_', ' ').title()
return obj
@python_2_unicode_compatible
class Enum(six.with_metaclass(EnumMeta, BaseEnum)):
@classmethod
def choices(cls):
"""
Returns a list formatted for use as field choices.
(See https://docs.djangoproject.com/en/dev/ref/models/fields/#choices)
"""
return tuple((m.value, m.label) for m in cls)
def __str__(self):
"""
Show our label when Django uses the Enum for displaying in a view
"""
return force_bytes(self.label)
|
Fix 'Labels' class in Python 3.
|
Fix 'Labels' class in Python 3.
In Python 3, the attrs dict will already be an _EnumDict, which has a
separate list of member names (in Python 2, it is still a plain dict at this
point).
|
Python
|
mit
|
suutari-ai/django-enumfields,jackyyf/django-enumfields,bxm156/django-enumfields,jessamynsmith/django-enumfields
|
import inspect
from django.utils.encoding import force_bytes, python_2_unicode_compatible
from enum import Enum as BaseEnum, EnumMeta as BaseEnumMeta
import six
class EnumMeta(BaseEnumMeta):
def __new__(cls, name, bases, attrs):
Labels = attrs.get('Labels')
if Labels is not None and inspect.isclass(Labels):
del attrs['Labels']
+ if hasattr(attrs, '_member_names'):
+ attrs._member_names.remove('Labels')
obj = BaseEnumMeta.__new__(cls, name, bases, attrs)
for m in obj:
try:
m.label = getattr(Labels, m.name)
except AttributeError:
m.label = m.name.replace('_', ' ').title()
return obj
@python_2_unicode_compatible
class Enum(six.with_metaclass(EnumMeta, BaseEnum)):
@classmethod
def choices(cls):
"""
Returns a list formatted for use as field choices.
(See https://docs.djangoproject.com/en/dev/ref/models/fields/#choices)
"""
return tuple((m.value, m.label) for m in cls)
def __str__(self):
"""
Show our label when Django uses the Enum for displaying in a view
"""
return force_bytes(self.label)
|
Fix 'Labels' class in Python 3.
|
## Code Before:
import inspect
from django.utils.encoding import force_bytes, python_2_unicode_compatible
from enum import Enum as BaseEnum, EnumMeta as BaseEnumMeta
import six
class EnumMeta(BaseEnumMeta):
def __new__(cls, name, bases, attrs):
Labels = attrs.get('Labels')
if Labels is not None and inspect.isclass(Labels):
del attrs['Labels']
obj = BaseEnumMeta.__new__(cls, name, bases, attrs)
for m in obj:
try:
m.label = getattr(Labels, m.name)
except AttributeError:
m.label = m.name.replace('_', ' ').title()
return obj
@python_2_unicode_compatible
class Enum(six.with_metaclass(EnumMeta, BaseEnum)):
@classmethod
def choices(cls):
"""
Returns a list formatted for use as field choices.
(See https://docs.djangoproject.com/en/dev/ref/models/fields/#choices)
"""
return tuple((m.value, m.label) for m in cls)
def __str__(self):
"""
Show our label when Django uses the Enum for displaying in a view
"""
return force_bytes(self.label)
## Instruction:
Fix 'Labels' class in Python 3.
## Code After:
import inspect
from django.utils.encoding import force_bytes, python_2_unicode_compatible
from enum import Enum as BaseEnum, EnumMeta as BaseEnumMeta
import six
class EnumMeta(BaseEnumMeta):
def __new__(cls, name, bases, attrs):
Labels = attrs.get('Labels')
if Labels is not None and inspect.isclass(Labels):
del attrs['Labels']
if hasattr(attrs, '_member_names'):
attrs._member_names.remove('Labels')
obj = BaseEnumMeta.__new__(cls, name, bases, attrs)
for m in obj:
try:
m.label = getattr(Labels, m.name)
except AttributeError:
m.label = m.name.replace('_', ' ').title()
return obj
@python_2_unicode_compatible
class Enum(six.with_metaclass(EnumMeta, BaseEnum)):
@classmethod
def choices(cls):
"""
Returns a list formatted for use as field choices.
(See https://docs.djangoproject.com/en/dev/ref/models/fields/#choices)
"""
return tuple((m.value, m.label) for m in cls)
def __str__(self):
"""
Show our label when Django uses the Enum for displaying in a view
"""
return force_bytes(self.label)
|
import inspect
from django.utils.encoding import force_bytes, python_2_unicode_compatible
from enum import Enum as BaseEnum, EnumMeta as BaseEnumMeta
import six
class EnumMeta(BaseEnumMeta):
def __new__(cls, name, bases, attrs):
Labels = attrs.get('Labels')
if Labels is not None and inspect.isclass(Labels):
del attrs['Labels']
+ if hasattr(attrs, '_member_names'):
+ attrs._member_names.remove('Labels')
obj = BaseEnumMeta.__new__(cls, name, bases, attrs)
for m in obj:
try:
m.label = getattr(Labels, m.name)
except AttributeError:
m.label = m.name.replace('_', ' ').title()
return obj
@python_2_unicode_compatible
class Enum(six.with_metaclass(EnumMeta, BaseEnum)):
@classmethod
def choices(cls):
"""
Returns a list formatted for use as field choices.
(See https://docs.djangoproject.com/en/dev/ref/models/fields/#choices)
"""
return tuple((m.value, m.label) for m in cls)
def __str__(self):
"""
Show our label when Django uses the Enum for displaying in a view
"""
return force_bytes(self.label)
|
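What the fix keeps working on Python 3 is the intended Labels usage. A short sketch, assuming the module above is importable as enumfields on the Python versions this library targeted (the member names and labels here are made up):

from enumfields import Enum

class Color(Enum):
    RED = 'r'
    GREEN = 'g'

    class Labels:
        RED = 'A vivid red'  # GREEN falls back to the generated label 'Green'

assert Color.RED.label == 'A vivid red'
assert Color.GREEN.label == 'Green'
assert ('r', 'A vivid red') in Color.choices()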
9d78bc8bbe8d0065debd8b4e5e72ed73f135ed63
|
linter.py
|
linter.py
|
"""This module exports the Bashate plugin class."""
from SublimeLinter.lint import Linter
import os
class Bashate(Linter):
"""Provides an interface to bashate."""
cmd = 'bashate'
comment_re = r'\s*#'
regex = (
r'^.+:(?P<line>\d+):1: (?:(?P<error>E)|(?P<warning>W))\d{3} (?P<message>.+)'
)
defaults = {
'selector': 'source.shell.bash',
'--ignore=,': '',
'--warn=,': '',
'--error=,': ''
}
tempfile_suffix = 'sh'
check_version = False
def tmpfile(self, cmd, code, suffix=''):
"""
Run an external executable using a temp file to pass code and return its output.
We override this to have the tmpfile extension match what is being
linted so E005 is valid.
"""
filename, extension = os.path.splitext(self.filename)
extension = '.missingextension' if not extension else extension
return super().tmpfile(cmd, code, extension)
|
"""This module exports the Bashate plugin class."""
from SublimeLinter.lint import Linter
import os
class Bashate(Linter):
"""Provides an interface to bashate."""
cmd = 'bashate'
regex = (
r'^.+:(?P<line>\d+):1: (?:(?P<error>E)|(?P<warning>W))\d{3} (?P<message>.+)'
)
defaults = {
'selector': 'source.shell.bash',
'--ignore=,': '',
'--warn=,': '',
'--error=,': ''
}
tempfile_suffix = 'sh'
def tmpfile(self, cmd, code, suffix=''):
"""
Run an external executable using a temp file to pass code and return its output.
We override this to have the tmpfile extension match what is being
linted so E005 is valid.
"""
filename, extension = os.path.splitext(self.filename)
extension = '.missingextension' if not extension else extension
return super().tmpfile(cmd, code, extension)
|
Remove deprecated attributes comment_re and check_version
|
Remove deprecated attributes comment_re and check_version
|
Python
|
mit
|
maristgeek/SublimeLinter-contrib-bashate
|
"""This module exports the Bashate plugin class."""
from SublimeLinter.lint import Linter
import os
class Bashate(Linter):
"""Provides an interface to bashate."""
cmd = 'bashate'
- comment_re = r'\s*#'
regex = (
r'^.+:(?P<line>\d+):1: (?:(?P<error>E)|(?P<warning>W))\d{3} (?P<message>.+)'
)
defaults = {
'selector': 'source.shell.bash',
'--ignore=,': '',
'--warn=,': '',
'--error=,': ''
}
tempfile_suffix = 'sh'
- check_version = False
def tmpfile(self, cmd, code, suffix=''):
"""
Run an external executable using a temp file to pass code and return its output.
We override this to have the tmpfile extension match what is being
linted so E005 is valid.
"""
filename, extension = os.path.splitext(self.filename)
extension = '.missingextension' if not extension else extension
return super().tmpfile(cmd, code, extension)
|
Remove deprecated attributes comment_re and check_version
|
## Code Before:
"""This module exports the Bashate plugin class."""
from SublimeLinter.lint import Linter
import os
class Bashate(Linter):
"""Provides an interface to bashate."""
cmd = 'bashate'
comment_re = r'\s*#'
regex = (
r'^.+:(?P<line>\d+):1: (?:(?P<error>E)|(?P<warning>W))\d{3} (?P<message>.+)'
)
defaults = {
'selector': 'source.shell.bash',
'--ignore=,': '',
'--warn=,': '',
'--error=,': ''
}
tempfile_suffix = 'sh'
check_version = False
def tmpfile(self, cmd, code, suffix=''):
"""
Run an external executable using a temp file to pass code and return its output.
We override this to have the tmpfile extension match what is being
linted so E005 is valid.
"""
filename, extension = os.path.splitext(self.filename)
extension = '.missingextension' if not extension else extension
return super().tmpfile(cmd, code, extension)
## Instruction:
Remove deprecated attributes comment_re and check_version
## Code After:
"""This module exports the Bashate plugin class."""
from SublimeLinter.lint import Linter
import os
class Bashate(Linter):
"""Provides an interface to bashate."""
cmd = 'bashate'
regex = (
r'^.+:(?P<line>\d+):1: (?:(?P<error>E)|(?P<warning>W))\d{3} (?P<message>.+)'
)
defaults = {
'selector': 'source.shell.bash',
'--ignore=,': '',
'--warn=,': '',
'--error=,': ''
}
tempfile_suffix = 'sh'
def tmpfile(self, cmd, code, suffix=''):
"""
Run an external executable using a temp file to pass code and return its output.
We override this to have the tmpfile extension match what is being
linted so E005 is valid.
"""
filename, extension = os.path.splitext(self.filename)
extension = '.missingextension' if not extension else extension
return super().tmpfile(cmd, code, extension)
|
"""This module exports the Bashate plugin class."""
from SublimeLinter.lint import Linter
import os
class Bashate(Linter):
"""Provides an interface to bashate."""
cmd = 'bashate'
- comment_re = r'\s*#'
regex = (
r'^.+:(?P<line>\d+):1: (?:(?P<error>E)|(?P<warning>W))\d{3} (?P<message>.+)'
)
defaults = {
'selector': 'source.shell.bash',
'--ignore=,': '',
'--warn=,': '',
'--error=,': ''
}
tempfile_suffix = 'sh'
- check_version = False
def tmpfile(self, cmd, code, suffix=''):
"""
Run an external executable using a temp file to pass code and return its output.
We override this to have the tmpfile extension match what is being
linted so E005 is valid.
"""
filename, extension = os.path.splitext(self.filename)
extension = '.missingextension' if not extension else extension
return super().tmpfile(cmd, code, extension)
|
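The regex is the part doing the real work here; a quick, self-contained check of how it parses a typical bashate output line (the sample line is illustrative, not captured from bashate):

import re

BASHATE_LINE = r'^.+:(?P<line>\d+):1: (?:(?P<error>E)|(?P<warning>W))\d{3} (?P<message>.+)'
match = re.match(BASHATE_LINE, 'deploy.sh:12:1: E003 Indent not multiple of 4')
assert match and match.group('line') == '12'
assert match.group('error') == 'E' and match.group('warning') is None
assert match.group('message') == 'Indent not multiple of 4'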
43004cfd537c801475bf7e3b3c80dee4da18712f
|
backend/hook_manager.py
|
backend/hook_manager.py
|
""" Hook Manager """
class HookManager(object):
""" Registers an manages hooks. Hooks are callback functions called when the backend does a specific action. """
def __init__(self):
self.hooks = {}
def add_hook(self, name, callback):
""" Add a new hook that can be called with the call_hook function """
hook_list = self.hooks.get(name, [])
hook_list.append(callback)
self.hooks[name] = hook_list
def call_hook(self, name, **kwargs):
""" Call all hooks registered with this name """
for func in self.hooks.get(name, []):
func(**kwargs)
|
""" Hook Manager """
class HookManager(object):
""" Registers an manages hooks. Hooks are callback functions called when the backend does a specific action. """
def __init__(self):
self.hooks = {}
def add_hook(self, name, callback):
""" Add a new hook that can be called with the call_hook function """
hook_list = self.hooks.get(name, [])
hook_list.append(callback)
self.hooks[name] = hook_list
def call_hook(self, name, **kwargs):
""" Call all hooks registered with this name. Returns a list of the returns values of the hooks (in the order the hooks were added)"""
return map(lambda x: x(**kwargs), self.hooks.get(name, []))
|
Allow hooks to return values (and simplify the code)
|
Allow hooks to return values (and simplify the code)
|
Python
|
agpl-3.0
|
layus/INGInious,layus/INGInious,layus/INGInious,GuillaumeDerval/INGInious,GuillaumeDerval/INGInious,layus/INGInious,GuillaumeDerval/INGInious,GuillaumeDerval/INGInious
|
""" Hook Manager """
class HookManager(object):
""" Registers an manages hooks. Hooks are callback functions called when the backend does a specific action. """
def __init__(self):
self.hooks = {}
def add_hook(self, name, callback):
""" Add a new hook that can be called with the call_hook function """
hook_list = self.hooks.get(name, [])
hook_list.append(callback)
self.hooks[name] = hook_list
def call_hook(self, name, **kwargs):
+ """ Call all hooks registered with this name. Returns a list of the returns values of the hooks (in the order the hooks were added)"""
+ return map(lambda x: x(**kwargs), self.hooks.get(name, []))
- """ Call all hooks registered with this name """
- for func in self.hooks.get(name, []):
- func(**kwargs)
|
Allow hooks to return values (and simplify the code)
|
## Code Before:
""" Hook Manager """
class HookManager(object):
""" Registers an manages hooks. Hooks are callback functions called when the backend does a specific action. """
def __init__(self):
self.hooks = {}
def add_hook(self, name, callback):
""" Add a new hook that can be called with the call_hook function """
hook_list = self.hooks.get(name, [])
hook_list.append(callback)
self.hooks[name] = hook_list
def call_hook(self, name, **kwargs):
""" Call all hooks registered with this name """
for func in self.hooks.get(name, []):
func(**kwargs)
## Instruction:
Allow hooks to return values (and simplify the code)
## Code After:
""" Hook Manager """
class HookManager(object):
""" Registers an manages hooks. Hooks are callback functions called when the backend does a specific action. """
def __init__(self):
self.hooks = {}
def add_hook(self, name, callback):
""" Add a new hook that can be called with the call_hook function """
hook_list = self.hooks.get(name, [])
hook_list.append(callback)
self.hooks[name] = hook_list
def call_hook(self, name, **kwargs):
""" Call all hooks registered with this name. Returns a list of the returns values of the hooks (in the order the hooks were added)"""
return map(lambda x: x(**kwargs), self.hooks.get(name, []))
|
""" Hook Manager """
class HookManager(object):
""" Registers an manages hooks. Hooks are callback functions called when the backend does a specific action. """
def __init__(self):
self.hooks = {}
def add_hook(self, name, callback):
""" Add a new hook that can be called with the call_hook function """
hook_list = self.hooks.get(name, [])
hook_list.append(callback)
self.hooks[name] = hook_list
def call_hook(self, name, **kwargs):
+ """ Call all hooks registered with this name. Returns a list of the returns values of the hooks (in the order the hooks were added)"""
+ return map(lambda x: x(**kwargs), self.hooks.get(name, []))
- """ Call all hooks registered with this name """
- for func in self.hooks.get(name, []):
- func(**kwargs)
|
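A usage sketch for the new return value (the hook name and the import path are assumptions based on the file location); note that on Python 3 map() is lazy, so wrap the result in list() if it has to be consumed more than once:

from backend.hook_manager import HookManager

manager = HookManager()
manager.add_hook('task_done', lambda task_id: 'logged %s' % task_id)
manager.add_hook('task_done', lambda task_id: 'notified %s' % task_id)
results = list(manager.call_hook('task_done', task_id=42))
assert results == ['logged 42', 'notified 42']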
3ceb39e4bbc4c5de7cbcce9c1ecfe94daa57266e
|
zhihudaily/models.py
|
zhihudaily/models.py
|
from __future__ import absolute_import, unicode_literals
from peewee import Model, IntegerField, CharField
from zhihudaily.configs import Config
class BaseModel(Model):
class Meta:
database = Config.database
class Zhihudaily(BaseModel):
date = IntegerField()
json_news = CharField()
display_date = CharField()
def create_tables():
database.connect()
database.create_tables([Zhihudaily])
|
from __future__ import absolute_import, unicode_literals
from peewee import Model, IntegerField, CharField
from zhihudaily.configs import Config
class BaseModel(Model):
class Meta:
database = Config.database
class Zhihudaily(BaseModel):
date = IntegerField()
json_news = CharField()
display_date = CharField()
def create_tables():
Config.database.connect()
Config.database.create_tables([Zhihudaily])
|
Fix bug when creating the database table
|
Fix bug when creating the database table
|
Python
|
mit
|
lord63/zhihudaily,lord63/zhihudaily,lord63/zhihudaily
|
from __future__ import absolute_import, unicode_literals
from peewee import Model, IntegerField, CharField
from zhihudaily.configs import Config
class BaseModel(Model):
class Meta:
database = Config.database
class Zhihudaily(BaseModel):
date = IntegerField()
json_news = CharField()
display_date = CharField()
def create_tables():
- database.connect()
+ Config.database.connect()
- database.create_tables([Zhihudaily])
+ Config.database.create_tables([Zhihudaily])
|
Fix bug when creating the database table
|
## Code Before:
from __future__ import absolute_import, unicode_literals
from peewee import Model, IntegerField, CharField
from zhihudaily.configs import Config
class BaseModel(Model):
class Meta:
database = Config.database
class Zhihudaily(BaseModel):
date = IntegerField()
json_news = CharField()
display_date = CharField()
def create_tables():
database.connect()
database.create_tables([Zhihudaily])
## Instruction:
Fix bug when creating the database table
## Code After:
from __future__ import absolute_import, unicode_literals
from peewee import Model, IntegerField, CharField
from zhihudaily.configs import Config
class BaseModel(Model):
class Meta:
database = Config.database
class Zhihudaily(BaseModel):
date = IntegerField()
json_news = CharField()
display_date = CharField()
def create_tables():
Config.database.connect()
Config.database.create_tables([Zhihudaily])
|
from __future__ import absolute_import, unicode_literals
from peewee import Model, IntegerField, CharField
from zhihudaily.configs import Config
class BaseModel(Model):
class Meta:
database = Config.database
class Zhihudaily(BaseModel):
date = IntegerField()
json_news = CharField()
display_date = CharField()
def create_tables():
- database.connect()
+ Config.database.connect()
? +++++++
- database.create_tables([Zhihudaily])
+ Config.database.create_tables([Zhihudaily])
? +++++++
|
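The bug being fixed is a plain NameError: models.py never defined a module-level database, only Config.database. A minimal, self-contained reproduction of that failure mode with a stand-in Config (peewee's SqliteDatabase and the in-memory path are our assumptions):

from peewee import SqliteDatabase

class Config(object):
    database = SqliteDatabase(':memory:')  # stand-in for zhihudaily.configs.Config

try:
    database.connect()  # what the old create_tables() effectively did
except NameError as exc:
    print('old code raised:', exc)
Config.database.connect()  # the fixed call path
Config.database.close()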
ee01e4574ec1a365e87c879a01216249f75c0da8
|
src/commoner/registration/admin.py
|
src/commoner/registration/admin.py
|
from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
pass
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
|
from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
list_filter = ('complete',)
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
|
Allow filtering of registrations by complete status.
|
Allow filtering of registrations by complete status.
|
Python
|
agpl-3.0
|
cc-archive/commoner,cc-archive/commoner
|
from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
- pass
+ list_filter = ('complete',)
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
|
Allow filtering of registrations by complete status.
|
## Code Before:
from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
pass
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
## Instruction:
Allow filtering of registrations by complete status.
## Code After:
from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
list_filter = ('complete',)
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
|
from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
- pass
+ list_filter = ('complete',)
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
|
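The same filter pairs naturally with a change-list column so the flag is visible as well as filterable. A small sketch (purely illustrative; '__str__' is used to avoid guessing at other model fields):

from django.contrib import admin
from commoner.registration.models import PartialRegistration

class PartialRegistrationAdmin(admin.ModelAdmin):
    list_display = ('__str__', 'complete')
    list_filter = ('complete',)

admin.site.register(PartialRegistration, PartialRegistrationAdmin)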
a24faf712d8dfba0f6ac9fc295807552dca37ae9
|
custom/inddex/reports/utils.py
|
custom/inddex/reports/utils.py
|
from corehq.apps.reports.datatables import DataTablesColumn, DataTablesHeader
from corehq.apps.reports.generic import GenericTabularReport
from corehq.apps.reports.standard import CustomProjectReport, DatespanMixin
class MultiTabularReport(DatespanMixin, CustomProjectReport, GenericTabularReport):
report_template_path = 'inddex/multi_report.html'
exportable = True
export_only = False
@property
def data_providers(self):
# data providers should supply a title, slug, headers, and rows
return []
@property
def report_context(self):
context = {
'name': self.name,
'export_only': self.export_only
}
if not self.needs_filters:
context['data_providers'] = [{
'title': data_provider.title,
'slug': data_provider.slug,
'headers': DataTablesHeader(
*(DataTablesColumn(header) for header in data_provider.headers),
),
'rows': data_provider.rows,
} for data_provider in self.data_providers]
return context
@property
def export_table(self):
return [
[dp.slug, [dp.headers] + dp.rows]
for dp in self.data_providers
]
|
from itertools import chain
from corehq.apps.reports.datatables import DataTablesColumn, DataTablesHeader
from corehq.apps.reports.generic import GenericTabularReport
from corehq.apps.reports.standard import CustomProjectReport, DatespanMixin
class MultiTabularReport(DatespanMixin, CustomProjectReport, GenericTabularReport):
report_template_path = 'inddex/multi_report.html'
exportable = True
exportable_all = True
export_only = False
@property
def data_providers(self):
# data providers should supply a title, slug, headers, and rows
return []
@property
def report_context(self):
context = {
'name': self.name,
'export_only': self.export_only
}
if not self.needs_filters:
context['data_providers'] = [{
'title': data_provider.title,
'slug': data_provider.slug,
'headers': DataTablesHeader(
*(DataTablesColumn(header) for header in data_provider.headers),
),
'rows': data_provider.rows,
} for data_provider in self.data_providers]
return context
@property
def export_table(self):
return [
[dp.slug, chain([dp.headers], dp.rows)]
for dp in self.data_providers
]
|
Move export to a background process
|
Move export to a background process
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
+ from itertools import chain
+
from corehq.apps.reports.datatables import DataTablesColumn, DataTablesHeader
from corehq.apps.reports.generic import GenericTabularReport
from corehq.apps.reports.standard import CustomProjectReport, DatespanMixin
class MultiTabularReport(DatespanMixin, CustomProjectReport, GenericTabularReport):
report_template_path = 'inddex/multi_report.html'
exportable = True
+ exportable_all = True
export_only = False
@property
def data_providers(self):
# data providers should supply a title, slug, headers, and rows
return []
@property
def report_context(self):
context = {
'name': self.name,
'export_only': self.export_only
}
if not self.needs_filters:
context['data_providers'] = [{
'title': data_provider.title,
'slug': data_provider.slug,
'headers': DataTablesHeader(
*(DataTablesColumn(header) for header in data_provider.headers),
),
'rows': data_provider.rows,
} for data_provider in self.data_providers]
return context
@property
def export_table(self):
return [
- [dp.slug, [dp.headers] + dp.rows]
+ [dp.slug, chain([dp.headers], dp.rows)]
for dp in self.data_providers
]
|
Move export to a background process
|
## Code Before:
from corehq.apps.reports.datatables import DataTablesColumn, DataTablesHeader
from corehq.apps.reports.generic import GenericTabularReport
from corehq.apps.reports.standard import CustomProjectReport, DatespanMixin
class MultiTabularReport(DatespanMixin, CustomProjectReport, GenericTabularReport):
report_template_path = 'inddex/multi_report.html'
exportable = True
export_only = False
@property
def data_providers(self):
# data providers should supply a title, slug, headers, and rows
return []
@property
def report_context(self):
context = {
'name': self.name,
'export_only': self.export_only
}
if not self.needs_filters:
context['data_providers'] = [{
'title': data_provider.title,
'slug': data_provider.slug,
'headers': DataTablesHeader(
*(DataTablesColumn(header) for header in data_provider.headers),
),
'rows': data_provider.rows,
} for data_provider in self.data_providers]
return context
@property
def export_table(self):
return [
[dp.slug, [dp.headers] + dp.rows]
for dp in self.data_providers
]
## Instruction:
Move export to a background process
## Code After:
from itertools import chain
from corehq.apps.reports.datatables import DataTablesColumn, DataTablesHeader
from corehq.apps.reports.generic import GenericTabularReport
from corehq.apps.reports.standard import CustomProjectReport, DatespanMixin
class MultiTabularReport(DatespanMixin, CustomProjectReport, GenericTabularReport):
report_template_path = 'inddex/multi_report.html'
exportable = True
exportable_all = True
export_only = False
@property
def data_providers(self):
# data providers should supply a title, slug, headers, and rows
return []
@property
def report_context(self):
context = {
'name': self.name,
'export_only': self.export_only
}
if not self.needs_filters:
context['data_providers'] = [{
'title': data_provider.title,
'slug': data_provider.slug,
'headers': DataTablesHeader(
*(DataTablesColumn(header) for header in data_provider.headers),
),
'rows': data_provider.rows,
} for data_provider in self.data_providers]
return context
@property
def export_table(self):
return [
[dp.slug, chain([dp.headers], dp.rows)]
for dp in self.data_providers
]
|
+ from itertools import chain
+
from corehq.apps.reports.datatables import DataTablesColumn, DataTablesHeader
from corehq.apps.reports.generic import GenericTabularReport
from corehq.apps.reports.standard import CustomProjectReport, DatespanMixin
class MultiTabularReport(DatespanMixin, CustomProjectReport, GenericTabularReport):
report_template_path = 'inddex/multi_report.html'
exportable = True
+ exportable_all = True
export_only = False
@property
def data_providers(self):
# data providers should supply a title, slug, headers, and rows
return []
@property
def report_context(self):
context = {
'name': self.name,
'export_only': self.export_only
}
if not self.needs_filters:
context['data_providers'] = [{
'title': data_provider.title,
'slug': data_provider.slug,
'headers': DataTablesHeader(
*(DataTablesColumn(header) for header in data_provider.headers),
),
'rows': data_provider.rows,
} for data_provider in self.data_providers]
return context
@property
def export_table(self):
return [
- [dp.slug, [dp.headers] + dp.rows]
? ^^
+ [dp.slug, chain([dp.headers], dp.rows)]
? ++++++ ^ +
for dp in self.data_providers
]
|
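The switch from list concatenation to itertools.chain matters for a background export: rows are produced lazily instead of being copied into a new list, but the resulting iterable can only be walked once. A small, framework-free illustration:

from itertools import chain

headers = ['slug', 'count']
def rows():
    for i in range(3):
        yield ['row-%d' % i, i]

table = chain([headers], rows())
first_pass = list(table)
assert first_pass[0] == headers and len(first_pass) == 4
assert list(table) == []  # already exhausted; do not iterate twice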
71c9b12056de1e1fdcc1effd2fda4c4dd284afab
|
froide/problem/utils.py
|
froide/problem/utils.py
|
from django.core.mail import mail_managers
from django.conf import settings
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
def inform_managers(report):
mail_managers(
_('New problem: {label} [#{reqid}]').format(
label=report.get_kind_display(),
reqid=report.message.request_id
),
'{}\n{}'.format(
report.description,
report.get_absolute_domain_url()
)
)
def inform_user_problem_resolved(report):
if report.auto_submitted or not report.user:
return False
foirequest = report.message.request
subject = _('Problem resolved on your request')
body = render_to_string("problem/email_problem_resolved.txt", {
"user": report.user,
"title": foirequest.title,
"report": report,
"url": report.user.get_autologin_url(
report.message.get_absolute_short_url()
),
"site_name": settings.SITE_NAME
})
report.user.send_mail(subject, body)
return True
|
from django.core.mail import mail_managers
from django.conf import settings
from django.urls import reverse
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
def inform_managers(report):
admin_url = settings.SITE_URL + reverse(
'admin:problem_problemreport_change', args=(report.id,))
mail_managers(
_('New problem: {label} [#{reqid}]').format(
label=report.get_kind_display(),
reqid=report.message.request_id
),
'{}\n\n---\n\n{}\n'.format(
report.description,
report.get_absolute_domain_url(),
admin_url
)
)
def inform_user_problem_resolved(report):
if report.auto_submitted or not report.user:
return False
foirequest = report.message.request
subject = _('Problem resolved on your request')
body = render_to_string("problem/email_problem_resolved.txt", {
"user": report.user,
"title": foirequest.title,
"report": report,
"url": report.user.get_autologin_url(
report.message.get_absolute_short_url()
),
"site_name": settings.SITE_NAME
})
report.user.send_mail(subject, body)
return True
|
Add link to report admin page to report info mail
|
Add link to report admin page to report info mail
|
Python
|
mit
|
stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,fin/froide
|
from django.core.mail import mail_managers
from django.conf import settings
+ from django.urls import reverse
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
def inform_managers(report):
+ admin_url = settings.SITE_URL + reverse(
+ 'admin:problem_problemreport_change', args=(report.id,))
mail_managers(
_('New problem: {label} [#{reqid}]').format(
label=report.get_kind_display(),
reqid=report.message.request_id
),
- '{}\n{}'.format(
+ '{}\n\n---\n\n{}\n'.format(
report.description,
- report.get_absolute_domain_url()
+ report.get_absolute_domain_url(),
+ admin_url
)
)
def inform_user_problem_resolved(report):
if report.auto_submitted or not report.user:
return False
foirequest = report.message.request
subject = _('Problem resolved on your request')
body = render_to_string("problem/email_problem_resolved.txt", {
"user": report.user,
"title": foirequest.title,
"report": report,
"url": report.user.get_autologin_url(
report.message.get_absolute_short_url()
),
"site_name": settings.SITE_NAME
})
report.user.send_mail(subject, body)
return True
|
Add link to report admin page to report info mail
|
## Code Before:
from django.core.mail import mail_managers
from django.conf import settings
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
def inform_managers(report):
mail_managers(
_('New problem: {label} [#{reqid}]').format(
label=report.get_kind_display(),
reqid=report.message.request_id
),
'{}\n{}'.format(
report.description,
report.get_absolute_domain_url()
)
)
def inform_user_problem_resolved(report):
if report.auto_submitted or not report.user:
return False
foirequest = report.message.request
subject = _('Problem resolved on your request')
body = render_to_string("problem/email_problem_resolved.txt", {
"user": report.user,
"title": foirequest.title,
"report": report,
"url": report.user.get_autologin_url(
report.message.get_absolute_short_url()
),
"site_name": settings.SITE_NAME
})
report.user.send_mail(subject, body)
return True
## Instruction:
Add link to report admin page to report info mail
## Code After:
from django.core.mail import mail_managers
from django.conf import settings
from django.urls import reverse
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
def inform_managers(report):
admin_url = settings.SITE_URL + reverse(
'admin:problem_problemreport_change', args=(report.id,))
mail_managers(
_('New problem: {label} [#{reqid}]').format(
label=report.get_kind_display(),
reqid=report.message.request_id
),
'{}\n\n---\n\n{}\n'.format(
report.description,
report.get_absolute_domain_url(),
admin_url
)
)
def inform_user_problem_resolved(report):
if report.auto_submitted or not report.user:
return False
foirequest = report.message.request
subject = _('Problem resolved on your request')
body = render_to_string("problem/email_problem_resolved.txt", {
"user": report.user,
"title": foirequest.title,
"report": report,
"url": report.user.get_autologin_url(
report.message.get_absolute_short_url()
),
"site_name": settings.SITE_NAME
})
report.user.send_mail(subject, body)
return True
|
from django.core.mail import mail_managers
from django.conf import settings
+ from django.urls import reverse
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
def inform_managers(report):
+ admin_url = settings.SITE_URL + reverse(
+ 'admin:problem_problemreport_change', args=(report.id,))
mail_managers(
_('New problem: {label} [#{reqid}]').format(
label=report.get_kind_display(),
reqid=report.message.request_id
),
- '{}\n{}'.format(
+ '{}\n\n---\n\n{}\n'.format(
? +++++++++ ++
report.description,
- report.get_absolute_domain_url()
+ report.get_absolute_domain_url(),
? +
+ admin_url
)
)
def inform_user_problem_resolved(report):
if report.auto_submitted or not report.user:
return False
foirequest = report.message.request
subject = _('Problem resolved on your request')
body = render_to_string("problem/email_problem_resolved.txt", {
"user": report.user,
"title": foirequest.title,
"report": report,
"url": report.user.get_autologin_url(
report.message.get_absolute_short_url()
),
"site_name": settings.SITE_NAME
})
report.user.send_mail(subject, body)
return True
|
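The admin link added above relies on Django's convention of registering each model's change view under the URL name "admin:<app_label>_<model_name>_change". As a hedged illustration (not froide's actual code; it assumes a configured Django project and a project-level SITE_URL setting holding the scheme and host), a generic helper for building an absolute admin change link could look like this:

from django.conf import settings
from django.urls import reverse

def absolute_admin_change_url(obj):
    # The admin registers its change view under the URL name
    # "admin:<app_label>_<model_name>_change"; prefixing the project's
    # SITE_URL turns the path into an absolute link usable in emails.
    path = reverse(
        'admin:{}_{}_change'.format(obj._meta.app_label, obj._meta.model_name),
        args=(obj.pk,),
    )
    return settings.SITE_URL + path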
53c39934e19fdad7926a8ad7833cd1737b47cf58
|
utilities/errors.py
|
utilities/errors.py
|
import os
import simulators
import numpy as np
import json
"""Calculate Errors on the Spectrum.
For a first go using an fixed SNR of 200 for all observations.
"""
def get_snrinfo(star, obs_num, chip):
"""Load SNR info from json file."""
snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json")
with open(snr_file, "r") as f:
snr_data = json.load(f)
try:
return snr_data[str(star)][str(obs_num)][str(chip)]
except KeyError as e:
print("No snr data present for {0}-{1}_{2}".format(star, obs_num, chip))
raise e
def spectrum_error(star, obs_num, chip, error_off=False):
"""Return the spectrum error.
errors = None will perform a normal chi**2 statistic.
"""
if error_off:
errors = None
else:
snr = get_snrinfo(star, obs_num, chip)
if len(snr) == 1:
errors = 1 / np.float(snr[0])
else:
raise NotImplementedError("Haven't checked if an error array can be handled yet.")
return errors
|
import os
import simulators
import numpy as np
import json
import warnings
"""Calculate Errors on the Spectrum.
For a first go using an fixed SNR of 200 for all observations.
"""
def get_snrinfo(star, obs_num, chip):
"""Load SNR info from json file."""
snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json")
with open(snr_file, "r") as f:
snr_data = json.load(f)
try:
return snr_data[str(star)][str(obs_num)][str(chip)]
except KeyError as e:
warnings.warn("No snr data present for {0}-{1}_{2}. "
"Setting error to None instead".format(star, obs_num, chip))
return None
def spectrum_error(star, obs_num, chip, error_off=False):
"""Return the spectrum error.
errors = None will perform a normal chi**2 statistic.
"""
if error_off:
errors = None
else:
snr = get_snrinfo(star, obs_num, chip)
if snr is None:
errors = None
elif len(snr) == 1:
errors = 1 / np.float(snr[0])
else:
raise NotImplementedError("Haven't checked if an error array can be handled yet.")
return errors
|
Handle no snr information in snr file. (for fake simulations mainly)
|
Handle no snr information in snr file. (for fake simulations mainly)
|
Python
|
mit
|
jason-neal/companion_simulations,jason-neal/companion_simulations
|
import os
import simulators
import numpy as np
import json
+ import warnings
"""Calculate Errors on the Spectrum.
For a first go using an fixed SNR of 200 for all observations.
"""
def get_snrinfo(star, obs_num, chip):
"""Load SNR info from json file."""
snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json")
with open(snr_file, "r") as f:
snr_data = json.load(f)
try:
return snr_data[str(star)][str(obs_num)][str(chip)]
except KeyError as e:
- print("No snr data present for {0}-{1}_{2}".format(star, obs_num, chip))
- raise e
+ warnings.warn("No snr data present for {0}-{1}_{2}. "
+ "Setting error to None instead".format(star, obs_num, chip))
+ return None
def spectrum_error(star, obs_num, chip, error_off=False):
"""Return the spectrum error.
errors = None will perform a normal chi**2 statistic.
"""
if error_off:
errors = None
else:
snr = get_snrinfo(star, obs_num, chip)
+ if snr is None:
+ errors = None
- if len(snr) == 1:
+ elif len(snr) == 1:
errors = 1 / np.float(snr[0])
else:
raise NotImplementedError("Haven't checked if an error array can be handled yet.")
return errors
|
Handle no snr information in snr file. (for fake simulations mainly)
|
## Code Before:
import os
import simulators
import numpy as np
import json
"""Calculate Errors on the Spectrum.
For a first go using an fixed SNR of 200 for all observations.
"""
def get_snrinfo(star, obs_num, chip):
"""Load SNR info from json file."""
snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json")
with open(snr_file, "r") as f:
snr_data = json.load(f)
try:
return snr_data[str(star)][str(obs_num)][str(chip)]
except KeyError as e:
print("No snr data present for {0}-{1}_{2}".format(star, obs_num, chip))
raise e
def spectrum_error(star, obs_num, chip, error_off=False):
"""Return the spectrum error.
errors = None will perform a normal chi**2 statistic.
"""
if error_off:
errors = None
else:
snr = get_snrinfo(star, obs_num, chip)
if len(snr) == 1:
errors = 1 / np.float(snr[0])
else:
raise NotImplementedError("Haven't checked if an error array can be handled yet.")
return errors
## Instruction:
Handle no snr information in snr file. (for fake simulations mainly)
## Code After:
import os
import simulators
import numpy as np
import json
import warnings
"""Calculate Errors on the Spectrum.
For a first go using an fixed SNR of 200 for all observations.
"""
def get_snrinfo(star, obs_num, chip):
"""Load SNR info from json file."""
snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json")
with open(snr_file, "r") as f:
snr_data = json.load(f)
try:
return snr_data[str(star)][str(obs_num)][str(chip)]
except KeyError as e:
warnings.warn("No snr data present for {0}-{1}_{2}. "
"Setting error to None instead".format(star, obs_num, chip))
return None
def spectrum_error(star, obs_num, chip, error_off=False):
"""Return the spectrum error.
errors = None will perform a normal chi**2 statistic.
"""
if error_off:
errors = None
else:
snr = get_snrinfo(star, obs_num, chip)
if snr is None:
errors = None
elif len(snr) == 1:
errors = 1 / np.float(snr[0])
else:
raise NotImplementedError("Haven't checked if an error array can be handled yet.")
return errors
|
import os
import simulators
import numpy as np
import json
+ import warnings
"""Calculate Errors on the Spectrum.
For a first go using an fixed SNR of 200 for all observations.
"""
def get_snrinfo(star, obs_num, chip):
"""Load SNR info from json file."""
snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json")
with open(snr_file, "r") as f:
snr_data = json.load(f)
try:
return snr_data[str(star)][str(obs_num)][str(chip)]
except KeyError as e:
- print("No snr data present for {0}-{1}_{2}".format(star, obs_num, chip))
- raise e
+ warnings.warn("No snr data present for {0}-{1}_{2}. "
+ "Setting error to None instead".format(star, obs_num, chip))
+ return None
def spectrum_error(star, obs_num, chip, error_off=False):
"""Return the spectrum error.
errors = None will perform a normal chi**2 statistic.
"""
if error_off:
errors = None
else:
snr = get_snrinfo(star, obs_num, chip)
+ if snr is None:
+ errors = None
- if len(snr) == 1:
+ elif len(snr) == 1:
? ++
errors = 1 / np.float(snr[0])
else:
raise NotImplementedError("Haven't checked if an error array can be handled yet.")
return errors
|
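The pattern in this commit, downgrading a hard KeyError to a warning plus None, lets callers treat missing SNR data as "no error array" and fall back to an unweighted chi-squared. A minimal standalone sketch of that pattern (dictionary contents and names are placeholders, not the project's real data):

import warnings

def lookup_snr(snr_data, star, obs_num, chip):
    # Missing entries are reported as a warning and mapped to None, which
    # downstream code interprets as "use an unweighted chi^2".
    try:
        return snr_data[str(star)][str(obs_num)][str(chip)]
    except KeyError:
        warnings.warn("No SNR entry for {0}-{1}_{2}; returning None".format(
            star, obs_num, chip))
        return None

snrs = {"star_a": {"1": {"chip_1": [200]}}}
print(lookup_snr(snrs, "star_a", 1, "chip_1"))   # [200]
print(lookup_snr(snrs, "star_a", 2, "chip_1"))   # warns, then prints None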
20224e4fe8b93dee087dd7a455f9709b9795a026
|
app/models.py
|
app/models.py
|
from app import database
class Talk(database.Model):
id = database.Column(database.Integer, primary_key=True, autoincrement=True)
title = database.Column(database.String(128), nullable=False)
description = database.Column(database.String(512))
speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
nullable=False)
liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
def __repr__(self):
return '<Talk %r>' % self.id
class Speaker(database.Model):
facebook_id = database.Column(database.BIGINT, primary_key=True)
name = database.Column(database.String(128), nullable=False)
talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
def __repr__(self):
return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
def __repr__(self):
liker = repr(self.liker_facebook_id)
talk = repr(self.talk_id)
return '<Liker_Talk %r>' % ', '.join((liker, talk))
|
from app import database
class Talk(database.Model):
id = database.Column(database.Integer, primary_key=True, autoincrement=True)
title = database.Column(database.String(128), unique=True, nullable=False)
description = database.Column(database.String(512))
speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
nullable=False)
liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
def __repr__(self):
return '<Talk %r>' % self.id
class Speaker(database.Model):
facebook_id = database.Column(database.BIGINT, primary_key=True)
name = database.Column(database.String(128), nullable=False)
talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
def __repr__(self):
return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
def __repr__(self):
liker = repr(self.liker_facebook_id)
talk = repr(self.talk_id)
return '<Liker_Talk %r>' % ', '.join((liker, talk))
|
Make title unique Talk property
|
Make title unique Talk property
|
Python
|
mit
|
Stark-Mountain/meetup-facebook-bot,Stark-Mountain/meetup-facebook-bot
|
from app import database
class Talk(database.Model):
id = database.Column(database.Integer, primary_key=True, autoincrement=True)
- title = database.Column(database.String(128), nullable=False)
+ title = database.Column(database.String(128), unique=True, nullable=False)
description = database.Column(database.String(512))
speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
nullable=False)
liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
def __repr__(self):
return '<Talk %r>' % self.id
class Speaker(database.Model):
facebook_id = database.Column(database.BIGINT, primary_key=True)
name = database.Column(database.String(128), nullable=False)
talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
def __repr__(self):
return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
def __repr__(self):
liker = repr(self.liker_facebook_id)
talk = repr(self.talk_id)
return '<Liker_Talk %r>' % ', '.join((liker, talk))
|
Make title unique Talk property
|
## Code Before:
from app import database
class Talk(database.Model):
id = database.Column(database.Integer, primary_key=True, autoincrement=True)
title = database.Column(database.String(128), nullable=False)
description = database.Column(database.String(512))
speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
nullable=False)
liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
def __repr__(self):
return '<Talk %r>' % self.id
class Speaker(database.Model):
facebook_id = database.Column(database.BIGINT, primary_key=True)
name = database.Column(database.String(128), nullable=False)
talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
def __repr__(self):
return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
def __repr__(self):
liker = repr(self.liker_facebook_id)
talk = repr(self.talk_id)
return '<Liker_Talk %r>' % ', '.join((liker, talk))
## Instruction:
Make title unique Talk property
## Code After:
from app import database
class Talk(database.Model):
id = database.Column(database.Integer, primary_key=True, autoincrement=True)
title = database.Column(database.String(128), unique=True, nullable=False)
description = database.Column(database.String(512))
speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
nullable=False)
liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
def __repr__(self):
return '<Talk %r>' % self.id
class Speaker(database.Model):
facebook_id = database.Column(database.BIGINT, primary_key=True)
name = database.Column(database.String(128), nullable=False)
talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
def __repr__(self):
return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
def __repr__(self):
liker = repr(self.liker_facebook_id)
talk = repr(self.talk_id)
return '<Liker_Talk %r>' % ', '.join((liker, talk))
|
from app import database
class Talk(database.Model):
id = database.Column(database.Integer, primary_key=True, autoincrement=True)
- title = database.Column(database.String(128), nullable=False)
+ title = database.Column(database.String(128), unique=True, nullable=False)
? +++++++++++++
description = database.Column(database.String(512))
speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
nullable=False)
liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
def __repr__(self):
return '<Talk %r>' % self.id
class Speaker(database.Model):
facebook_id = database.Column(database.BIGINT, primary_key=True)
name = database.Column(database.String(128), nullable=False)
talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
def __repr__(self):
return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
def __repr__(self):
liker = repr(self.liker_facebook_id)
talk = repr(self.talk_id)
return '<Liker_Talk %r>' % ', '.join((liker, talk))
|
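Adding unique=True means the constraint is enforced by the database, so a duplicate title surfaces as an IntegrityError at flush or commit time rather than being silently accepted. A minimal sketch using plain SQLAlchemy (not the Flask-SQLAlchemy wrapper used above) shows the failure mode and the usual rollback handling:

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class Talk(Base):
    __tablename__ = 'talk'
    id = Column(Integer, primary_key=True)
    title = Column(String(128), unique=True, nullable=False)

engine = create_engine('sqlite://')       # in-memory database for the demo
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Talk(title='Intro to bots'))
    session.commit()
    session.add(Talk(title='Intro to bots'))   # violates the UNIQUE constraint
    try:
        session.commit()
    except IntegrityError:
        session.rollback()                     # duplicate rejected by the DB
        print('duplicate title rejected')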
535ac4c6eae416461e11f33c1a1ef67e92c73914
|
tests/test_exception_wrapping.py
|
tests/test_exception_wrapping.py
|
import safe
def test_simple_exception():
class MockReponse(object):
def json(self):
return {'status': False,
'method': 'synchronize',
'module': 'cluster',
'error': {'message': 'Example error'}}
exception = safe.library.raise_from_json(MockReponse())
assert str(exception) == 'Example error'
|
import safe
class MockResponse(object):
def __init__(self, data):
self.data = data
def json(self):
return self.data
def test_basic_exception():
error_message = 'Example error'
response = MockResponse({
'status': False,
'method': 'synchronize',
'module': 'cluster',
'error': {'message': error_message}
})
exception = safe.library.raise_from_json(response)
assert str(exception) == error_message
def test_commit_failed_exception():
error_message = 'Default ipv4 gateway is not on eth0 subnet'
response = MockResponse({
'status': False,
'type': 'configuration',
'method': 'smartapply',
'module': 'nsc',
'error': {
'message': 'Apply configuration failed.',
'reason': [{
'url': '/SAFe/sng_network_config/modify/network',
'obj_type': 'configuration',
'type': 'ERROR',
'description': error_message,
'module': 'network'
}]
}
})
exception = safe.library.raise_from_json(response)
assert isinstance(exception, safe.CommitFailed)
assert str(exception) == 'Apply changes failed: ' + error_message
assert len(exception.reasons) == 1
reason = exception.reasons[0]
assert reason.obj == 'configuration'
assert reason.module == 'network'
assert reason.description == error_message
|
Add a commit failed test
|
Add a commit failed test
|
Python
|
mpl-2.0
|
sangoma/safepy2,leonardolang/safepy2
|
import safe
- def test_simple_exception():
- class MockReponse(object):
+ class MockResponse(object):
+ def __init__(self, data):
+ self.data = data
- def json(self):
- return {'status': False,
- 'method': 'synchronize',
- 'module': 'cluster',
- 'error': {'message': 'Example error'}}
- exception = safe.library.raise_from_json(MockReponse())
- assert str(exception) == 'Example error'
+ def json(self):
+ return self.data
+
+ def test_basic_exception():
+ error_message = 'Example error'
+ response = MockResponse({
+ 'status': False,
+ 'method': 'synchronize',
+ 'module': 'cluster',
+ 'error': {'message': error_message}
+ })
+
+ exception = safe.library.raise_from_json(response)
+ assert str(exception) == error_message
+
+
+ def test_commit_failed_exception():
+ error_message = 'Default ipv4 gateway is not on eth0 subnet'
+ response = MockResponse({
+ 'status': False,
+ 'type': 'configuration',
+ 'method': 'smartapply',
+ 'module': 'nsc',
+ 'error': {
+ 'message': 'Apply configuration failed.',
+ 'reason': [{
+ 'url': '/SAFe/sng_network_config/modify/network',
+ 'obj_type': 'configuration',
+ 'type': 'ERROR',
+ 'description': error_message,
+ 'module': 'network'
+ }]
+ }
+ })
+
+ exception = safe.library.raise_from_json(response)
+ assert isinstance(exception, safe.CommitFailed)
+ assert str(exception) == 'Apply changes failed: ' + error_message
+ assert len(exception.reasons) == 1
+
+ reason = exception.reasons[0]
+ assert reason.obj == 'configuration'
+ assert reason.module == 'network'
+ assert reason.description == error_message
+
|
Add a commit failed test
|
## Code Before:
import safe
def test_simple_exception():
class MockReponse(object):
def json(self):
return {'status': False,
'method': 'synchronize',
'module': 'cluster',
'error': {'message': 'Example error'}}
exception = safe.library.raise_from_json(MockReponse())
assert str(exception) == 'Example error'
## Instruction:
Add a commit failed test
## Code After:
import safe
class MockResponse(object):
def __init__(self, data):
self.data = data
def json(self):
return self.data
def test_basic_exception():
error_message = 'Example error'
response = MockResponse({
'status': False,
'method': 'synchronize',
'module': 'cluster',
'error': {'message': error_message}
})
exception = safe.library.raise_from_json(response)
assert str(exception) == error_message
def test_commit_failed_exception():
error_message = 'Default ipv4 gateway is not on eth0 subnet'
response = MockResponse({
'status': False,
'type': 'configuration',
'method': 'smartapply',
'module': 'nsc',
'error': {
'message': 'Apply configuration failed.',
'reason': [{
'url': '/SAFe/sng_network_config/modify/network',
'obj_type': 'configuration',
'type': 'ERROR',
'description': error_message,
'module': 'network'
}]
}
})
exception = safe.library.raise_from_json(response)
assert isinstance(exception, safe.CommitFailed)
assert str(exception) == 'Apply changes failed: ' + error_message
assert len(exception.reasons) == 1
reason = exception.reasons[0]
assert reason.obj == 'configuration'
assert reason.module == 'network'
assert reason.description == error_message
|
import safe
- def test_simple_exception():
- class MockReponse(object):
? ----
+ class MockResponse(object):
? +
+ def __init__(self, data):
+ self.data = data
- def json(self):
- return {'status': False,
- 'method': 'synchronize',
- 'module': 'cluster',
- 'error': {'message': 'Example error'}}
+ def json(self):
+ return self.data
+
+
+ def test_basic_exception():
+ error_message = 'Example error'
+ response = MockResponse({
+ 'status': False,
+ 'method': 'synchronize',
+ 'module': 'cluster',
+ 'error': {'message': error_message}
+ })
+
- exception = safe.library.raise_from_json(MockReponse())
? ^^^^^ - -
+ exception = safe.library.raise_from_json(response)
? ^ +
- assert str(exception) == 'Example error'
? --------- ^
+ assert str(exception) == error_message
? ^^^^^^^^
+
+
+ def test_commit_failed_exception():
+ error_message = 'Default ipv4 gateway is not on eth0 subnet'
+ response = MockResponse({
+ 'status': False,
+ 'type': 'configuration',
+ 'method': 'smartapply',
+ 'module': 'nsc',
+ 'error': {
+ 'message': 'Apply configuration failed.',
+ 'reason': [{
+ 'url': '/SAFe/sng_network_config/modify/network',
+ 'obj_type': 'configuration',
+ 'type': 'ERROR',
+ 'description': error_message,
+ 'module': 'network'
+ }]
+ }
+ })
+
+ exception = safe.library.raise_from_json(response)
+ assert isinstance(exception, safe.CommitFailed)
+ assert str(exception) == 'Apply changes failed: ' + error_message
+ assert len(exception.reasons) == 1
+
+ reason = exception.reasons[0]
+ assert reason.obj == 'configuration'
+ assert reason.module == 'network'
+ assert reason.description == error_message
|
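Hoisting the fake response out of the test body and parameterizing it with its payload is a common stub pattern: each test then only states the JSON it cares about. A generic, hedged sketch of the same idea (names are illustrative, and the function under test is a stand-in):

class FakeResponse(object):
    """Minimal stand-in for an HTTP response object: returns canned JSON."""
    def __init__(self, data):
        self.data = data

    def json(self):
        return self.data

def parse_status(response):
    return response.json()['status']

def test_parse_status_true():
    assert parse_status(FakeResponse({'status': True})) is True

def test_parse_status_false():
    assert parse_status(FakeResponse({'status': False})) is False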
8b6d10e8339510bbc745a3167fd1d5a60422b370
|
tests/test_planner.py
|
tests/test_planner.py
|
import cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
|
import cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
def test_init_pieces(self):
self.assertEqual(len(self.planner.pieces_needed), 3)
self.assertEqual(self.planner.pieces_needed[0].length, 75)
def test_init_stock(self):
self.assertEqual(len(self.planner.stock_sizes), 3)
self.assertEqual(self.planner.stock_sizes, [50, 80, 120])
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
|
Add tests for planner init
|
Add tests for planner init
|
Python
|
mit
|
alanc10n/py-cutplanner
|
import cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
+
+ def test_init_pieces(self):
+ self.assertEqual(len(self.planner.pieces_needed), 3)
+ self.assertEqual(self.planner.pieces_needed[0].length, 75)
+
+ def test_init_stock(self):
+ self.assertEqual(len(self.planner.stock_sizes), 3)
+ self.assertEqual(self.planner.stock_sizes, [50, 80, 120])
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
|
Add tests for planner init
|
## Code Before:
import cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
## Instruction:
Add tests for planner init
## Code After:
import cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
def test_init_pieces(self):
self.assertEqual(len(self.planner.pieces_needed), 3)
self.assertEqual(self.planner.pieces_needed[0].length, 75)
def test_init_stock(self):
self.assertEqual(len(self.planner.stock_sizes), 3)
self.assertEqual(self.planner.stock_sizes, [50, 80, 120])
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
|
import cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
+
+ def test_init_pieces(self):
+ self.assertEqual(len(self.planner.pieces_needed), 3)
+ self.assertEqual(self.planner.pieces_needed[0].length, 75)
+
+ def test_init_stock(self):
+ self.assertEqual(len(self.planner.stock_sizes), 3)
+ self.assertEqual(self.planner.stock_sizes, [50, 80, 120])
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
|
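The new test_init_pieces expects pieces_needed[0].length to be 75 even though 75 is the last element of the input list, which only holds if the constructor orders pieces by length, longest first. As an assumption-labeled sketch (one constructor shape that would satisfy these assertions; the real cutplanner.Planner may be organized differently):

import collections

Piece = collections.namedtuple('Piece', ['id', 'length'])

class Planner(object):
    def __init__(self, sizes, needed, loss=0.0):
        self.stock_sizes = sorted(sizes)
        self.loss = loss
        # Longest pieces first, which is what pieces_needed[0].length == 75
        # in the test above relies on.
        self.pieces_needed = [
            Piece(i, length)
            for i, length in enumerate(sorted(needed, reverse=True))
        ]

planner = Planner([50, 80, 120], [10, 25, 75], loss=0.25)
assert planner.pieces_needed[0].length == 75
assert planner.stock_sizes == [50, 80, 120]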
00ce59d43c4208846234652a0746f048836493f2
|
src/ggrc/services/signals.py
|
src/ggrc/services/signals.py
|
from blinker import Namespace
class Signals(object):
signals = Namespace()
custom_attribute_changed = signals.signal(
"Custom Attribute updated",
"""
Indicates that a custom attribute was successfully saved to database.
:obj: The model instance
:value: New custom attribute value
:service: The instance of model handling the Custom Attribute update
operation
""",
)
|
from blinker import Namespace
class Signals(object):
signals = Namespace()
custom_attribute_changed = signals.signal(
"Custom Attribute updated",
"""
Indicates that a custom attribute was successfully saved to database.
:obj: The model instance
:value: New custom attribute value
:service: The instance of model handling the Custom Attribute update
operation
""",
)
|
Fix new CA signal message
|
Fix new CA signal message
|
Python
|
apache-2.0
|
selahssea/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,josthkko/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,j0gurt/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,prasannav7/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,edofic/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,prasannav7/ggrc-core,NejcZupec/ggrc-core
|
from blinker import Namespace
+
class Signals(object):
signals = Namespace()
custom_attribute_changed = signals.signal(
- "Custom Attribute updated",
+ "Custom Attribute updated",
- """
+ """
- Indicates that a custom attribute was successfully saved to database.
+ Indicates that a custom attribute was successfully saved to database.
- :obj: The model instance
+ :obj: The model instance
- :value: New custom attribute value
+ :value: New custom attribute value
- :service: The instance of model handling the Custom Attribute update
+ :service: The instance of model handling the Custom Attribute update
- operation
+ operation
- """,
+ """,
)
-
|
Fix new CA signal message
|
## Code Before:
from blinker import Namespace
class Signals(object):
signals = Namespace()
custom_attribute_changed = signals.signal(
"Custom Attribute updated",
"""
Indicates that a custom attribute was successfully saved to database.
:obj: The model instance
:value: New custom attribute value
:service: The instance of model handling the Custom Attribute update
operation
""",
)
## Instruction:
Fix new CA signal message
## Code After:
from blinker import Namespace
class Signals(object):
signals = Namespace()
custom_attribute_changed = signals.signal(
"Custom Attribute updated",
"""
Indicates that a custom attribute was successfully saved to database.
:obj: The model instance
:value: New custom attribute value
:service: The instance of model handling the Custom Attribute update
operation
""",
)
|
from blinker import Namespace
+
class Signals(object):
signals = Namespace()
custom_attribute_changed = signals.signal(
- "Custom Attribute updated",
+ "Custom Attribute updated",
? ++
- """
+ """
? ++
- Indicates that a custom attribute was successfully saved to database.
+ Indicates that a custom attribute was successfully saved to database.
? ++
- :obj: The model instance
+ :obj: The model instance
? ++
- :value: New custom attribute value
+ :value: New custom attribute value
? ++
- :service: The instance of model handling the Custom Attribute update
+ :service: The instance of model handling the Custom Attribute update
? ++
- operation
+ operation
? ++
- """,
+ """,
? ++
)
-
|
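For context on how a blinker signal declared this way is typically consumed: receivers connect to it and the sender publishes keyword payloads. A minimal standalone example (the handler and payload names are illustrative, not ggrc's):

from blinker import Namespace

signals = Namespace()
custom_attribute_changed = signals.signal("Custom Attribute updated")

def log_change(sender, value=None, service=None):
    # Receivers get the sender positionally and any extra data passed to
    # send() as keyword arguments.
    print("custom attribute on %r set to %r via %r" % (sender, value, service))

custom_attribute_changed.connect(log_change)
custom_attribute_changed.send("model-instance", value=42, service="import-api")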
2b7da7ba1ae2eac069762c221c279aa9f204775d
|
praw/exceptions.py
|
praw/exceptions.py
|
class PRAWException(Exception):
"""The base PRAW Exception that all other exception classes extend."""
class APIException(PRAWException):
"""Indicate exception that involve responses from Reddit's API."""
def __init__(self, error_type, message, field):
"""Initialize an instance of APIException.
:param error_type: The error type set on Reddit's end.
:param message: The associated message for the error.
:param field: The input field associated with the error if available.
.. note: Calling `str()` on the instance returns `unicode_escape`d
ASCII string because the message may be localized and may contain
UNICODE characters. If you want a non-escaped message, access
the `message` atribute on the instance.
"""
error_str = u'{}: \'{}\''.format(error_type, message)
if field:
error_str += u' on field \'{}\''.format(field)
error_str = error_str.encode('unicode_escape').decode('ascii')
super(APIException, self).__init__(error_str)
self.error_type = error_type
self.message = message
self.field = field
class ClientException(PRAWException):
"""Indicate exceptions that don't involve interaction with Reddit's API."""
|
class PRAWException(Exception):
"""The base PRAW Exception that all other exception classes extend."""
class APIException(PRAWException):
"""Indicate exception that involve responses from Reddit's API."""
def __init__(self, error_type, message, field):
"""Initialize an instance of APIException.
:param error_type: The error type set on Reddit's end.
:param message: The associated message for the error.
:param field: The input field associated with the error if available.
.. note:: Calling ``str()`` on the instance returns
``unicode_escape``-d ASCII string because the message may be
localized and may contain UNICODE characters. If you want a
non-escaped message, access the ``message`` attribute on
the instance.
"""
error_str = u'{}: \'{}\''.format(error_type, message)
if field:
error_str += u' on field \'{}\''.format(field)
error_str = error_str.encode('unicode_escape').decode('ascii')
super(APIException, self).__init__(error_str)
self.error_type = error_type
self.message = message
self.field = field
class ClientException(PRAWException):
"""Indicate exceptions that don't involve interaction with Reddit's API."""
|
Fix a few Sphinx typos
|
Fix a few Sphinx typos
* `.. note:` -> `.. note::` to prevent the `note` from being interpreted as a comment, which wouldn't show up when the docs are rendered.
* Double backticks for the code bits.
* Correct typo ("atribute" -> "attribute").
* Sphinx doesn't like characters immediately after the backticks, so add a hyphen in to prevent it from being rendered incorrectly.
|
Python
|
bsd-2-clause
|
gschizas/praw,praw-dev/praw,13steinj/praw,leviroth/praw,gschizas/praw,praw-dev/praw,13steinj/praw,leviroth/praw
|
class PRAWException(Exception):
"""The base PRAW Exception that all other exception classes extend."""
class APIException(PRAWException):
"""Indicate exception that involve responses from Reddit's API."""
def __init__(self, error_type, message, field):
"""Initialize an instance of APIException.
:param error_type: The error type set on Reddit's end.
:param message: The associated message for the error.
:param field: The input field associated with the error if available.
- .. note: Calling `str()` on the instance returns `unicode_escape`d
+ .. note:: Calling ``str()`` on the instance returns
- ASCII string because the message may be localized and may contain
- UNICODE characters. If you want a non-escaped message, access
- the `message` atribute on the instance.
+ ``unicode_escape``-d ASCII string because the message may be
+ localized and may contain UNICODE characters. If you want a
+ non-escaped message, access the ``message`` attribute on
+ the instance.
"""
error_str = u'{}: \'{}\''.format(error_type, message)
if field:
error_str += u' on field \'{}\''.format(field)
error_str = error_str.encode('unicode_escape').decode('ascii')
super(APIException, self).__init__(error_str)
self.error_type = error_type
self.message = message
self.field = field
class ClientException(PRAWException):
"""Indicate exceptions that don't involve interaction with Reddit's API."""
|
Fix a few Sphinx typos
|
## Code Before:
class PRAWException(Exception):
"""The base PRAW Exception that all other exception classes extend."""
class APIException(PRAWException):
"""Indicate exception that involve responses from Reddit's API."""
def __init__(self, error_type, message, field):
"""Initialize an instance of APIException.
:param error_type: The error type set on Reddit's end.
:param message: The associated message for the error.
:param field: The input field associated with the error if available.
.. note: Calling `str()` on the instance returns `unicode_escape`d
ASCII string because the message may be localized and may contain
UNICODE characters. If you want a non-escaped message, access
the `message` atribute on the instance.
"""
error_str = u'{}: \'{}\''.format(error_type, message)
if field:
error_str += u' on field \'{}\''.format(field)
error_str = error_str.encode('unicode_escape').decode('ascii')
super(APIException, self).__init__(error_str)
self.error_type = error_type
self.message = message
self.field = field
class ClientException(PRAWException):
"""Indicate exceptions that don't involve interaction with Reddit's API."""
## Instruction:
Fix a few Sphinx typos
## Code After:
class PRAWException(Exception):
"""The base PRAW Exception that all other exception classes extend."""
class APIException(PRAWException):
"""Indicate exception that involve responses from Reddit's API."""
def __init__(self, error_type, message, field):
"""Initialize an instance of APIException.
:param error_type: The error type set on Reddit's end.
:param message: The associated message for the error.
:param field: The input field associated with the error if available.
.. note:: Calling ``str()`` on the instance returns
``unicode_escape``-d ASCII string because the message may be
localized and may contain UNICODE characters. If you want a
non-escaped message, access the ``message`` attribute on
the instance.
"""
error_str = u'{}: \'{}\''.format(error_type, message)
if field:
error_str += u' on field \'{}\''.format(field)
error_str = error_str.encode('unicode_escape').decode('ascii')
super(APIException, self).__init__(error_str)
self.error_type = error_type
self.message = message
self.field = field
class ClientException(PRAWException):
"""Indicate exceptions that don't involve interaction with Reddit's API."""
|
class PRAWException(Exception):
"""The base PRAW Exception that all other exception classes extend."""
class APIException(PRAWException):
"""Indicate exception that involve responses from Reddit's API."""
def __init__(self, error_type, message, field):
"""Initialize an instance of APIException.
:param error_type: The error type set on Reddit's end.
:param message: The associated message for the error.
:param field: The input field associated with the error if available.
- .. note: Calling `str()` on the instance returns `unicode_escape`d
? ------------------
+ .. note:: Calling ``str()`` on the instance returns
? + + +
- ASCII string because the message may be localized and may contain
- UNICODE characters. If you want a non-escaped message, access
- the `message` atribute on the instance.
+ ``unicode_escape``-d ASCII string because the message may be
+ localized and may contain UNICODE characters. If you want a
+ non-escaped message, access the ``message`` attribute on
+ the instance.
"""
error_str = u'{}: \'{}\''.format(error_type, message)
if field:
error_str += u' on field \'{}\''.format(field)
error_str = error_str.encode('unicode_escape').decode('ascii')
super(APIException, self).__init__(error_str)
self.error_type = error_type
self.message = message
self.field = field
class ClientException(PRAWException):
"""Indicate exceptions that don't involve interaction with Reddit's API."""
|
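The double colon is what matters here: ".. note::" is a reST directive, while ".. note:" with a single colon is parsed as a comment and silently dropped from the rendered docs. A small, hedged docstring sketch showing the corrected form together with double-backtick inline literals (the function itself is only a placeholder):

def escape_message(message):
    """Return ``message`` encoded with ``unicode_escape``.

    .. note:: The double colon makes this a directive; with a single colon
       the whole block would be treated as a reST comment and omitted from
       the rendered documentation.
    """
    return message.encode('unicode_escape').decode('ascii')

print(escape_message(u'ölwechsel'))   # prints \xf6lwechsel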
ffbe699a8435dd0abfb43a37c8528257cdaf386d
|
pymogilefs/request.py
|
pymogilefs/request.py
|
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
class Request:
def __init__(self, config, **kwargs):
self.config = config
self._kwargs = kwargs or {}
def __bytes__(self):
kwargs = urlencode(self._kwargs)
return ('%s %s\r\n' % (self.config.COMMAND, kwargs)).encode('utf-8')
|
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
class Request:
def __init__(self, config, **kwargs):
self.config = config
self._kwargs = kwargs or {}
def __bytes__(self):
kwargs = urlencode(self._kwargs)
return ('%s %s\r\n' % (self.config.COMMAND, kwargs)).encode('utf-8')
# Python 2.7 compatibility
def __str__(self):
return self.__bytes__().decode()
|
Add __str__/__bytes__ Python 2.7 compatibility
|
Add __str__/__bytes__ Python 2.7 compatibility
|
Python
|
mit
|
bwind/pymogilefs,bwind/pymogilefs
|
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
class Request:
def __init__(self, config, **kwargs):
self.config = config
self._kwargs = kwargs or {}
def __bytes__(self):
kwargs = urlencode(self._kwargs)
return ('%s %s\r\n' % (self.config.COMMAND, kwargs)).encode('utf-8')
+ # Python 2.7 compatibility
+ def __str__(self):
+ return self.__bytes__().decode()
+
|
Add __str__/__bytes__ Python 2.7 compatibility
|
## Code Before:
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
class Request:
def __init__(self, config, **kwargs):
self.config = config
self._kwargs = kwargs or {}
def __bytes__(self):
kwargs = urlencode(self._kwargs)
return ('%s %s\r\n' % (self.config.COMMAND, kwargs)).encode('utf-8')
## Instruction:
Add __str__/__bytes__ Python 2.7 compatibility
## Code After:
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
class Request:
def __init__(self, config, **kwargs):
self.config = config
self._kwargs = kwargs or {}
def __bytes__(self):
kwargs = urlencode(self._kwargs)
return ('%s %s\r\n' % (self.config.COMMAND, kwargs)).encode('utf-8')
# Python 2.7 compatibility
def __str__(self):
return self.__bytes__().decode()
|
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
class Request:
def __init__(self, config, **kwargs):
self.config = config
self._kwargs = kwargs or {}
def __bytes__(self):
kwargs = urlencode(self._kwargs)
return ('%s %s\r\n' % (self.config.COMMAND, kwargs)).encode('utf-8')
+
+ # Python 2.7 compatibility
+ def __str__(self):
+ return self.__bytes__().decode()
|
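The reason a mirrored __str__ helps: Python 3 routes str() to __str__ and bytes() to __bytes__, but on Python 2 bytes is an alias of str and __bytes__ is never consulted, so without __str__ the wire format is unreachable via str(). A minimal sketch of the same pattern on a generic command object (the verb and payload are placeholders, not MogileFS commands):

class Command(object):
    def __init__(self, verb, payload):
        self.verb = verb
        self.payload = payload

    def __bytes__(self):
        # Wire format: the bytes actually written to the socket.
        return ('%s %s\r\n' % (self.verb, self.payload)).encode('utf-8')

    # Python 2's str() only consults __str__, so mirroring __bytes__ here
    # keeps str(cmd) meaningful on 2.7 while Python 3 keeps the text/bytes
    # split intact.
    def __str__(self):
        return self.__bytes__().decode('utf-8')

cmd = Command('get_paths', 'domain=example&key=motd')
print(repr(bytes(cmd)))   # b'get_paths domain=example&key=motd\r\n' on Python 3
print(repr(str(cmd)))     # 'get_paths domain=example&key=motd\r\n'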
7c47a2960d644b34ce3ff569042fb5e965270e8c
|
netsecus/task.py
|
netsecus/task.py
|
from __future__ import unicode_literals
class Task(object):
def __init__(self, taskID, sheetID, name, description, maxPoints, reachedPoints=0):
self.id = taskID
self.sheetID = sheetID
self.name = name
self.description = description
self.maxPoints = maxPoints
self.reachedPoints = reachedPoints
|
from __future__ import unicode_literals
class Task(object):
def __init__(self, taskID, sheetID, name, description, maxPoints):
self.id = taskID
self.sheetID = sheetID
self.name = name
self.description = description
self.maxPoints = maxPoints
|
Remove unneeded 'reachedPoints' variable from Task class
|
Remove unneeded 'reachedPoints' variable from Task class
|
Python
|
mit
|
hhucn/netsec-uebungssystem,hhucn/netsec-uebungssystem,hhucn/netsec-uebungssystem
|
from __future__ import unicode_literals
class Task(object):
- def __init__(self, taskID, sheetID, name, description, maxPoints, reachedPoints=0):
+ def __init__(self, taskID, sheetID, name, description, maxPoints):
self.id = taskID
self.sheetID = sheetID
self.name = name
self.description = description
self.maxPoints = maxPoints
- self.reachedPoints = reachedPoints
|
Remove unneeded 'reachedPoints' variable from Task class
|
## Code Before:
from __future__ import unicode_literals
class Task(object):
def __init__(self, taskID, sheetID, name, description, maxPoints, reachedPoints=0):
self.id = taskID
self.sheetID = sheetID
self.name = name
self.description = description
self.maxPoints = maxPoints
self.reachedPoints = reachedPoints
## Instruction:
Remove unneeded 'reachedPoints' variable from Task class
## Code After:
from __future__ import unicode_literals
class Task(object):
def __init__(self, taskID, sheetID, name, description, maxPoints):
self.id = taskID
self.sheetID = sheetID
self.name = name
self.description = description
self.maxPoints = maxPoints
|
from __future__ import unicode_literals
class Task(object):
- def __init__(self, taskID, sheetID, name, description, maxPoints, reachedPoints=0):
? -----------------
+ def __init__(self, taskID, sheetID, name, description, maxPoints):
self.id = taskID
self.sheetID = sheetID
self.name = name
self.description = description
self.maxPoints = maxPoints
- self.reachedPoints = reachedPoints
|
31c79697db0d5c973cff9b845ed28845695ecb02
|
website/addons/twofactor/views.py
|
website/addons/twofactor/views.py
|
import httplib as http
from framework import request
from framework.auth.decorators import must_be_logged_in
from framework.exceptions import HTTPError
from website.project.decorators import must_have_addon
@must_be_logged_in
@must_have_addon('twofactor', 'user')
def user_settings(user_addon, *args, **kwargs):
code = request.json.get('code')
if code is None:
raise HTTPError(code=http.BAD_REQUEST)
if user_addon.verify_code(code):
user_addon.is_confirmed = True
raise HTTPError(http.FORBIDDEN, data=dict(
message_short='Forbidden',
message_long='The two-factor verification code you provided is invalid.'
))
|
import httplib as http
from framework import request
from framework.auth.decorators import must_be_logged_in
from framework.exceptions import HTTPError
from website.project.decorators import must_have_addon
@must_be_logged_in
@must_have_addon('twofactor', 'user')
def user_settings(user_addon, *args, **kwargs):
code = request.json.get('code')
if code is None:
raise HTTPError(code=http.BAD_REQUEST)
if user_addon.verify_code(code):
user_addon.is_confirmed = True
user_addon.save()
return {'message': 'Successfully verified two-factor authentication.'}, http.OK
raise HTTPError(http.FORBIDDEN, data=dict(
message_short='Forbidden',
message_long='The two-factor verification code you provided is invalid.'
))
|
Fix response when user successfully confirms 2fa
|
Fix response when user successfully confirms 2fa
|
Python
|
apache-2.0
|
CenterForOpenScience/osf.io,revanthkolli/osf.io,jnayak1/osf.io,mluo613/osf.io,caneruguz/osf.io,alexschiller/osf.io,adlius/osf.io,himanshuo/osf.io,CenterForOpenScience/osf.io,samanehsan/osf.io,jeffreyliu3230/osf.io,cslzchen/osf.io,cwisecarver/osf.io,billyhunt/osf.io,SSJohns/osf.io,cosenal/osf.io,GageGaskins/osf.io,kushG/osf.io,zamattiac/osf.io,njantrania/osf.io,petermalcolm/osf.io,jinluyuan/osf.io,bdyetton/prettychart,monikagrabowska/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,chennan47/osf.io,haoyuchen1992/osf.io,cwisecarver/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,TomHeatwole/osf.io,wearpants/osf.io,binoculars/osf.io,jolene-esposito/osf.io,billyhunt/osf.io,reinaH/osf.io,kwierman/osf.io,leb2dg/osf.io,felliott/osf.io,revanthkolli/osf.io,Johnetordoff/osf.io,caseyrygt/osf.io,Nesiehr/osf.io,acshi/osf.io,barbour-em/osf.io,doublebits/osf.io,acshi/osf.io,zkraime/osf.io,TomBaxter/osf.io,kushG/osf.io,baylee-d/osf.io,adlius/osf.io,cosenal/osf.io,jolene-esposito/osf.io,kushG/osf.io,njantrania/osf.io,DanielSBrown/osf.io,sbt9uc/osf.io,adlius/osf.io,MerlinZhang/osf.io,caseyrygt/osf.io,zachjanicki/osf.io,SSJohns/osf.io,TomBaxter/osf.io,cldershem/osf.io,reinaH/osf.io,revanthkolli/osf.io,zachjanicki/osf.io,sloria/osf.io,felliott/osf.io,hmoco/osf.io,hmoco/osf.io,brianjgeiger/osf.io,HarryRybacki/osf.io,danielneis/osf.io,emetsger/osf.io,chennan47/osf.io,icereval/osf.io,RomanZWang/osf.io,jeffreyliu3230/osf.io,kch8qx/osf.io,chennan47/osf.io,binoculars/osf.io,KAsante95/osf.io,amyshi188/osf.io,brianjgeiger/osf.io,ticklemepierce/osf.io,cwisecarver/osf.io,AndrewSallans/osf.io,mluo613/osf.io,alexschiller/osf.io,mluke93/osf.io,abought/osf.io,MerlinZhang/osf.io,zachjanicki/osf.io,kch8qx/osf.io,MerlinZhang/osf.io,dplorimer/osf,cosenal/osf.io,Nesiehr/osf.io,mluo613/osf.io,aaxelb/osf.io,asanfilippo7/osf.io,arpitar/osf.io,cwisecarver/osf.io,lamdnhan/osf.io,ckc6cz/osf.io,zamattiac/osf.io,Johnetordoff/osf.io,wearpants/osf.io,alexschiller/osf.io,TomBaxter/osf.io,icereval/osf.io,doublebits/osf.io,samchrisinger/osf.io,fabianvf/osf.io,wearpants/osf.io,jinluyuan/osf.io,chrisseto/osf.io,samanehsan/osf.io,mluke93/osf.io,leb2dg/osf.io,GaryKriebel/osf.io,laurenrevere/osf.io,kch8qx/osf.io,ticklemepierce/osf.io,cslzchen/osf.io,ckc6cz/osf.io,pattisdr/osf.io,brandonPurvis/osf.io,leb2dg/osf.io,brandonPurvis/osf.io,laurenrevere/osf.io,sloria/osf.io,GageGaskins/osf.io,TomHeatwole/osf.io,arpitar/osf.io,aaxelb/osf.io,KAsante95/osf.io,amyshi188/osf.io,HarryRybacki/osf.io,brandonPurvis/osf.io,Nesiehr/osf.io,cosenal/osf.io,jinluyuan/osf.io,zamattiac/osf.io,abought/osf.io,KAsante95/osf.io,rdhyee/osf.io,samchrisinger/osf.io,TomHeatwole/osf.io,acshi/osf.io,sbt9uc/osf.io,chrisseto/osf.io,felliott/osf.io,Johnetordoff/osf.io,DanielSBrown/osf.io,emetsger/osf.io,caseyrygt/osf.io,caseyrollins/osf.io,TomHeatwole/osf.io,lamdnhan/osf.io,lyndsysimon/osf.io,crcresearch/osf.io,zkraime/osf.io,mluo613/osf.io,brandonPurvis/osf.io,ckc6cz/osf.io,erinspace/osf.io,jnayak1/osf.io,HarryRybacki/osf.io,mluke93/osf.io,felliott/osf.io,mfraezz/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,chrisseto/osf.io,Nesiehr/osf.io,SSJohns/osf.io,dplorimer/osf,jmcarp/osf.io,cslzchen/osf.io,crcresearch/osf.io,samchrisinger/osf.io,revanthkolli/osf.io,bdyetton/prettychart,abought/osf.io,erinspace/osf.io,caseyrollins/osf.io,GageGaskins/osf.io,lyndsysimon/osf.io,cslzchen/osf.io,reinaH/osf.io,wearpants/osf.io,zkraime/osf.io,lamdnhan/osf.io,jeffreyliu3230/osf.io,RomanZWang/osf.io,lyndsysimon/osf.io,caneruguz/osf.io,GageGaskins/osf.io,mfraezz/osf.io,pattisdr/osf.io,baylee-d/osf.io,Ghalko/osf.io,GaryKriebel/osf.io,chrisseto/osf.io,Ghalko/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,billyhunt/osf.io,acshi/osf.io,cldershem/osf.io,rdhyee/osf.io,alexschiller/osf.io,asanfilippo7/osf.io,arpitar/osf.io,baylee-d/osf.io,samanehsan/osf.io,rdhyee/osf.io,acshi/osf.io,mfraezz/osf.io,njantrania/osf.io,monikagrabowska/osf.io,fabianvf/osf.io,mattclark/osf.io,kch8qx/osf.io,ticklemepierce/osf.io,saradbowman/osf.io,GaryKriebel/osf.io,fabianvf/osf.io,abought/osf.io,kwierman/osf.io,aaxelb/osf.io,MerlinZhang/osf.io,barbour-em/osf.io,jinluyuan/osf.io,monikagrabowska/osf.io,sbt9uc/osf.io,SSJohns/osf.io,KAsante95/osf.io,dplorimer/osf,himanshuo/osf.io,jnayak1/osf.io,barbour-em/osf.io,cldershem/osf.io,sloria/osf.io,HalcyonChimera/osf.io,sbt9uc/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,Ghalko/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,RomanZWang/osf.io,leb2dg/osf.io,mfraezz/osf.io,jmcarp/osf.io,AndrewSallans/osf.io,DanielSBrown/osf.io,pattisdr/osf.io,petermalcolm/osf.io,reinaH/osf.io,erinspace/osf.io,mluo613/osf.io,zachjanicki/osf.io,jolene-esposito/osf.io,laurenrevere/osf.io,amyshi188/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,ckc6cz/osf.io,hmoco/osf.io,zkraime/osf.io,doublebits/osf.io,kwierman/osf.io,jeffreyliu3230/osf.io,himanshuo/osf.io,jmcarp/osf.io,emetsger/osf.io,himanshuo/osf.io,lamdnhan/osf.io,icereval/osf.io,asanfilippo7/osf.io,doublebits/osf.io,bdyetton/prettychart,njantrania/osf.io,hmoco/osf.io,caneruguz/osf.io,haoyuchen1992/osf.io,lyndsysimon/osf.io,asanfilippo7/osf.io,aaxelb/osf.io,samanehsan/osf.io,cldershem/osf.io,ZobairAlijan/osf.io,jmcarp/osf.io,adlius/osf.io,kch8qx/osf.io,emetsger/osf.io,danielneis/osf.io,mluke93/osf.io,samchrisinger/osf.io,caseyrollins/osf.io,amyshi188/osf.io,ticklemepierce/osf.io,jolene-esposito/osf.io,HalcyonChimera/osf.io,danielneis/osf.io,kwierman/osf.io,DanielSBrown/osf.io,brianjgeiger/osf.io,danielneis/osf.io,petermalcolm/osf.io,brandonPurvis/osf.io,barbour-em/osf.io,Ghalko/osf.io,petermalcolm/osf.io,ZobairAlijan/osf.io,monikagrabowska/osf.io,jnayak1/osf.io,billyhunt/osf.io,bdyetton/prettychart,zamattiac/osf.io
|
import httplib as http
from framework import request
from framework.auth.decorators import must_be_logged_in
from framework.exceptions import HTTPError
from website.project.decorators import must_have_addon
@must_be_logged_in
@must_have_addon('twofactor', 'user')
def user_settings(user_addon, *args, **kwargs):
code = request.json.get('code')
if code is None:
raise HTTPError(code=http.BAD_REQUEST)
if user_addon.verify_code(code):
user_addon.is_confirmed = True
+ user_addon.save()
+ return {'message': 'Successfully verified two-factor authentication.'}, http.OK
raise HTTPError(http.FORBIDDEN, data=dict(
message_short='Forbidden',
message_long='The two-factor verification code you provided is invalid.'
))
|
Fix response when user successfully confirms 2fa
|
## Code Before:
import httplib as http
from framework import request
from framework.auth.decorators import must_be_logged_in
from framework.exceptions import HTTPError
from website.project.decorators import must_have_addon
@must_be_logged_in
@must_have_addon('twofactor', 'user')
def user_settings(user_addon, *args, **kwargs):
code = request.json.get('code')
if code is None:
raise HTTPError(code=http.BAD_REQUEST)
if user_addon.verify_code(code):
user_addon.is_confirmed = True
raise HTTPError(http.FORBIDDEN, data=dict(
message_short='Forbidden',
message_long='The two-factor verification code you provided is invalid.'
))
## Instruction:
Fix response when user successfully confirms 2fa
## Code After:
import httplib as http
from framework import request
from framework.auth.decorators import must_be_logged_in
from framework.exceptions import HTTPError
from website.project.decorators import must_have_addon
@must_be_logged_in
@must_have_addon('twofactor', 'user')
def user_settings(user_addon, *args, **kwargs):
code = request.json.get('code')
if code is None:
raise HTTPError(code=http.BAD_REQUEST)
if user_addon.verify_code(code):
user_addon.is_confirmed = True
user_addon.save()
return {'message': 'Successfully verified two-factor authentication.'}, http.OK
raise HTTPError(http.FORBIDDEN, data=dict(
message_short='Forbidden',
message_long='The two-factor verification code you provided is invalid.'
))
|
import httplib as http
from framework import request
from framework.auth.decorators import must_be_logged_in
from framework.exceptions import HTTPError
from website.project.decorators import must_have_addon
@must_be_logged_in
@must_have_addon('twofactor', 'user')
def user_settings(user_addon, *args, **kwargs):
code = request.json.get('code')
if code is None:
raise HTTPError(code=http.BAD_REQUEST)
if user_addon.verify_code(code):
user_addon.is_confirmed = True
+ user_addon.save()
+ return {'message': 'Successfully verified two-factor authentication.'}, http.OK
raise HTTPError(http.FORBIDDEN, data=dict(
message_short='Forbidden',
message_long='The two-factor verification code you provided is invalid.'
))
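An aside on the control flow this record fixes: without an early return, a valid code still fell through to the Forbidden error. The snippet below is a self-contained, hypothetical sketch of that flow in Python 3 — plain stand-ins, not OSF's framework, request object, or HTTPError — showing the save-then-return success path.

import http.client as http

class FakeUserAddon(object):
    """Hypothetical stand-in for the two-factor user settings addon."""
    def __init__(self, expected_code):
        self.expected_code = expected_code
        self.is_confirmed = False
        self.saved = False

    def verify_code(self, code):
        return code == self.expected_code

    def save(self):
        self.saved = True

def confirm_twofactor(user_addon, code):
    # Mirrors the fixed handler: bad request without a code, save and return
    # on success, and only signal "forbidden" when the code is wrong.
    if code is None:
        raise ValueError('missing code')            # BAD_REQUEST in the view
    if user_addon.verify_code(code):
        user_addon.is_confirmed = True
        user_addon.save()
        return {'message': 'verified'}, http.OK     # early return on success
    raise PermissionError('invalid code')           # FORBIDDEN in the view

addon = FakeUserAddon(expected_code='123456')
print(confirm_twofactor(addon, '123456'))           # ({'message': 'verified'}, 200)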
|
498ab0c125180ba89987e797d0094adc02019a8f
|
numba/exttypes/utils.py
|
numba/exttypes/utils.py
|
"Simple utilities related to extension types"
#------------------------------------------------------------------------
# Read state from extension types
#------------------------------------------------------------------------
def get_attributes_type(py_class):
"Return the attribute struct type of the numba extension type"
return py_class.__numba_struct_type
def get_vtab_type(py_class):
"Return the type of the virtual method table of the numba extension type"
return py_class.__numba_vtab_type
def get_method_pointers(py_class):
"Return [(method_name, method_pointer)] given a numba extension type"
return getattr(py_class, '__numba_method_pointers', None)
#------------------------------------------------------------------------
# Type checking
#------------------------------------------------------------------------
def is_numba_class(py_class):
return hasattr(py_class, '__numba_struct_type')
|
"Simple utilities related to extension types"
#------------------------------------------------------------------------
# Read state from extension types
#------------------------------------------------------------------------
def get_attributes_type(py_class):
"Return the attribute struct type of the numba extension type"
return py_class.__numba_struct_type
def get_vtab_type(py_class):
"Return the type of the virtual method table of the numba extension type"
return py_class.__numba_vtab_type
def get_method_pointers(py_class):
"Return [(method_name, method_pointer)] given a numba extension type"
return getattr(py_class, '__numba_method_pointers', None)
#------------------------------------------------------------------------
# Type checking
#------------------------------------------------------------------------
def is_numba_class(py_class):
return hasattr(py_class, '__numba_struct_type')
def get_numba_bases(py_class):
for base in py_class.__mro__:
if is_numba_class(base):
yield base
|
Add utility to iterate over numba base classes
|
Add utility to iterate over numba base classes
|
Python
|
bsd-2-clause
|
jriehl/numba,cpcloud/numba,stuartarchibald/numba,numba/numba,GaZ3ll3/numba,pombredanne/numba,ssarangi/numba,ssarangi/numba,gdementen/numba,gmarkall/numba,pitrou/numba,shiquanwang/numba,numba/numba,seibert/numba,pitrou/numba,sklam/numba,shiquanwang/numba,IntelLabs/numba,seibert/numba,gdementen/numba,stuartarchibald/numba,stonebig/numba,stonebig/numba,cpcloud/numba,ssarangi/numba,GaZ3ll3/numba,gdementen/numba,stefanseefeld/numba,numba/numba,pombredanne/numba,gmarkall/numba,IntelLabs/numba,sklam/numba,numba/numba,cpcloud/numba,numba/numba,IntelLabs/numba,seibert/numba,pitrou/numba,ssarangi/numba,stonebig/numba,gdementen/numba,ssarangi/numba,jriehl/numba,cpcloud/numba,IntelLabs/numba,stefanseefeld/numba,stuartarchibald/numba,pombredanne/numba,stefanseefeld/numba,jriehl/numba,stonebig/numba,jriehl/numba,stuartarchibald/numba,sklam/numba,gmarkall/numba,gdementen/numba,IntelLabs/numba,pombredanne/numba,stefanseefeld/numba,pombredanne/numba,pitrou/numba,GaZ3ll3/numba,GaZ3ll3/numba,gmarkall/numba,jriehl/numba,pitrou/numba,seibert/numba,shiquanwang/numba,sklam/numba,gmarkall/numba,sklam/numba,seibert/numba,GaZ3ll3/numba,cpcloud/numba,stefanseefeld/numba,stuartarchibald/numba,stonebig/numba
|
"Simple utilities related to extension types"
#------------------------------------------------------------------------
# Read state from extension types
#------------------------------------------------------------------------
def get_attributes_type(py_class):
"Return the attribute struct type of the numba extension type"
return py_class.__numba_struct_type
def get_vtab_type(py_class):
"Return the type of the virtual method table of the numba extension type"
return py_class.__numba_vtab_type
def get_method_pointers(py_class):
"Return [(method_name, method_pointer)] given a numba extension type"
return getattr(py_class, '__numba_method_pointers', None)
#------------------------------------------------------------------------
# Type checking
#------------------------------------------------------------------------
def is_numba_class(py_class):
return hasattr(py_class, '__numba_struct_type')
+
+ def get_numba_bases(py_class):
+ for base in py_class.__mro__:
+ if is_numba_class(base):
+ yield base
|
Add utility to iterate over numba base classes
|
## Code Before:
"Simple utilities related to extension types"
#------------------------------------------------------------------------
# Read state from extension types
#------------------------------------------------------------------------
def get_attributes_type(py_class):
"Return the attribute struct type of the numba extension type"
return py_class.__numba_struct_type
def get_vtab_type(py_class):
"Return the type of the virtual method table of the numba extension type"
return py_class.__numba_vtab_type
def get_method_pointers(py_class):
"Return [(method_name, method_pointer)] given a numba extension type"
return getattr(py_class, '__numba_method_pointers', None)
#------------------------------------------------------------------------
# Type checking
#------------------------------------------------------------------------
def is_numba_class(py_class):
return hasattr(py_class, '__numba_struct_type')
## Instruction:
Add utility to iterate over numba base classes
## Code After:
"Simple utilities related to extension types"
#------------------------------------------------------------------------
# Read state from extension types
#------------------------------------------------------------------------
def get_attributes_type(py_class):
"Return the attribute struct type of the numba extension type"
return py_class.__numba_struct_type
def get_vtab_type(py_class):
"Return the type of the virtual method table of the numba extension type"
return py_class.__numba_vtab_type
def get_method_pointers(py_class):
"Return [(method_name, method_pointer)] given a numba extension type"
return getattr(py_class, '__numba_method_pointers', None)
#------------------------------------------------------------------------
# Type checking
#------------------------------------------------------------------------
def is_numba_class(py_class):
return hasattr(py_class, '__numba_struct_type')
def get_numba_bases(py_class):
for base in py_class.__mro__:
if is_numba_class(base):
yield base
|
"Simple utilities related to extension types"
#------------------------------------------------------------------------
# Read state from extension types
#------------------------------------------------------------------------
def get_attributes_type(py_class):
"Return the attribute struct type of the numba extension type"
return py_class.__numba_struct_type
def get_vtab_type(py_class):
"Return the type of the virtual method table of the numba extension type"
return py_class.__numba_vtab_type
def get_method_pointers(py_class):
"Return [(method_name, method_pointer)] given a numba extension type"
return getattr(py_class, '__numba_method_pointers', None)
#------------------------------------------------------------------------
# Type checking
#------------------------------------------------------------------------
def is_numba_class(py_class):
return hasattr(py_class, '__numba_struct_type')
+
+ def get_numba_bases(py_class):
+ for base in py_class.__mro__:
+ if is_numba_class(base):
+ yield base
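A quick, self-contained illustration of how the new generator behaves, using toy classes rather than real numba extension types. The marker attribute is attached with setattr so the double-underscore name is not mangled by a class-body assignment.

def is_numba_class(py_class):
    return hasattr(py_class, '__numba_struct_type')

def get_numba_bases(py_class):
    for base in py_class.__mro__:
        if is_numba_class(base):
            yield base

class Plain(object):
    pass

class FakeExtBase(object):
    pass

# Attach the marker the utilities look for; setattr avoids the name mangling
# that an in-class assignment of __numba_struct_type would undergo.
setattr(FakeExtBase, '__numba_struct_type', object())

class Child(FakeExtBase, Plain):
    pass

# The marker is inherited, so Child itself qualifies as well as FakeExtBase.
print([cls.__name__ for cls in get_numba_bases(Child)])   # ['Child', 'FakeExtBase']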
|
facfa3bcd7d35163e0504ef4b6f9b3b15e778993
|
modeltranslation/management/commands/update_translation_fields.py
|
modeltranslation/management/commands/update_translation_fields.py
|
from django.db.models import F, Q
from django.core.management.base import NoArgsCommand
from modeltranslation.settings import DEFAULT_LANGUAGE
from modeltranslation.translator import translator
from modeltranslation.utils import build_localized_fieldname
class Command(NoArgsCommand):
help = ('Updates the default translation fields of all or the specified'
'translated application using the value of the original field.')
def handle(self, **options):
verbosity = int(options['verbosity'])
if verbosity > 0:
self.stdout.write("Using default language: %s\n" % DEFAULT_LANGUAGE)
for model, trans_opts in translator._registry.items():
if model._meta.abstract:
continue
if verbosity > 0:
self.stdout.write("Updating data of model '%s'\n" % model)
for fieldname in trans_opts.fields:
def_lang_fieldname = build_localized_fieldname(
fieldname, DEFAULT_LANGUAGE)
# We'll only update fields which do not have an existing value
q = Q(**{def_lang_fieldname: None})
field = model._meta.get_field(fieldname)
if field.empty_strings_allowed:
q |= Q(**{def_lang_fieldname: ""})
try:
model.objects.filter(q).rewrite(False).update(
**{def_lang_fieldname: F(fieldname)})
except AttributeError:
# FIXME: Workaround for abstract models. See issue #123 for details.
model.objects.filter(q).update(**{def_lang_fieldname: F(fieldname)})
|
from django.db.models import F, Q
from django.core.management.base import NoArgsCommand
from modeltranslation.settings import DEFAULT_LANGUAGE
from modeltranslation.translator import translator
from modeltranslation.utils import build_localized_fieldname
class Command(NoArgsCommand):
help = ('Updates the default translation fields of all or the specified'
'translated application using the value of the original field.')
def handle(self, **options):
verbosity = int(options['verbosity'])
if verbosity > 0:
self.stdout.write("Using default language: %s\n" % DEFAULT_LANGUAGE)
for model, trans_opts in translator._registry.items():
if model._meta.abstract:
continue
if verbosity > 0:
self.stdout.write("Updating data of model '%s'\n" % model)
for fieldname in trans_opts.fields:
def_lang_fieldname = build_localized_fieldname(
fieldname, DEFAULT_LANGUAGE)
# We'll only update fields which do not have an existing value
q = Q(**{def_lang_fieldname: None})
field = model._meta.get_field(fieldname)
if field.empty_strings_allowed:
q |= Q(**{def_lang_fieldname: ""})
model.objects.filter(q).rewrite(False).update(**{def_lang_fieldname: F(fieldname)})
|
Revert "Added a workaround for abstract models not being handled correctly."
|
Revert "Added a workaround for abstract models not being handled correctly."
This reverts commit a3e44c187b5abfa6d9b360cecc5c1daa746134f5.
|
Python
|
bsd-3-clause
|
marctc/django-modeltranslation,akheron/django-modeltranslation,nanuxbe/django-modeltranslation,marctc/django-modeltranslation,nanuxbe/django-modeltranslation,SideStudios/django-modeltranslation,akheron/django-modeltranslation,yoza/django-modeltranslation,extertioner/django-modeltranslation,extertioner/django-modeltranslation,deschler/django-modeltranslation,acdha/django-modeltranslation,acdha/django-modeltranslation,vstoykov/django-modeltranslation,deschler/django-modeltranslation,yoza/django-modeltranslation,SideStudios/django-modeltranslation,vstoykov/django-modeltranslation
|
from django.db.models import F, Q
from django.core.management.base import NoArgsCommand
from modeltranslation.settings import DEFAULT_LANGUAGE
from modeltranslation.translator import translator
from modeltranslation.utils import build_localized_fieldname
class Command(NoArgsCommand):
help = ('Updates the default translation fields of all or the specified'
'translated application using the value of the original field.')
def handle(self, **options):
verbosity = int(options['verbosity'])
if verbosity > 0:
self.stdout.write("Using default language: %s\n" % DEFAULT_LANGUAGE)
for model, trans_opts in translator._registry.items():
if model._meta.abstract:
continue
if verbosity > 0:
self.stdout.write("Updating data of model '%s'\n" % model)
for fieldname in trans_opts.fields:
def_lang_fieldname = build_localized_fieldname(
fieldname, DEFAULT_LANGUAGE)
# We'll only update fields which do not have an existing value
q = Q(**{def_lang_fieldname: None})
field = model._meta.get_field(fieldname)
if field.empty_strings_allowed:
q |= Q(**{def_lang_fieldname: ""})
- try:
- model.objects.filter(q).rewrite(False).update(
- **{def_lang_fieldname: F(fieldname)})
- except AttributeError:
- # FIXME: Workaround for abstract models. See issue #123 for details.
- model.objects.filter(q).update(**{def_lang_fieldname: F(fieldname)})
+ model.objects.filter(q).rewrite(False).update(**{def_lang_fieldname: F(fieldname)})
|
Revert "Added a workaround for abstract models not being handled correctly."
|
## Code Before:
from django.db.models import F, Q
from django.core.management.base import NoArgsCommand
from modeltranslation.settings import DEFAULT_LANGUAGE
from modeltranslation.translator import translator
from modeltranslation.utils import build_localized_fieldname
class Command(NoArgsCommand):
help = ('Updates the default translation fields of all or the specified'
'translated application using the value of the original field.')
def handle(self, **options):
verbosity = int(options['verbosity'])
if verbosity > 0:
self.stdout.write("Using default language: %s\n" % DEFAULT_LANGUAGE)
for model, trans_opts in translator._registry.items():
if model._meta.abstract:
continue
if verbosity > 0:
self.stdout.write("Updating data of model '%s'\n" % model)
for fieldname in trans_opts.fields:
def_lang_fieldname = build_localized_fieldname(
fieldname, DEFAULT_LANGUAGE)
# We'll only update fields which do not have an existing value
q = Q(**{def_lang_fieldname: None})
field = model._meta.get_field(fieldname)
if field.empty_strings_allowed:
q |= Q(**{def_lang_fieldname: ""})
try:
model.objects.filter(q).rewrite(False).update(
**{def_lang_fieldname: F(fieldname)})
except AttributeError:
# FIXME: Workaround for abstract models. See issue #123 for details.
model.objects.filter(q).update(**{def_lang_fieldname: F(fieldname)})
## Instruction:
Revert "Added a workaround for abstract models not being handled correctly."
## Code After:
from django.db.models import F, Q
from django.core.management.base import NoArgsCommand
from modeltranslation.settings import DEFAULT_LANGUAGE
from modeltranslation.translator import translator
from modeltranslation.utils import build_localized_fieldname
class Command(NoArgsCommand):
help = ('Updates the default translation fields of all or the specified'
'translated application using the value of the original field.')
def handle(self, **options):
verbosity = int(options['verbosity'])
if verbosity > 0:
self.stdout.write("Using default language: %s\n" % DEFAULT_LANGUAGE)
for model, trans_opts in translator._registry.items():
if model._meta.abstract:
continue
if verbosity > 0:
self.stdout.write("Updating data of model '%s'\n" % model)
for fieldname in trans_opts.fields:
def_lang_fieldname = build_localized_fieldname(
fieldname, DEFAULT_LANGUAGE)
# We'll only update fields which do not have an existing value
q = Q(**{def_lang_fieldname: None})
field = model._meta.get_field(fieldname)
if field.empty_strings_allowed:
q |= Q(**{def_lang_fieldname: ""})
model.objects.filter(q).rewrite(False).update(**{def_lang_fieldname: F(fieldname)})
|
from django.db.models import F, Q
from django.core.management.base import NoArgsCommand
from modeltranslation.settings import DEFAULT_LANGUAGE
from modeltranslation.translator import translator
from modeltranslation.utils import build_localized_fieldname
class Command(NoArgsCommand):
help = ('Updates the default translation fields of all or the specified'
'translated application using the value of the original field.')
def handle(self, **options):
verbosity = int(options['verbosity'])
if verbosity > 0:
self.stdout.write("Using default language: %s\n" % DEFAULT_LANGUAGE)
for model, trans_opts in translator._registry.items():
if model._meta.abstract:
continue
if verbosity > 0:
self.stdout.write("Updating data of model '%s'\n" % model)
for fieldname in trans_opts.fields:
def_lang_fieldname = build_localized_fieldname(
fieldname, DEFAULT_LANGUAGE)
# We'll only update fields which do not have an existing value
q = Q(**{def_lang_fieldname: None})
field = model._meta.get_field(fieldname)
if field.empty_strings_allowed:
q |= Q(**{def_lang_fieldname: ""})
- try:
- model.objects.filter(q).rewrite(False).update(
- **{def_lang_fieldname: F(fieldname)})
- except AttributeError:
- # FIXME: Workaround for abstract models. See issue #123 for details.
- model.objects.filter(q).update(**{def_lang_fieldname: F(fieldname)})
? ----
+ model.objects.filter(q).rewrite(False).update(**{def_lang_fieldname: F(fieldname)})
? +++++++++++++++
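The semantics the revert returns to can be shown without Django: only rows whose default-language field has no existing value (None, or the empty string when empty strings are allowed) are filled from the original field. The sketch below is plain Python over dicts, not the ORM query, and the field names are invented for illustration.

def update_default_translations(rows, fieldname, def_lang_fieldname,
                                empty_strings_allowed=True):
    # Mirror of the Q-object filter: NULL always counts as "missing";
    # the empty string only counts when the field allows empty strings.
    missing = (None, '') if empty_strings_allowed else (None,)
    for row in rows:
        if row.get(def_lang_fieldname) in missing:
            row[def_lang_fieldname] = row[fieldname]   # F(fieldname) analogue
    return rows

rows = [
    {'title': 'Hello', 'title_en': None},      # filled in
    {'title': 'Bye', 'title_en': ''},          # filled in (empty string allowed)
    {'title': 'Hi', 'title_en': 'Hallo'},      # left untouched
]
print(update_default_translations(rows, 'title', 'title_en'))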
|
b1bfd9630ef049070b0cd6ae215470d3d1facd40
|
django/contrib/messages/views.py
|
django/contrib/messages/views.py
|
from django.views.generic.edit import FormMixin
from django.contrib import messages
class SuccessMessageMixin(FormMixin):
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
|
from django.contrib import messages
class SuccessMessageMixin(object):
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
|
Remove unnecessary and problematic parent class from SuccessMessageMixin
|
Remove unnecessary and problematic parent class from SuccessMessageMixin
refs #16319, thanks to bmispelon for the catch
|
Python
|
bsd-3-clause
|
xadahiya/django,anant-dev/django,AltSchool/django,huang4fstudio/django,ryanahall/django,gunchleoc/django,andreif/django,vmarkovtsev/django,avneesh91/django,makinacorpus/django,ArnossArnossi/django,mitchelljkotler/django,avanov/django,sdcooke/django,denis-pitul/django,chyeh727/django,jhoos/django,ataylor32/django,piquadrat/django,SoftwareMaven/django,mttr/django,joakim-hove/django,akshatharaj/django,devops2014/djangosite,jejimenez/django,katrid/django,tuhangdi/django,archen/django,oscaro/django,blueyed/django,MarcJoan/django,GhostThrone/django,megaumi/django,jhoos/django,edmorley/django,theo-l/django,jscn/django,denys-duchier/django,frdb194/django,hcsturix74/django,koordinates/django,tayfun/django,makinacorpus/django,arun6582/django,frishberg/django,Korkki/django,avanov/django,weiawe/django,marcelocure/django,reinout/django,davidharrigan/django,hackerbot/DjangoDev,darkryder/django,sjlehtin/django,hackerbot/DjangoDev,epandurski/django,jgoclawski/django,wkschwartz/django,AltSchool/django,mcardillo55/django,hybrideagle/django,ticosax/django,django/django,ataylor32/django,maxsocl/django,andyzsf/django,helenst/django,camilonova/django,seanwestfall/django,quxiaolong1504/django,henryfjordan/django,dpetzold/django,delhivery/django,jyotsna1820/django,tragiclifestories/django,eyohansa/django,takis/django,kholidfu/django,robhudson/django,arun6582/django,alimony/django,alrifqi/django,ironbox360/django,epandurski/django,syphar/django,krishna-pandey-git/django,darjeeling/django,Argon-Zhou/django,yceruto/django,PolicyStat/django,curtisstpierre/django,gdub/django,jvkops/django,erikr/django,szopu/django,KokareIITP/django,elky/django,dpetzold/django,caotianwei/django,dwightgunning/django,ticosax/django,MarkusH/django,seanwestfall/django,baylee/django,RevelSystems/django,AndrewGrossman/django,Yong-Lee/django,chyeh727/django,adelton/django,rapilabs/django,syaiful6/django,mcrowson/django,sam-tsai/django,ajoaoff/django,ericfc/django,eugena/django,shownomercy/django,tcwicklund/django,elijah513/django,GaussDing/django,leekchan/django_test,MatthewWilkes/django,andela-ooladayo/django,piquadrat/django,savoirfairelinux/django,oberlin/django,phalt/django,barbuza/django,ironbox360/django,extremewaysback/django,DrMeers/django,SujaySKumar/django,ataylor32/django,deployed/django,Y3K/django,wweiradio/django,Matt-Deacalion/django,ecederstrand/django,savoirfairelinux/django,darkryder/django,WSDC-NITWarangal/django,whs/django,vincepandolfo/django,tuhangdi/django,petecummings/django,andela-ooladayo/django,ifduyue/django,mrbox/django,ptoraskar/django,dgladkov/django,dudepare/django,carljm/django,Nepherhotep/django,willharris/django,anant-dev/django,mcella/django,bobcyw/django,raphaelmerx/django,poiati/django,gunchleoc/django,quamilek/django,willhardy/django,megaumi/django,nju520/django,blighj/django,taaviteska/django,jaywreddy/django,jasonbot/django,xwolf12/django,claudep/django,hybrideagle/django,ar45/django,elky/django,varunnaganathan/django,benjaminjkraft/django,etos/django,pipermerriam/django,fafaman/django,simonw/django,himleyb85/django,avanov/django,dgladkov/django,kevintaw/django,IRI-Research/django,hassanabidpk/django,blighj/django,hunter007/django,takeshineshiro/django,treyhunner/django,chyeh727/django,felixjimenez/django,ar45/django,timgraham/django,payeldillip/django,jdelight/django,davidharrigan/django,django-nonrel/django,Adnn/django,SujaySKumar/django,YangSongzhou/django,baylee/django,leeon/annotated-django,ulope/django,jpic/django,PetrDlouhy/django,matiasb/django,zhoulingjun/django,nealtodd/django,techdragon/django
,lsqtongxin/django,charettes/django,ebar0n/django,ziima/django,rsalmaso/django,beni55/django,nealtodd/django,ghedsouza/django,mmardini/django,Y3K/django,techdragon/django,kcpawan/django,mitchelljkotler/django,TimBuckley/effective_django,sopier/django,bikong2/django,zhoulingjun/django,dex4er/django,dpetzold/django,mitya57/django,hottwaj/django,ericholscher/django,django/django,feroda/django,krishna-pandey-git/django,kisna72/django,dfunckt/django,vitaly4uk/django,huang4fstudio/django,liavkoren/djangoDev,adelton/django,syaiful6/django,peterlauri/django,AndrewGrossman/django,MoritzS/django,takeshineshiro/django,wetneb/django,AndrewGrossman/django,dbaxa/django,mattrobenolt/django,bak1an/django,tomchristie/django,rhertzog/django,ojake/django,mshafiq9/django,gannetson/django,poiati/django,jasonwzhy/django,evansd/django,sergei-maertens/django,jeezybrick/django,wsmith323/django,eyohansa/django,waytai/django,xwolf12/django,rsalmaso/django,andresgz/django,irwinlove/django,akaariai/django,adamchainz/django,dudepare/django,auvipy/django,HonzaKral/django,djbaldey/django,aspidites/django,kamyu104/django,ojengwa/django-1,DONIKAN/django,spisneha25/django,taaviteska/django,evansd/django,waytai/django,ABaldwinHunter/django-clone-classic,gengue/django,lsqtongxin/django,fpy171/django,mewtaylor/django,Anonymous-X6/django,maxsocl/django,maxsocl/django,ticosax/django,ryangallen/django,jvkops/django,MoritzS/django,KokareIITP/django,shtouff/django,georgemarshall/django,dfdx2/django,krisys/django,KokareIITP/django,rhertzog/django,salamer/django,BrotherPhil/django,tomchristie/django,BMJHayward/django,tuhangdi/django,Beauhurst/django,waytai/django,z0by/django,lunafeng/django,xrmx/django,asser/django,sdcooke/django,myang321/django,gcd0318/django,zsiciarz/django,peterlauri/django,asser/django,yewang15215/django,haxoza/django,sadaf2605/django,EmadMokhtar/Django,jyotsna1820/django,unaizalakain/django,benjaminjkraft/django,blindroot/django,stevenewey/django,filias/django,EliotBerriot/django,NullSoldier/django,DrMeers/django,runekaagaard/django-contrib-locking,elena/django,ataylor32/django,rrrene/django,stevenewey/django,Matt-Deacalion/django,frePPLe/django,hnakamur/django,nemesisdesign/django,ytjiang/django,alilotfi/django,doismellburning/django,frdb194/django,jyotsna1820/django,whs/django,marqueedev/django,edmorley/django,mattseymour/django,seanwestfall/django,Balachan27/django,atul-bhouraskar/django,yigitguler/django,peterlauri/django,hkchenhongyi/django,djbaldey/django,solarissmoke/django,ajoaoff/django,Matt-Deacalion/django,SebasSBM/django,abomyi/django,kholidfu/django,craynot/django,hynekcer/django,twz915/django,sephii/django,marissazhou/django,camilonova/django,ivandevp/django,alimony/django,RossBrunton/django,denis-pitul/django,areski/django,dsanders11/django,dydek/django,tragiclifestories/django,djbaldey/django,aisipos/django,camilonova/django,vitaly4uk/django,programadorjc/django,rapilabs/django,googleinterns/django,ghickman/django,tcwicklund/django,pquentin/django,zsiciarz/django,rajsadho/django,WSDC-NITWarangal/django,jn7163/django,phalt/django,DONIKAN/django,elkingtonmcb/django,nealtodd/django,mitchelljkotler/django,jhg/django,tanmaythakur/django,TridevGuha/django,jasonwzhy/django,zanderle/django,nhippenmeyer/django,divio/django,jasonwzhy/django,tanmaythakur/django,mathspace/django,marckuz/django,MounirMesselmeni/django,apocquet/django,mattseymour/django,caotianwei/django,kholidfu/django,quxiaolong1504/django,dydek/django,mshafiq9/django,AlexHill/django,ojengwa/django-1,mattrobenolt/django,felixxm/django,Nepherh
otep/django,spisneha25/django,kamyu104/django,krisys/django,BMJHayward/django,Leila20/django,areski/django,vitan/django,ghickman/django,shaib/django,edevil/django,evansd/django,Sonicbids/django,evansd/django,fpy171/django,gitaarik/django,ojake/django,sarthakmeh03/django,myang321/django,haxoza/django,shtouff/django,liuliwork/django,AltSchool/django,BrotherPhil/django,gdi2290/django,blueyed/django,robhudson/django,jallohm/django,yewang15215/django,risicle/django,MarcJoan/django,mrfuxi/django,zedr/django,twz915/django,oberlin/django,GitAngel/django,Anonymous-X6/django,druuu/django,mmardini/django,Beauhurst/django,jsoref/django,rhertzog/django,gengue/django,webgeodatavore/django,poiati/django,marissazhou/django,Beauhurst/django,mrbox/django,GaussDing/django,gcd0318/django,jgeskens/django,mcrowson/django,rynomster/django,devops2014/djangosite,sopier/django,jylaxp/django,aroche/django,kevintaw/django,postrational/django,dbaxa/django,MatthewWilkes/django,TimYi/django,sgzsh269/django,SebasSBM/django,beck/django,ifduyue/django,MatthewWilkes/django,archen/django,synasius/django,bikong2/django,arun6582/django,syphar/django,kamyu104/django,cainmatt/django,elky/django,ghedsouza/django,liuliwork/django,kaedroho/django,EliotBerriot/django,stewartpark/django,Vixionar/django,rogerhu/django,dursk/django,alexallah/django,labcodes/django,helenst/django,hottwaj/django,denys-duchier/django,yamila-moreno/django,TridevGuha/django,reinout/django,jdelight/django,gengue/django,ckirby/django,ivandevp/django,Vixionar/django,YYWen0o0/python-frame-django,YYWen0o0/python-frame-django,mjtamlyn/django,andela-ooladayo/django,gcd0318/django,sjlehtin/django,guettli/django,marqueedev/django,jpic/django,aidanlister/django,feroda/django,blindroot/django,HonzaKral/django,riteshshrv/django,davidharrigan/django,salamer/django,aisipos/django,alexmorozov/django,kangfend/django,sam-tsai/django,hassanabidpk/django,willhardy/django,ericfc/django,rmboggs/django,dwightgunning/django,Mixser/django,nju520/django,vitaly4uk/django,ytjiang/django,IRI-Research/django,aroche/django,crazy-canux/django,beni55/django,indevgr/django,tanmaythakur/django,savoirfairelinux/django,delinhabit/django,alrifqi/django,piquadrat/django,mcrowson/django,koniiiik/django,jscn/django,jeezybrick/django,lmorchard/django,jhg/django,mcardillo55/django,crazy-canux/django,xrmx/django,Matt-Deacalion/django,ryanahall/django,roselleebarle04/django,joequery/django,mttr/django,ojengwa/django-1,unaizalakain/django,delhivery/django,jarshwah/django,riteshshrv/django,filias/django,ziima/django,JorgeCoock/django,mbox/django,coldmind/django,rapilabs/django,zedr/django,frePPLe/django,rajsadho/django,django-nonrel/django,x111ong/django,daniponi/django,timgraham/django,ryanahall/django,akshatharaj/django,frdb194/django,programadorjc/django,fenginx/django,gdub/django,aroche/django,rlugojr/django,kutenai/django,olasitarska/django,syphar/django,marctc/django,googleinterns/django,wweiradio/django,rajsadho/django,mathspace/django,sarthakmeh03/django,taaviteska/django,vincepandolfo/django,marckuz/django,curtisstpierre/django,saydulk/django,takeshineshiro/django,oberlin/django,hobarrera/django,aspidites/django,SoftwareMaven/django,tayfun/django,hkchenhongyi/django,atul-bhouraskar/django,alrifqi/django,digimarc/django,ghedsouza/django,nielsvanoch/django,Yong-Lee/django,jpic/django,hunter007/django,techdragon/django,joakim-hove/django,hcsturix74/django,seocam/django,BMJHayward/django,hkchenhongyi/django,erikr/django,wsmith323/django,ebar0n/django,nemesisdesign/django,simonw/django,lmorchard/dja
ngo,pquentin/django,twz915/django,sadaf2605/django,ABaldwinHunter/django-clone-classic,dhruvagarwal/django,edmorley/django,ecederstrand/django,zhaodelong/django,PolicyStat/django,frankvdp/django,akshatharaj/django,ulope/django,felixxm/django,adamchainz/django,doismellburning/django,eyohansa/django,fpy171/django,lunafeng/django,zhaodelong/django,henryfjordan/django,z0by/django,seocam/django,haxoza/django,yewang15215/django,mttr/django,tysonclugg/django,sgzsh269/django,quamilek/django,jn7163/django,dursk/django,mitya57/django,darjeeling/django,gitaarik/django,kholidfu/django,aerophile/django,mjtamlyn/django,Beauhurst/django,marckuz/django,drjeep/django,bikong2/django,rwillmer/django,risicle/django,digimarc/django,alilotfi/django,stewartpark/django,AlexHill/django,bobcyw/django,jgoclawski/django,denisenkom/django,hkchenhongyi/django,RevelSystems/django,blueyed/django,redhat-openstack/django,tbeadle/django,koordinates/django,dfdx2/django,codepantry/django,solarissmoke/django,frankvdp/django,EmadMokhtar/Django,mewtaylor/django,tomchristie/django,donkirkby/django,gitaarik/django,jejimenez/django,seocam/django,karyon/django,ckirby/django,beckastar/django,frePPLe/django,jnovinger/django,marcelocure/django,MarcJoan/django,b-me/django,Korkki/django,ebar0n/django,vmarkovtsev/django,deployed/django,matiasb/django,manhhomienbienthuy/django,tysonclugg/django,areski/django,GitAngel/django,guettli/django,andela-ooladayo/django,MikeAmy/django,extremewaysback/django,jmcarp/django,Korkki/django,ticosax/django,caotianwei/django,gcd0318/django,ajoaoff/django,yamila-moreno/django,vincepandolfo/django,sam-tsai/django,JavML/django,shacker/django,wsmith323/django,avneesh91/django,gohin/django,WillGuan105/django,MarkusH/django,pasqualguerrero/django,gannetson/django,jscn/django,etos/django,andreif/django,sarvex/django,shtouff/django,reinout/django,mattseymour/django,drjeep/django,andreif/django,frankvdp/django,dhruvagarwal/django,jarshwah/django,redhat-openstack/django,GhostThrone/django,redhat-openstack/django,mrfuxi/django,jeezybrick/django,mojeto/django,mshafiq9/django,double-y/django,ericfc/django,kisna72/django,mattrobenolt/django,yamila-moreno/django,rockneurotiko/django,koniiiik/django,jsoref/django,yceruto/django,MikeAmy/django,yakky/django,neiudemo1/django,schinckel/django,epandurski/django,bak1an/django,kswiat/django,frankvdp/django,takeshineshiro/django,NullSoldier/django,DasIch/django,DrMeers/django,craynot/django,marctc/django,rogerhu/django,barbuza/django,raphaelmerx/django,zanderle/django,elijah513/django,mitya57/django,zerc/django,himleyb85/django,memtoko/django,apocquet/django,zhaodelong/django,irwinlove/django,duqiao/django,scorphus/django,akshatharaj/django,BlindHunter/django,mattseymour/django,ifduyue/django,GaussDing/django,donkirkby/django,claudep/django,runekaagaard/django-contrib-locking,jejimenez/django,payeldillip/django,loic/django,mjtamlyn/django,nielsvanoch/django,h4r5h1t/django-hauthy,indevgr/django,robhudson/django,simone/django-gb,marissazhou/django,sarvex/django,ptoraskar/django,nealtodd/django,hackerbot/DjangoDev,treyhunner/django,elkingtonmcb/django,runekaagaard/django-contrib-locking,auready/django,Endika/django,leeon/annotated-django,Leila20/django,wsmith323/django,whs/django,SoftwareMaven/django,yigitguler/django,gitaarik/django,akaariai/django,myang321/django,timgraham/django,BlindHunter/django,hnakamur/django,irwinlove/django,1013553207/django,petecummings/django,hobarrera/django,PolicyStat/django,auvipy/django,feroda/django,kangfend/django,ivandevp/django,eugena/django,eceder
strand/django,koniiiik/django,tragiclifestories/django,elijah513/django,zerc/django,seocam/django,theo-l/django,elena/django,sbellem/django,guettli/django,atul-bhouraskar/django,jgeskens/django,AltSchool/django,leeon/annotated-django,bitcity/django,fafaman/django,bak1an/django,aerophile/django,treyhunner/django,beni55/django,kcpawan/django,marckuz/django,auvipy/django,myang321/django,dydek/django,jyotsna1820/django,pauloxnet/django,JavML/django,bitcity/django,aspidites/django,GitAngel/django,apollo13/django,ytjiang/django,x111ong/django,alimony/django,shtouff/django,sjlehtin/django,rockneurotiko/django,cainmatt/django,solarissmoke/django,denis-pitul/django,liavkoren/djangoDev,rrrene/django,techdragon/django,uranusjr/django,ptoraskar/django,liu602348184/django,xwolf12/django,zulip/django,TimBuckley/effective_django,blindroot/django,jylaxp/django,jaywreddy/django,sdcooke/django,ajaali/django,rhertzog/django,rynomster/django,h4r5h1t/django-hauthy,moreati/django,barbuza/django,himleyb85/django,ghickman/django,monetate/django,bikong2/django,bspink/django,rizumu/django,mcrowson/django,kangfend/django,varunnaganathan/django,hynekcer/django,kevintaw/django,HonzaKral/django,rtindru/django,codepantry/django,raphaelmerx/django,irwinlove/django,spisneha25/django,SujaySKumar/django,dhruvagarwal/django,beni55/django,DasIch/django,YangSongzhou/django,loic/django,marqueedev/django,reinout/django,MoritzS/django,sarthakmeh03/django,rsvip/Django,ironbox360/django,mojeto/django,maxsocl/django,akintoey/django,gannetson/django,mjtamlyn/django,jpic/django,sopier/django,github-account-because-they-want-it/django,WillGuan105/django,DasIch/django,blighj/django,andyzsf/django,gdub/django,rrrene/django,salamer/django,sarthakmeh03/django,duqiao/django,roselleebarle04/django,georgemarshall/django,kswiat/django,dbaxa/django,yask123/django,jarshwah/django,kutenai/django,sergei-maertens/django,abomyi/django,jmcarp/django,lmorchard/django,MarkusH/django,charettes/django,vitan/django,TridevGuha/django,akintoey/django,nju520/django,mdj2/django,xadahiya/django,lunafeng/django,himleyb85/django,monetate/django,loic/django,MikeAmy/django,jmcarp/django,nhippenmeyer/django,katrid/django,dpetzold/django,eugena/django,yograterol/django,mitchelljkotler/django,aidanlister/django,Argon-Zhou/django,mojeto/django,varunnaganathan/django,webgeodatavore/django,ulope/django,delhivery/django,mlavin/django,JavML/django,sergei-maertens/django,dhruvagarwal/django,ghickman/django,auvipy/django,sadaf2605/django,asser/django,seanwestfall/django,b-me/django,github-account-because-they-want-it/django,megaumi/django,github-account-because-they-want-it/django,frishberg/django,rsvip/Django,monetate/django,mcardillo55/django,leekchan/django_test,coldmind/django,WillGuan105/django,rockneurotiko/django,pauloxnet/django,raphaelmerx/django,gchp/django,mewtaylor/django,fenginx/django,shownomercy/django,wetneb/django,MikeAmy/django,z0by/django,synasius/django,TridevGuha/django,knifenomad/django,zerc/django,saydulk/django,felixxm/django,denisenkom/django,peterlauri/django,rlugojr/django,fafaman/django,googleinterns/django,extremewaysback/django,whs/django,gchp/django,pipermerriam/django,fpy171/django,rizumu/django,spisneha25/django,blueyed/django,gchp/django,jenalgit/django,apollo13/django,saydulk/django,SebasSBM/django,denis-pitul/django,sephii/django,felixjimenez/django,Mixser/django,ebar0n/django,stewartpark/django,litchfield/django,ajaali/django,schinckel/django,mbox/django,donkirkby/django,hottwaj/django,rlugojr/django,craynot/django,risicle/django,akintoey
/django,iambibhas/django,shaistaansari/django,rtindru/django,hybrideagle/django,phalt/django,dursk/django,freakboy3742/django,gdi2290/django,uranusjr/django,beckastar/django,oscaro/django,andreif/django,tbeadle/django,MarcJoan/django,roselleebarle04/django,mshafiq9/django,willharris/django,adambrenecki/django,gunchleoc/django,koordinates/django,gdub/django,dsanders11/django,aerophile/django,huang4fstudio/django,vmarkovtsev/django,andela-ifageyinbo/django,joequery/django,Mixser/django,benjaminjkraft/django,hynekcer/django,kangfend/django,mojeto/django,andresgz/django,camilonova/django,elky/django,schinckel/django,adamchainz/django,BlindHunter/django,pipermerriam/django,Anonymous-X6/django,iambibhas/django,ckirby/django,davidharrigan/django,ABaldwinHunter/django-clone-classic,manhhomienbienthuy/django,IRI-Research/django,dracos/django,knifenomad/django,jhg/django,zsiciarz/django,phalt/django,carljm/django,quxiaolong1504/django,akaariai/django,mdj2/django,liu602348184/django,Adnn/django,ajaali/django,oinopion/django,apollo13/django,yamila-moreno/django,adelton/django,jhoos/django,krisys/django,bobcyw/django,filias/django,moreati/django,yakky/django,pauloxnet/django,PetrDlouhy/django,jnovinger/django,henryfjordan/django,abomyi/django,stevenewey/django,djbaldey/django,delhivery/django,drjeep/django,crazy-canux/django,quamilek/django,ryangallen/django,double-y/django,rsvip/Django,gohin/django,hynekcer/django,apocquet/django,mmardini/django,anant-dev/django,ar45/django,t0in4/django,adamchainz/django,sarvex/django,MounirMesselmeni/django,doismellburning/django,dgladkov/django,rizumu/django,marcelocure/django,alexallah/django,stewartpark/django,mcella/django,marcelocure/django,etos/django,anant-dev/django,bak1an/django,marissazhou/django,dex4er/django,zhoulingjun/django,yewang15215/django,shacker/django,alexallah/django,jaywreddy/django,HousekeepLtd/django,andela-ifageyinbo/django,Mixser/django,ar45/django,zanderle/django,denys-duchier/django,kisna72/django,kswiat/django,Anonymous-X6/django,tysonclugg/django,savoirfairelinux/django,GitAngel/django,erikr/django,ABaldwinHunter/django-clone,epandurski/django,hottwaj/django,jasonbot/django,labcodes/django,Leila20/django,mrfuxi/django,karyon/django,theo-l/django,Beeblio/django,sergei-maertens/django,matiasb/django,jrrembert/django,fenginx/django,frishberg/django,mttr/django,dracos/django,takis/django,hcsturix74/django,harisibrahimkv/django,kamyu104/django,weiawe/django,lwiecek/django,hobarrera/django,extremewaysback/django,jdelight/django,supriyantomaftuh/django,edmorley/django,bobcyw/django,divio/django,MounirMesselmeni/django,YYWen0o0/python-frame-django,Nepherhotep/django,xrmx/django,gunchleoc/django,marctc/django,andela-ifageyinbo/django,dbaxa/django,jasonbot/django,jenalgit/django,hybrideagle/django,RevelSystems/django,HousekeepLtd/django,poiati/django,yograterol/django,oinopion/django,ptoraskar/django,sadaf2605/django,rajsadho/django,fafaman/django,gchp/django,ericholscher/django,knifenomad/django,JorgeCoock/django,xadahiya/django,errx/django,Beeblio/django,arun6582/django,jnovinger/django,wkschwartz/django,divio/django,katrid/django,wkschwartz/django,denisenkom/django,alexmorozov/django,alexallah/django,ajaali/django,akaariai/django,tysonclugg/django,auready/django,neiudemo1/django,frdb194/django,DONIKAN/django,dudepare/django,mlavin/django,x111ong/django,PetrDlouhy/django,mcella/django,ArnossArnossi/django,github-account-because-they-want-it/django,ckirby/django,ASCrookes/django,supriyantomaftuh/django,rynomster/django,nemesisdesign/django,Endika
/django,oinopion/django,asser/django,MoritzS/django,codepantry/django,felixxm/django,RevelSystems/django,davgibbs/django,avneesh91/django,adelton/django,darkryder/django,ziima/django,tragiclifestories/django,tbeadle/django,beckastar/django,karyon/django,jallohm/django,jvkops/django,willhardy/django,ArnossArnossi/django,shaib/django,solarissmoke/django,caotianwei/django,huang4fstudio/django,tanmaythakur/django,loic/django,salamer/django,chyeh727/django,sephii/django,unaizalakain/django,redhat-openstack/django,mattrobenolt/django,beck/django,erikr/django,liuliwork/django,Korkki/django,nielsvanoch/django,syaiful6/django,roselleebarle04/django,guettli/django,neiudemo1/django,joakim-hove/django,fenginx/django,matiasb/django,googleinterns/django,riteshshrv/django,petecummings/django,dudepare/django,claudep/django,aisipos/django,riteshshrv/django,t0in4/django,zhaodelong/django,rtindru/django,krishna-pandey-git/django,kevintaw/django,zhoulingjun/django,liuliwork/django,claudep/django,megaumi/django,vitaly4uk/django,ArnossArnossi/django,RossBrunton/django,mcardillo55/django,Vixionar/django,labcodes/django,rynomster/django,darkryder/django,uranusjr/django,auready/django,lsqtongxin/django,hunter007/django,willharris/django,drjeep/django,mcella/django,t0in4/django,pasqualguerrero/django,double-y/django,alexmorozov/django,postrational/django,dfdx2/django,nhippenmeyer/django,szopu/django,1013553207/django,denys-duchier/django,scorphus/django,andela-ifageyinbo/django,vitan/django,dracos/django,rsalmaso/django,RossBrunton/django,BlindHunter/django,bitcity/django,alilotfi/django,hackerbot/DjangoDev,xrmx/django,felixjimenez/django,jsoref/django,Sonicbids/django,ojake/django,Yong-Lee/django,ASCrookes/django,henryfjordan/django,aidanlister/django,programadorjc/django,hcsturix74/django,shaistaansari/django,shaib/django,supriyantomaftuh/django,neiudemo1/django,knifenomad/django,hassanabidpk/django,tomchristie/django,lsqtongxin/django,sbellem/django,yigitguler/django,beck/django,payeldillip/django,jn7163/django,errx/django,bspink/django,weiawe/django,joequery/django,Balachan27/django,craynot/django,MarkusH/django,ironbox360/django,shownomercy/django,jylaxp/django,vitan/django,mewtaylor/django,digimarc/django,delinhabit/django,jnovinger/django,dwightgunning/django,ryangallen/django,KokareIITP/django,takis/django,jasonbot/django,Argon-Zhou/django,freakboy3742/django,bspink/django,sarvex/django,harisibrahimkv/django,bitcity/django,dfunckt/django,Sonicbids/django,rtindru/django,django/django,SoftwareMaven/django,manhhomienbienthuy/django,WSDC-NITWarangal/django,tbeadle/django,shaistaansari/django,charettes/django,WillGuan105/django,georgemarshall/django,yask123/django,daniponi/django,lwiecek/django,SebasSBM/django,nhippenmeyer/django,MatthewWilkes/django,moreati/django,szopu/django,shacker/django,liu602348184/django,NullSoldier/django,jgoclawski/django,baylee/django,daniponi/django,sgzsh269/django,SujaySKumar/django,digimarc/django,manhhomienbienthuy/django,leekchan/django_test,saydulk/django,TimYi/django,cainmatt/django,andresgz/django,jejimenez/django,ASCrookes/django,zulip/django,mlavin/django,TimBuckley/effective_django,wetneb/django,takis/django,Vixionar/django,robhudson/django,elena/django,beckastar/django,davgibbs/django,adambrenecki/django,charettes/django,shaistaansari/django,jenalgit/django,helenst/django,ericholscher/django,alexmorozov/django,jgoclawski/django,jasonwzhy/django,frishberg/django,edevil/django,duqiao/django,Adnn/django,hassanabidpk/django,felixjimenez/django,rockneurotiko/django,jallohm/djang
o,webgeodatavore/django,taaviteska/django,harisibrahimkv/django,rizumu/django,programadorjc/django,lmorchard/django,druuu/django,indevgr/django,adambrenecki/django,ojake/django,lwiecek/django,joequery/django,willhardy/django,simone/django-gb,schinckel/django,weiawe/django,memtoko/django,TimYi/django,frePPLe/django,Adnn/django,jeezybrick/django,kisna72/django,gohin/django,ABaldwinHunter/django-clone-classic,sam-tsai/django,hnakamur/django,hobarrera/django,tayfun/django,karyon/django,yask123/django,marctc/django,zanderle/django,t0in4/django,ziima/django,supriyantomaftuh/django,blindroot/django,NullSoldier/django,petecummings/django,intgr/django,dfunckt/django,Balachan27/django,h4r5h1t/django-hauthy,elena/django,HousekeepLtd/django,synasius/django,divio/django,dsanders11/django,duqiao/django,GaussDing/django,yceruto/django,ABaldwinHunter/django-clone,eugena/django,koniiiik/django,dursk/django,rmboggs/django,varunnaganathan/django,rwillmer/django,krishna-pandey-git/django,filias/django,coldmind/django,abomyi/django,EliotBerriot/django,atul-bhouraskar/django,vincepandolfo/django,dex4er/django,harisibrahimkv/django,eyohansa/django,Nepherhotep/django,avneesh91/django,dwightgunning/django,archen/django,indevgr/django,ryanahall/django,follow99/django,AlexHill/django,pipermerriam/django,mmardini/django,rwillmer/django,ASCrookes/django,apollo13/django,sbellem/django,1013553207/django,syaiful6/django,devops2014/djangosite,crazy-canux/django,synasius/django,kosz85/django,mrbox/django,zulip/django,ecederstrand/django,WSDC-NITWarangal/django,jsoref/django,GhostThrone/django,wetneb/django,kosz85/django,jrrembert/django,jrrembert/django,postrational/django,andyzsf/django,memtoko/django,aerophile/django,tayfun/django,jenalgit/django,rmboggs/django,xwolf12/django,DONIKAN/django,lunafeng/django,oberlin/django,follow99/django,simonw/django,litchfield/django,waytai/django,treyhunner/django,oscaro/django,tuhangdi/django,druuu/django,Beeblio/django,piquadrat/django,krisys/django,HousekeepLtd/django,TimYi/django,errx/django,darjeeling/django,sopier/django,delinhabit/django,edevil/django,gengue/django,elkingtonmcb/django,z0by/django,follow99/django,bspink/django,mathspace/django,mrbox/django,sbellem/django,payeldillip/django,pasqualguerrero/django,elijah513/django,DasIch/django,1013553207/django,kutenai/django,dfunckt/django,tcwicklund/django,jallohm/django,druuu/django,EliotBerriot/django,BMJHayward/django,aidanlister/django,kutenai/django,ghedsouza/django,rogerhu/django,django-nonrel/django,davgibbs/django,Y3K/django,coldmind/django,olasitarska/django,dgladkov/django,daniponi/django,oinopion/django,cainmatt/django,yakky/django,scorphus/django,HonzaKral/django,olasitarska/django,sgzsh269/django,jmcarp/django,kosz85/django,feroda/django,blighj/django,Yong-Lee/django,jvkops/django,jgeskens/django,pauloxnet/django,jrrembert/django,dydek/django,b-me/django,GhostThrone/django,rwillmer/django,baylee/django,BrotherPhil/django,twz915/django,dfdx2/django,rlugojr/django,unaizalakain/django,jhg/django,mbox/django,aroche/django,tcwicklund/django,wkschwartz/django,follow99/django,litchfield/django,jhoos/django,nju520/django,theo-l/django,joakim-hove/django,rsalmaso/django,marqueedev/django,labcodes/django,jscn/django,intgr/django,b-me/django,syphar/django,jdelight/django,carljm/django,intgr/django,Y3K/django,aspidites/django,AndrewGrossman/django,shaib/django,donkirkby/django,Balachan27/django,sdcooke/django,YangSongzhou/django,simonw/django,liu602348184/django,dsanders11/django,elkingtonmcb/django,zulip/django,alrifqi/django,m
onetate/django,etos/django,akintoey/django,mathspace/django,yask123/django,katrid/django,rsvip/Django,ericfc/django,JorgeCoock/django,litchfield/django,lwiecek/django,wweiradio/django,barbuza/django,delinhabit/django,ifduyue/django,shownomercy/django,gohin/django,auready/django,django/django,pquentin/django,double-y/django,zedr/django,deployed/django,iambibhas/django,stevenewey/django,h4r5h1t/django-hauthy,zerc/django,MounirMesselmeni/django,Beeblio/django,yakky/django,webgeodatavore/django,mrfuxi/django,ytjiang/django,gdi2290/django,mitya57/django,darjeeling/django,scorphus/django,RossBrunton/django,jn7163/django,ajoaoff/django,x111ong/django,ABaldwinHunter/django-clone,aisipos/django,zsiciarz/django,quxiaolong1504/django,sjlehtin/django,vmarkovtsev/django,shacker/django,JavML/django,jaywreddy/django,mdj2/django,xadahiya/django,Endika/django,georgemarshall/django,makinacorpus/django,apocquet/django,dracos/django,yograterol/django,codepantry/django,ivandevp/django,andresgz/django,quamilek/django,alilotfi/django,Leila20/django,haxoza/django,jylaxp/django,uranusjr/django,curtisstpierre/django,kaedroho/django,jarshwah/django,davgibbs/django,hnakamur/django,PetrDlouhy/django,koordinates/django,rapilabs/django,wweiradio/django,benjaminjkraft/django,ryangallen/django,EmadMokhtar/Django,avanov/django,ABaldwinHunter/django-clone,oscaro/django,curtisstpierre/django,rrrene/django,mlavin/django,freakboy3742/django,moreati/django,kcpawan/django,nemesisdesign/django,intgr/django,kaedroho/django,JorgeCoock/django,BrotherPhil/django,django-nonrel/django,kcpawan/django,kosz85/django,Argon-Zhou/django,beck/django,Endika/django,areski/django,gannetson/django,willharris/django,hunter007/django,alimony/django,yograterol/django,simone/django-gb,risicle/django,ojengwa/django-1,carljm/django,YangSongzhou/django,rmboggs/django,pasqualguerrero/django,timgraham/django,liavkoren/djangoDev
|
- from django.views.generic.edit import FormMixin
from django.contrib import messages
- class SuccessMessageMixin(FormMixin):
+ class SuccessMessageMixin(object):
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
|
Remove unnecessary and problematic parent class from SuccessMessageMixin
|
## Code Before:
from django.views.generic.edit import FormMixin
from django.contrib import messages
class SuccessMessageMixin(FormMixin):
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
## Instruction:
Remove unnecessary and problematic parent class from SuccessMessageMixin
## Code After:
from django.contrib import messages
class SuccessMessageMixin(object):
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
|
- from django.views.generic.edit import FormMixin
from django.contrib import messages
- class SuccessMessageMixin(FormMixin):
? - ^^^^^^^
+ class SuccessMessageMixin(object):
? ^^^^^
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
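The reason the mixin can subclass object is Python's cooperative MRO: placed in front of any view that defines form_valid(), its super() call still reaches that view. A minimal, framework-free sketch follows; the view class and the flash call are stand-ins, not Django's.

class FakeFormView(object):
    """Stand-in for a generic editing view that already defines form_valid()."""
    def form_valid(self, form):
        return 'redirect-response'

class SuccessMessageMixin(object):
    success_message = 'Saved %(name)s'

    def form_valid(self, form):
        response = super(SuccessMessageMixin, self).form_valid(form)
        success_message = self.get_success_message(form['cleaned_data'])
        if success_message:
            print('flash: ' + success_message)   # messages.success() in Django
        return response

    def get_success_message(self, cleaned_data):
        return self.success_message % cleaned_data

class MyCreateView(SuccessMessageMixin, FakeFormView):
    pass

view = MyCreateView()
print(view.form_valid({'cleaned_data': {'name': 'example'}}))
# flash: Saved example
# redirect-response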
|
e82ab299a6c68f682a9f9b769e79cf2054684e3b
|
reviewboard/attachments/evolutions/file_attachment_uuid.py
|
reviewboard/attachments/evolutions/file_attachment_uuid.py
|
from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
initial=None, null=True),
]
|
from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
initial=''),
]
|
Fix the FileAttachment.uuid evolution to match the model field.
|
Fix the FileAttachment.uuid evolution to match the model field.
The evolution that was included in the existing code didn't match the
definition of the field. This is a very simple fix.
Testing done:
Ran evolutions.
Reviewed at https://reviews.reviewboard.org/r/8141/
|
Python
|
mit
|
davidt/reviewboard,davidt/reviewboard,reviewboard/reviewboard,chipx86/reviewboard,sgallagher/reviewboard,brennie/reviewboard,reviewboard/reviewboard,brennie/reviewboard,sgallagher/reviewboard,chipx86/reviewboard,brennie/reviewboard,chipx86/reviewboard,reviewboard/reviewboard,davidt/reviewboard,chipx86/reviewboard,sgallagher/reviewboard,reviewboard/reviewboard,sgallagher/reviewboard,brennie/reviewboard,davidt/reviewboard
|
from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
- initial=None, null=True),
+ initial=''),
]
|
Fix the FileAttachment.uuid evolution to match the model field.
|
## Code Before:
from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
initial=None, null=True),
]
## Instruction:
Fix the FileAttachment.uuid evolution to match the model field.
## Code After:
from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
initial=''),
]
|
from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
- initial=None, null=True),
+ initial=''),
]
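The underlying constraint, sketched without django-evolution (the helper below is hypothetical, not its API): when a column that does not allow NULL is added to a table that already has rows, every existing row needs a concrete backfill value. Assuming the model declares uuid as a non-null CharField, initial='' satisfies that, whereas initial=None with null=True did not match the field.

def add_non_null_column(rows, name, initial):
    # Adding a NOT NULL column means backfilling every existing row.
    if initial is None:
        raise ValueError('non-null column %r needs a concrete initial value' % name)
    for row in rows:
        row.setdefault(name, initial)
    return rows

existing = [{'caption': 'logo.png'}, {'caption': 'diff.txt'}]
print(add_non_null_column(existing, 'uuid', initial=''))
# [{'caption': 'logo.png', 'uuid': ''}, {'caption': 'diff.txt', 'uuid': ''}]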
|
743064dbe22e40928c50817417077b8d52de641c
|
twistedchecker/functionaltests/comments.py
|
twistedchecker/functionaltests/comments.py
|
a = 1 + 2 # A comment begins with two whitespace.
# a comment begins with a lowercase letter.
# Good comment examples.
# A sentence that spans multiple lines
# doesn't need to have capitalization on second line.
# Here's some code samples:
# x = x + 1
# Make sure no error occur when checking an empty comment
#
# '\r\n\t' a comment can start with a new lines characters.
var = 1 + 2 # \r\n same for inline comments.
|
a = 1 + 2 # A comment begins with two whitespace.
# a comment begins with a lowercase letter.
# Good comment examples.
# A sentence that spans multiple lines
# doesn't need to have capitalization on second line.
# Here's some code samples:
# x = x + 1
# Make sure no error occur when checking an empty comment
#
# '\r\n\t' a comment can start with a new lines characters.
var = 1 + 2 # \r\n same for inline comments.
# `literal` is fine at the start.
|
Add example with back ticks.
|
Add example with back ticks.
|
Python
|
mit
|
twisted/twistedchecker
|
a = 1 + 2 # A comment begins with two whitespace.
# a comment begins with a lowercase letter.
# Good comment examples.
# A sentence that spans multiple lines
# doesn't need to have capitalization on second line.
# Here's some code samples:
# x = x + 1
# Make sure no error occur when checking an empty comment
#
# '\r\n\t' a comment can start with a new lines characters.
var = 1 + 2 # \r\n same for inline comments.
+ # `literal` is fine at the start.
+
|
Add example with back ticks.
|
## Code Before:
a = 1 + 2 # A comment begins with two whitespace.
# a comment begins with a lowercase letter.
# Good comment examples.
# A sentence that spans multiple lines
# doesn't need to have capitalization on second line.
# Here's some code samples:
# x = x + 1
# Make sure no error occur when checking an empty comment
#
# '\r\n\t' a comment can start with a new lines characters.
var = 1 + 2 # \r\n same for inline comments.
## Instruction:
Add example with back ticks.
## Code After:
a = 1 + 2 # A comment begins with two whitespace.
# a comment begins with a lowercase letter.
# Good comment examples.
# A sentence that spans multiple lines
# doesn't need to have capitalization on second line.
# Here's some code samples:
# x = x + 1
# Make sure no error occur when checking an empty comment
#
# '\r\n\t' a comment can start with a new lines characters.
var = 1 + 2 # \r\n same for inline comments.
# `literal` is fine at the start.
|
a = 1 + 2 # A comment begins with two whitespace.
# a comment begins with a lowercase letter.
# Good comment examples.
# A sentence that spans multiple lines
# doesn't need to have capitalization on second line.
# Here's some code samples:
# x = x + 1
# Make sure no error occur when checking an empty comment
#
# '\r\n\t' a comment can start with a new lines characters.
var = 1 + 2 # \r\n same for inline comments.
+
+ # `literal` is fine at the start.
|
ddf3e604cee09d82ea8741d2ed08f600ba2f70c0
|
scaffolder/commands/list.py
|
scaffolder/commands/list.py
|
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.core.template import TemplateManager
class ListCommand(BaseCommand):
def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
help = 'Template command help entry'
parser = OptionParser(
version=self.get_version(),
option_list=self.get_option_list(),
usage='\n %prog {0} [OPTIONS]'.format(name)
)
aliases = ('tmp',)
BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)
def run(self, *args, **options):
manger = TemplateManager()
manger.list()
def get_default_option(self):
return []
|
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.core.template import TemplateManager
class ListCommand(BaseCommand):
help = 'Template command help entry'
def run(self, *args, **options):
manger = TemplateManager()
manger.list()
def get_default_option(self):
return []
|
Remove __init__ method, not needed.
|
ListCommand: Remove __init__ method, not needed.
|
Python
|
mit
|
goliatone/minions
|
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.core.template import TemplateManager
class ListCommand(BaseCommand):
- def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
- help = 'Template command help entry'
+ help = 'Template command help entry'
- parser = OptionParser(
- version=self.get_version(),
- option_list=self.get_option_list(),
- usage='\n %prog {0} [OPTIONS]'.format(name)
- )
- aliases = ('tmp',)
- BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)
def run(self, *args, **options):
manger = TemplateManager()
manger.list()
def get_default_option(self):
return []
|
Remove __init__ method, not needed.
|
## Code Before:
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.core.template import TemplateManager
class ListCommand(BaseCommand):
def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
help = 'Template command help entry'
parser = OptionParser(
version=self.get_version(),
option_list=self.get_option_list(),
usage='\n %prog {0} [OPTIONS]'.format(name)
)
aliases = ('tmp',)
BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)
def run(self, *args, **options):
manger = TemplateManager()
manger.list()
def get_default_option(self):
return []
## Instruction:
Remove __init__ method, not needed.
## Code After:
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.core.template import TemplateManager
class ListCommand(BaseCommand):
help = 'Template command help entry'
def run(self, *args, **options):
manger = TemplateManager()
manger.list()
def get_default_option(self):
return []
|
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.core.template import TemplateManager
class ListCommand(BaseCommand):
- def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
- help = 'Template command help entry'
? ----
+ help = 'Template command help entry'
- parser = OptionParser(
- version=self.get_version(),
- option_list=self.get_option_list(),
- usage='\n %prog {0} [OPTIONS]'.format(name)
- )
- aliases = ('tmp',)
- BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)
def run(self, *args, **options):
manger = TemplateManager()
manger.list()
def get_default_option(self):
return []
|
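The simplification above works because `help` can live as a plain class attribute that the base command picks up, so the subclass no longer needs its own `__init__` or parser wiring. A reduced stand-in illustrating that class-attribute override pattern (this `BaseCommand` is a made-up stand-in, not the scaffolder one):

class BaseCommand:
    help = ''

    def describe(self):
        # Subclasses override `help` as a plain class attribute.
        return self.help or self.__class__.__name__


class ListCommand(BaseCommand):
    help = 'Template command help entry'


assert ListCommand().describe() == 'Template command help entry'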
9adb52b4a3295afcaaa4c830835d42ce0bbbb03e
|
udemy/missingelement.py
|
udemy/missingelement.py
|
import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
|
import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
def finder3(l1, l2):
"""
Find the missing element in a non-python specific approach in constant
space complexity.
"""
result = 0
for num in l1 + l2:
result ^= num
return result
|
Add XOR approach for finding missing element
|
Add XOR approach for finding missing element
Add approach for finding the missing element in the second list by performing a series of XOR operations.
|
Python
|
mit
|
chinhtle/python_fun
|
import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
+
+
+ def finder3(l1, l2):
+ """
+ Find the missing element in a non-python specific approach in constant
+ space complexity.
+ """
+ result = 0
+
+ for num in l1 + l2:
+ result ^= num
+
+ return result
|
Add XOR approach for finding missing element
|
## Code Before:
import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
## Instruction:
Add XOR approach for finding missing element
## Code After:
import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
def finder3(l1, l2):
"""
Find the missing element in a non-python specific approach in constant
space complexity.
"""
result = 0
for num in l1 + l2:
result ^= num
return result
|
import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
+
+
+ def finder3(l1, l2):
+ """
+ Find the missing element in a non-python specific approach in constant
+ space complexity.
+ """
+ result = 0
+
+ for num in l1 + l2:
+ result ^= num
+
+ return result
|
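The XOR variant above works because x ^ x == 0 and x ^ 0 == x, so every element that appears in both lists cancels out and only the removed one survives, using constant extra space. A minimal self-contained check using the example input from the record:

def finder3(l1, l2):
    # XOR all numbers from both lists; duplicates cancel, the missing one remains.
    result = 0
    for num in l1 + l2:
        result ^= num
    return result


assert finder3([1, 2, 3, 4, 5, 6, 7], [3, 7, 2, 1, 4, 6]) == 5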
c4e497f24818169e8c59c07246582223c8214e45
|
bitfield/forms.py
|
bitfield/forms.py
|
from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
result = BitHandler(0, [k for k, v in self.choices])
for k in value:
try:
setattr(result, str(k), True)
except AttributeError:
raise ValidationError('Unknown choice')
return int(result)
|
from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
if isinstance(value, int):
result = BitHandler(value, [k for k, v in self.choices])
else:
result = BitHandler(0, [k for k, v in self.choices])
for k in value:
try:
setattr(result, str(k), True)
except AttributeError:
raise ValidationError('Unknown choice')
return int(result)
|
Allow values of BitFormField's to be integers (for legacy compatibility in some apps)
|
Allow values of BitFormField's to be integers (for legacy compatibility in some apps)
|
Python
|
apache-2.0
|
moggers87/django-bitfield,joshowen/django-bitfield,Elec/django-bitfield,budlight/django-bitfield,disqus/django-bitfield
|
from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
+ if isinstance(value, int):
+ result = BitHandler(value, [k for k, v in self.choices])
+ else:
- result = BitHandler(0, [k for k, v in self.choices])
+ result = BitHandler(0, [k for k, v in self.choices])
- for k in value:
+ for k in value:
- try:
+ try:
- setattr(result, str(k), True)
+ setattr(result, str(k), True)
- except AttributeError:
+ except AttributeError:
- raise ValidationError('Unknown choice')
+ raise ValidationError('Unknown choice')
return int(result)
|
Allow values of BitFormField's to be integers (for legacy compatibility in some apps)
|
## Code Before:
from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
result = BitHandler(0, [k for k, v in self.choices])
for k in value:
try:
setattr(result, str(k), True)
except AttributeError:
raise ValidationError('Unknown choice')
return int(result)
## Instruction:
Allow values of BitFormField's to be integers (for legacy compatibility in some apps)
## Code After:
from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
if isinstance(value, int):
result = BitHandler(value, [k for k, v in self.choices])
else:
result = BitHandler(0, [k for k, v in self.choices])
for k in value:
try:
setattr(result, str(k), True)
except AttributeError:
raise ValidationError('Unknown choice')
return int(result)
|
from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
+ if isinstance(value, int):
+ result = BitHandler(value, [k for k, v in self.choices])
+ else:
- result = BitHandler(0, [k for k, v in self.choices])
+ result = BitHandler(0, [k for k, v in self.choices])
? ++++
- for k in value:
+ for k in value:
? ++++
- try:
+ try:
? ++++
- setattr(result, str(k), True)
+ setattr(result, str(k), True)
? ++++
- except AttributeError:
+ except AttributeError:
? ++++
- raise ValidationError('Unknown choice')
+ raise ValidationError('Unknown choice')
? ++++
return int(result)
|
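The fix above boils down to one dispatch: an integer is treated as an already-encoded bitmask, while anything else is walked as a collection of flag names. A rough stand-in for that logic (plain Python, not django-bitfield's real BitHandler; the flag names are invented for illustration):

FLAGS = ['read', 'write', 'execute']  # hypothetical flag names


def encode(value, flags=FLAGS):
    # Integers pass through untouched; iterables of names are OR-ed in bit by bit.
    if isinstance(value, int):
        return value
    mask = 0
    for name in value:
        mask |= 1 << flags.index(name)  # ValueError plays the role of "Unknown choice"
    return mask


assert encode(5) == 5
assert encode(['read', 'execute']) == 5  # bits 0 and 2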
61b2266cbd70eacf1382f3a6c46dd16485e4f7e7
|
utils/exporter.py
|
utils/exporter.py
|
import plotly as py
from os import makedirs
from utils.names import output_file_name
_out_dir = 'graphs/'
def export(fig, module, dates):
graph_dir = '{}{}/'.format(_out_dir, str(module))[:-3] # remove .py extension from dir names
makedirs(graph_dir, exist_ok=True)
py.offline.plot(fig, filename=graph_dir + output_file_name(module, dates))
|
import plotly as py
from os import makedirs
from utils.names import output_file_name
_out_dir = 'graphs'
def export(fig, module, dates):
graph_dir = '{}/{}'.format(_out_dir, module[:-3]) # remove .py extension from dir names
makedirs(graph_dir, exist_ok=True)
py.offline.plot(fig, filename='{}/{}'.format(graph_dir, output_file_name(module, dates)))
|
Fix naming of output dir and file names
|
Fix naming of output dir and file names
|
Python
|
mit
|
f-jiang/sleep-pattern-grapher
|
import plotly as py
from os import makedirs
from utils.names import output_file_name
- _out_dir = 'graphs/'
+ _out_dir = 'graphs'
def export(fig, module, dates):
- graph_dir = '{}{}/'.format(_out_dir, str(module))[:-3] # remove .py extension from dir names
+ graph_dir = '{}/{}'.format(_out_dir, module[:-3]) # remove .py extension from dir names
makedirs(graph_dir, exist_ok=True)
- py.offline.plot(fig, filename=graph_dir + output_file_name(module, dates))
+ py.offline.plot(fig, filename='{}/{}'.format(graph_dir, output_file_name(module, dates)))
|
Fix naming of output dir and file names
|
## Code Before:
import plotly as py
from os import makedirs
from utils.names import output_file_name
_out_dir = 'graphs/'
def export(fig, module, dates):
graph_dir = '{}{}/'.format(_out_dir, str(module))[:-3] # remove .py extension from dir names
makedirs(graph_dir, exist_ok=True)
py.offline.plot(fig, filename=graph_dir + output_file_name(module, dates))
## Instruction:
Fix naming of output dir and file names
## Code After:
import plotly as py
from os import makedirs
from utils.names import output_file_name
_out_dir = 'graphs'
def export(fig, module, dates):
graph_dir = '{}/{}'.format(_out_dir, module[:-3]) # remove .py extension from dir names
makedirs(graph_dir, exist_ok=True)
py.offline.plot(fig, filename='{}/{}'.format(graph_dir, output_file_name(module, dates)))
|
import plotly as py
from os import makedirs
from utils.names import output_file_name
- _out_dir = 'graphs/'
? -
+ _out_dir = 'graphs'
def export(fig, module, dates):
- graph_dir = '{}{}/'.format(_out_dir, str(module))[:-3] # remove .py extension from dir names
? - ---- -- ^
+ graph_dir = '{}/{}'.format(_out_dir, module[:-3]) # remove .py extension from dir names
? + ^
makedirs(graph_dir, exist_ok=True)
- py.offline.plot(fig, filename=graph_dir + output_file_name(module, dates))
? ^^
+ py.offline.plot(fig, filename='{}/{}'.format(graph_dir, output_file_name(module, dates)))
? +++++++++++++++ ^ +
|
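Formatting the paths by hand works, but the same export flow can lean on os.path so the separators never have to be spelled out. A hedged sketch under that assumption (the plotly call and the output_file_name helper are taken from the record; the rest mirrors the fixed version):

import os

import plotly as py

from utils.names import output_file_name


def export(fig, module, dates, out_dir='graphs'):
    graph_dir = os.path.join(out_dir, module[:-3])  # strip the .py extension
    os.makedirs(graph_dir, exist_ok=True)
    py.offline.plot(fig, filename=os.path.join(graph_dir, output_file_name(module, dates)))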
02076f919e56503c76a41e78feed8a6720c65c19
|
robot/robot/src/autonomous/timed_shoot.py
|
robot/robot/src/autonomous/timed_shoot.py
|
try:
import wpilib
except ImportError:
from pyfrc import wpilib
from common.autonomous_helper import StatefulAutonomous, timed_state
class TimedShootAutonomous(StatefulAutonomous):
'''
Tunable autonomous mode that does dumb time-based shooting
decisions. Works consistently.
'''
DEFAULT = False
MODE_NAME = "Timed shoot"
def __init__(self, components):
super().__init__(components)
self.register_sd_var('drive_speed', 0.5)
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, tm):
# always keep the arm down
self.intake.armDown()
if tm > 0.3:
self.catapult.pulldown()
super().update(tm)
@timed_state(duration=1.2, next_state='drive', first=True)
def drive_wait(self, tm, state_tm):
pass
@timed_state(duration=1.4, next_state='launch')
def drive(self, tm, state_tm):
self.drive.move(0, self.drive_speed, 0)
@timed_state(duration=1.0)
def launch(self, tm):
self.catapult.launchNoSensor()
|
try:
import wpilib
except ImportError:
from pyfrc import wpilib
from common.autonomous_helper import StatefulAutonomous, timed_state
class TimedShootAutonomous(StatefulAutonomous):
'''
Tunable autonomous mode that does dumb time-based shooting
decisions. Works consistently.
'''
DEFAULT = False
MODE_NAME = "Timed shoot"
def __init__(self, components):
super().__init__(components)
self.register_sd_var('drive_speed', 0.5)
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, tm):
# always keep the arm down
self.intake.armDown()
if tm > 0.3:
self.catapult.pulldown()
super().update(tm)
@timed_state(duration=1.2, next_state='drive', first=True)
def drive_wait(self, tm, state_tm):
'''Wait some period before we start driving'''
pass
@timed_state(duration=1.4, next_state='launch')
def drive(self, tm, state_tm):
'''Start the launch sequence! Drive slowly forward for N seconds'''
self.drive.move(0, self.drive_speed, 0)
@timed_state(duration=1.0)
def launch(self, tm):
'''Finally, fire and keep firing for 1 seconds'''
self.catapult.launchNoSensor()
|
Add comments for timed shoot
|
Add comments for timed shoot
|
Python
|
bsd-3-clause
|
frc1418/2014
|
try:
import wpilib
except ImportError:
from pyfrc import wpilib
from common.autonomous_helper import StatefulAutonomous, timed_state
class TimedShootAutonomous(StatefulAutonomous):
'''
Tunable autonomous mode that does dumb time-based shooting
decisions. Works consistently.
'''
DEFAULT = False
MODE_NAME = "Timed shoot"
def __init__(self, components):
super().__init__(components)
self.register_sd_var('drive_speed', 0.5)
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, tm):
# always keep the arm down
self.intake.armDown()
if tm > 0.3:
self.catapult.pulldown()
super().update(tm)
@timed_state(duration=1.2, next_state='drive', first=True)
def drive_wait(self, tm, state_tm):
+ '''Wait some period before we start driving'''
pass
@timed_state(duration=1.4, next_state='launch')
def drive(self, tm, state_tm):
+ '''Start the launch sequence! Drive slowly forward for N seconds'''
self.drive.move(0, self.drive_speed, 0)
@timed_state(duration=1.0)
def launch(self, tm):
+ '''Finally, fire and keep firing for 1 seconds'''
self.catapult.launchNoSensor()
|
Add comments for timed shoot
|
## Code Before:
try:
import wpilib
except ImportError:
from pyfrc import wpilib
from common.autonomous_helper import StatefulAutonomous, timed_state
class TimedShootAutonomous(StatefulAutonomous):
'''
Tunable autonomous mode that does dumb time-based shooting
decisions. Works consistently.
'''
DEFAULT = False
MODE_NAME = "Timed shoot"
def __init__(self, components):
super().__init__(components)
self.register_sd_var('drive_speed', 0.5)
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, tm):
# always keep the arm down
self.intake.armDown()
if tm > 0.3:
self.catapult.pulldown()
super().update(tm)
@timed_state(duration=1.2, next_state='drive', first=True)
def drive_wait(self, tm, state_tm):
pass
@timed_state(duration=1.4, next_state='launch')
def drive(self, tm, state_tm):
self.drive.move(0, self.drive_speed, 0)
@timed_state(duration=1.0)
def launch(self, tm):
self.catapult.launchNoSensor()
## Instruction:
Add comments for timed shoot
## Code After:
try:
import wpilib
except ImportError:
from pyfrc import wpilib
from common.autonomous_helper import StatefulAutonomous, timed_state
class TimedShootAutonomous(StatefulAutonomous):
'''
Tunable autonomous mode that does dumb time-based shooting
decisions. Works consistently.
'''
DEFAULT = False
MODE_NAME = "Timed shoot"
def __init__(self, components):
super().__init__(components)
self.register_sd_var('drive_speed', 0.5)
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, tm):
# always keep the arm down
self.intake.armDown()
if tm > 0.3:
self.catapult.pulldown()
super().update(tm)
@timed_state(duration=1.2, next_state='drive', first=True)
def drive_wait(self, tm, state_tm):
'''Wait some period before we start driving'''
pass
@timed_state(duration=1.4, next_state='launch')
def drive(self, tm, state_tm):
'''Start the launch sequence! Drive slowly forward for N seconds'''
self.drive.move(0, self.drive_speed, 0)
@timed_state(duration=1.0)
def launch(self, tm):
'''Finally, fire and keep firing for 1 seconds'''
self.catapult.launchNoSensor()
|
try:
import wpilib
except ImportError:
from pyfrc import wpilib
from common.autonomous_helper import StatefulAutonomous, timed_state
class TimedShootAutonomous(StatefulAutonomous):
'''
Tunable autonomous mode that does dumb time-based shooting
decisions. Works consistently.
'''
DEFAULT = False
MODE_NAME = "Timed shoot"
def __init__(self, components):
super().__init__(components)
self.register_sd_var('drive_speed', 0.5)
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, tm):
# always keep the arm down
self.intake.armDown()
if tm > 0.3:
self.catapult.pulldown()
super().update(tm)
@timed_state(duration=1.2, next_state='drive', first=True)
def drive_wait(self, tm, state_tm):
+ '''Wait some period before we start driving'''
pass
@timed_state(duration=1.4, next_state='launch')
def drive(self, tm, state_tm):
+ '''Start the launch sequence! Drive slowly forward for N seconds'''
self.drive.move(0, self.drive_speed, 0)
@timed_state(duration=1.0)
def launch(self, tm):
+ '''Finally, fire and keep firing for 1 seconds'''
self.catapult.launchNoSensor()
|
eac383015161f661de33a94dae958a21761071dc
|
zeus/run.py
|
zeus/run.py
|
from app import app
import config
from rest_api.controller import rest_blueprint
app.register_blueprint(rest_blueprint)
if __name__ == "__main__":
app.run()
|
from app import app
import config
from rest_api.controller import rest_blueprint
app.register_blueprint(rest_blueprint, url_prefix="/api")
if __name__ == "__main__":
app.run()
|
Move all api routes into /api/
|
Move all api routes into /api/
|
Python
|
bsd-2-clause
|
nbroeking/OPLabs,jrahm/OPLabs,jrahm/OPLabs,ZachAnders/OPLabs,nbroeking/OPLabs,ZachAnders/OPLabs,ZachAnders/OPLabs,nbroeking/OPLabs,ZachAnders/OPLabs,nbroeking/OPLabs,nbroeking/OPLabs,jrahm/OPLabs,jrahm/OPLabs,ZachAnders/OPLabs,nbroeking/OPLabs,jrahm/OPLabs,jrahm/OPLabs,ZachAnders/OPLabs
|
from app import app
import config
from rest_api.controller import rest_blueprint
- app.register_blueprint(rest_blueprint)
+ app.register_blueprint(rest_blueprint, url_prefix="/api")
if __name__ == "__main__":
app.run()
|
Move all api routes into /api/
|
## Code Before:
from app import app
import config
from rest_api.controller import rest_blueprint
app.register_blueprint(rest_blueprint)
if __name__ == "__main__":
app.run()
## Instruction:
Move all api routes into /api/
## Code After:
from app import app
import config
from rest_api.controller import rest_blueprint
app.register_blueprint(rest_blueprint, url_prefix="/api")
if __name__ == "__main__":
app.run()
|
from app import app
import config
from rest_api.controller import rest_blueprint
- app.register_blueprint(rest_blueprint)
+ app.register_blueprint(rest_blueprint, url_prefix="/api")
? +++++++++++++++++++
if __name__ == "__main__":
app.run()
|
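Passing url_prefix mounts every route defined on the blueprint under /api without editing the route decorators themselves. A minimal standalone Flask sketch of the effect (the blueprint and route names here are illustrative, not from the project):

from flask import Blueprint, Flask

api = Blueprint('api', __name__)


@api.route('/status')
def status():
    return 'ok'


app = Flask(__name__)
app.register_blueprint(api, url_prefix='/api')  # served as GET /api/status

if __name__ == '__main__':
    app.run()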
7adcf50f27e805931e7bb4c39fa07fa346710acf
|
anserv/modules/mixpanel/generic_event_handlers.py
|
anserv/modules/mixpanel/generic_event_handlers.py
|
from modules.mixpanel.mixpanel import track_event_mixpanel
from modules.decorators import view, query, event_handler
import re
SINGLE_PAGES_TO_TRACK = ['/', '/dashboard', '/create_account']
REGEX_PAGES_TO_TRACK = ['/course', '/about']
@event_handler()
def single_page_track_event(fs, db, response):
for resp in response:
if resp['event_type'] in SINGLE_PAGES_TO_TRACK:
user = resp["username"]
track_event_mixpanel(resp['event_type'],{'user' : user, 'distinct_id' : user})
@event_handler()
def regex_track_event(fs,db,response):
for rep in response:
for regex in REGEX_PAGES_TO_TRACK:
match = re.search(regex, resp['event_type'])
if match is not None:
track_event_mixpanel(regex,{'user' : user, 'distinct_id' : user, 'full_url' : resp['event_type']})
|
from modules.mixpanel.mixpanel import track_event_mixpanel
from modules.decorators import view, query, event_handler
import re
SINGLE_PAGES_TO_TRACK = ['/', '/dashboard', '/create_account']
COURSE_PAGES_TO_TRACK = ['/courses', '/about']
@event_handler()
def single_page_track_event(fs, db, response):
for resp in response:
if resp['event_type'] in SINGLE_PAGES_TO_TRACK:
user = resp["username"]
track_event_mixpanel(resp['event_type'],{'user' : user, 'distinct_id' : user})
@event_handler()
def course_track_event(fs,db,response):
for resp in response:
for regex in COURSE_PAGES_TO_TRACK:
match = re.search(regex, resp['event_type'])
user = resp["username"]
if match is not None:
split_url = resp['event_type'].split("/")
org = split_url[2]
course = split_url[3]
track_event_mixpanel(regex,{'user' : user, 'distinct_id' : user, 'full_url' : resp['event_type'], 'course' : course, 'org' : org})
|
Fix up mixpanel course tracking
|
Fix up mixpanel course tracking
|
Python
|
agpl-3.0
|
edx/edxanalytics,edx/edxanalytics,edx/insights,edx/edxanalytics,edx/edxanalytics,edx/insights
|
from modules.mixpanel.mixpanel import track_event_mixpanel
from modules.decorators import view, query, event_handler
import re
SINGLE_PAGES_TO_TRACK = ['/', '/dashboard', '/create_account']
- REGEX_PAGES_TO_TRACK = ['/course', '/about']
+ COURSE_PAGES_TO_TRACK = ['/courses', '/about']
@event_handler()
def single_page_track_event(fs, db, response):
for resp in response:
if resp['event_type'] in SINGLE_PAGES_TO_TRACK:
user = resp["username"]
track_event_mixpanel(resp['event_type'],{'user' : user, 'distinct_id' : user})
@event_handler()
- def regex_track_event(fs,db,response):
+ def course_track_event(fs,db,response):
- for rep in response:
+ for resp in response:
- for regex in REGEX_PAGES_TO_TRACK:
+ for regex in COURSE_PAGES_TO_TRACK:
match = re.search(regex, resp['event_type'])
+ user = resp["username"]
if match is not None:
+ split_url = resp['event_type'].split("/")
+ org = split_url[2]
+ course = split_url[3]
- track_event_mixpanel(regex,{'user' : user, 'distinct_id' : user, 'full_url' : resp['event_type']})
+ track_event_mixpanel(regex,{'user' : user, 'distinct_id' : user, 'full_url' : resp['event_type'], 'course' : course, 'org' : org})
|
Fix up mixpanel course tracking
|
## Code Before:
from modules.mixpanel.mixpanel import track_event_mixpanel
from modules.decorators import view, query, event_handler
import re
SINGLE_PAGES_TO_TRACK = ['/', '/dashboard', '/create_account']
REGEX_PAGES_TO_TRACK = ['/course', '/about']
@event_handler()
def single_page_track_event(fs, db, response):
for resp in response:
if resp['event_type'] in SINGLE_PAGES_TO_TRACK:
user = resp["username"]
track_event_mixpanel(resp['event_type'],{'user' : user, 'distinct_id' : user})
@event_handler()
def regex_track_event(fs,db,response):
for rep in response:
for regex in REGEX_PAGES_TO_TRACK:
match = re.search(regex, resp['event_type'])
if match is not None:
track_event_mixpanel(regex,{'user' : user, 'distinct_id' : user, 'full_url' : resp['event_type']})
## Instruction:
Fix up mixpanel course tracking
## Code After:
from modules.mixpanel.mixpanel import track_event_mixpanel
from modules.decorators import view, query, event_handler
import re
SINGLE_PAGES_TO_TRACK = ['/', '/dashboard', '/create_account']
COURSE_PAGES_TO_TRACK = ['/courses', '/about']
@event_handler()
def single_page_track_event(fs, db, response):
for resp in response:
if resp['event_type'] in SINGLE_PAGES_TO_TRACK:
user = resp["username"]
track_event_mixpanel(resp['event_type'],{'user' : user, 'distinct_id' : user})
@event_handler()
def course_track_event(fs,db,response):
for resp in response:
for regex in COURSE_PAGES_TO_TRACK:
match = re.search(regex, resp['event_type'])
user = resp["username"]
if match is not None:
split_url = resp['event_type'].split("/")
org = split_url[2]
course = split_url[3]
track_event_mixpanel(regex,{'user' : user, 'distinct_id' : user, 'full_url' : resp['event_type'], 'course' : course, 'org' : org})
|
from modules.mixpanel.mixpanel import track_event_mixpanel
from modules.decorators import view, query, event_handler
import re
SINGLE_PAGES_TO_TRACK = ['/', '/dashboard', '/create_account']
- REGEX_PAGES_TO_TRACK = ['/course', '/about']
? ---
+ COURSE_PAGES_TO_TRACK = ['/courses', '/about']
? +++ + +
@event_handler()
def single_page_track_event(fs, db, response):
for resp in response:
if resp['event_type'] in SINGLE_PAGES_TO_TRACK:
user = resp["username"]
track_event_mixpanel(resp['event_type'],{'user' : user, 'distinct_id' : user})
@event_handler()
- def regex_track_event(fs,db,response):
? ---
+ def course_track_event(fs,db,response):
? +++ +
- for rep in response:
+ for resp in response:
? +
- for regex in REGEX_PAGES_TO_TRACK:
? ---
+ for regex in COURSE_PAGES_TO_TRACK:
? +++ +
match = re.search(regex, resp['event_type'])
+ user = resp["username"]
if match is not None:
+ split_url = resp['event_type'].split("/")
+ org = split_url[2]
+ course = split_url[3]
- track_event_mixpanel(regex,{'user' : user, 'distinct_id' : user, 'full_url' : resp['event_type']})
+ track_event_mixpanel(regex,{'user' : user, 'distinct_id' : user, 'full_url' : resp['event_type'], 'course' : course, 'org' : org})
? ++++++++++++++++++++++++++++++++
|
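The org/course extraction in the fix depends purely on segment position: for a tracked URL shaped like /courses/<org>/<course>/..., splitting on '/' puts the org at index 2 and the course at index 3. A small standalone check (the sample URL is made up):

event_type = '/courses/MITx/6.002x/about'  # hypothetical tracked page
parts = event_type.split('/')              # ['', 'courses', 'MITx', '6.002x', 'about']
org, course = parts[2], parts[3]

assert (org, course) == ('MITx', '6.002x')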
889eed552f4e17797764a9d9a2da6bbaa6d5dd33
|
admin_panel/views.py
|
admin_panel/views.py
|
from django.views import View
from django.views.generic import TemplateView
from django.contrib import auth
from django.contrib import messages
from django import http
class LoginView(TemplateView):
template_name = "admin/login.html"
def post(self, request):
username = request.POST['username']
password = request.POST['password']
user_object = auth.authenticate(request, username=username, password=password)
if user_object is None:
messages.error(request, "Invalid credentials")
return self.get(request)
auth.login(request, user_object)
messages.success(request, "You've been logged in")
return http.HttpResponseRedirect(self.get_next_url(request))
def get_next_url(self, request):
if "next" in request.GET:
return request.GET['next']
else:
return "/administration/panel"
class Panel(TemplateView):
template_name = "admin/panel.html"
class LogoutView(View):
def get(self, request):
auth.logout(request)
return http.HttpResponseRedirect("/administration/login")
|
from django.views import View
from django.views.generic import TemplateView
from django.contrib import auth
from django.contrib import messages
from django import http
from django.urls import reverse
class LoginView(TemplateView):
template_name = "admin/login.html"
def post(self, request):
username = request.POST['username']
password = request.POST['password']
user_object = auth.authenticate(request, username=username, password=password)
if user_object is None:
messages.error(request, "Invalid credentials")
return self.get(request)
auth.login(request, user_object)
messages.success(request, "You've been logged in")
return http.HttpResponseRedirect(self.get_next_url(request))
def get_next_url(self, request):
if "next" in request.GET:
return request.GET['next']
else:
return reverse("admin:Panel")
class Panel(TemplateView):
template_name = "admin/panel.html"
class LogoutView(View):
def get(self, request):
auth.logout(request)
return http.HttpResponseRedirect("/administration/login")
|
Use django reverse function to obtain url instead of hard-coding
|
Use django reverse function to obtain url instead of hard-coding
|
Python
|
mpl-2.0
|
Apo11onian/Apollo-Blog,Apo11onian/Apollo-Blog,Apo11onian/Apollo-Blog
|
from django.views import View
from django.views.generic import TemplateView
from django.contrib import auth
from django.contrib import messages
from django import http
+ from django.urls import reverse
class LoginView(TemplateView):
template_name = "admin/login.html"
def post(self, request):
username = request.POST['username']
password = request.POST['password']
user_object = auth.authenticate(request, username=username, password=password)
if user_object is None:
messages.error(request, "Invalid credentials")
return self.get(request)
auth.login(request, user_object)
messages.success(request, "You've been logged in")
return http.HttpResponseRedirect(self.get_next_url(request))
def get_next_url(self, request):
if "next" in request.GET:
return request.GET['next']
else:
- return "/administration/panel"
+ return reverse("admin:Panel")
class Panel(TemplateView):
template_name = "admin/panel.html"
class LogoutView(View):
def get(self, request):
auth.logout(request)
return http.HttpResponseRedirect("/administration/login")
|
Use django reverse function to obtain url instead of hard-coding
|
## Code Before:
from django.views import View
from django.views.generic import TemplateView
from django.contrib import auth
from django.contrib import messages
from django import http
class LoginView(TemplateView):
template_name = "admin/login.html"
def post(self, request):
username = request.POST['username']
password = request.POST['password']
user_object = auth.authenticate(request, username=username, password=password)
if user_object is None:
messages.error(request, "Invalid credentials")
return self.get(request)
auth.login(request, user_object)
messages.success(request, "You've been logged in")
return http.HttpResponseRedirect(self.get_next_url(request))
def get_next_url(self, request):
if "next" in request.GET:
return request.GET['next']
else:
return "/administration/panel"
class Panel(TemplateView):
template_name = "admin/panel.html"
class LogoutView(View):
def get(self, request):
auth.logout(request)
return http.HttpResponseRedirect("/administration/login")
## Instruction:
Use django reverse function to obtain url instead of hard-coding
## Code After:
from django.views import View
from django.views.generic import TemplateView
from django.contrib import auth
from django.contrib import messages
from django import http
from django.urls import reverse
class LoginView(TemplateView):
template_name = "admin/login.html"
def post(self, request):
username = request.POST['username']
password = request.POST['password']
user_object = auth.authenticate(request, username=username, password=password)
if user_object is None:
messages.error(request, "Invalid credentials")
return self.get(request)
auth.login(request, user_object)
messages.success(request, "You've been logged in")
return http.HttpResponseRedirect(self.get_next_url(request))
def get_next_url(self, request):
if "next" in request.GET:
return request.GET['next']
else:
return reverse("admin:Panel")
class Panel(TemplateView):
template_name = "admin/panel.html"
class LogoutView(View):
def get(self, request):
auth.logout(request)
return http.HttpResponseRedirect("/administration/login")
|
from django.views import View
from django.views.generic import TemplateView
from django.contrib import auth
from django.contrib import messages
from django import http
+ from django.urls import reverse
class LoginView(TemplateView):
template_name = "admin/login.html"
def post(self, request):
username = request.POST['username']
password = request.POST['password']
user_object = auth.authenticate(request, username=username, password=password)
if user_object is None:
messages.error(request, "Invalid credentials")
return self.get(request)
auth.login(request, user_object)
messages.success(request, "You've been logged in")
return http.HttpResponseRedirect(self.get_next_url(request))
def get_next_url(self, request):
if "next" in request.GET:
return request.GET['next']
else:
- return "/administration/panel"
+ return reverse("admin:Panel")
class Panel(TemplateView):
template_name = "admin/panel.html"
class LogoutView(View):
def get(self, request):
auth.logout(request)
return http.HttpResponseRedirect("/administration/login")
|
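reverse('admin:Panel') only resolves if the URLconf registers the panel view under that namespace and name. A hedged sketch of what such a urls.py could look like (module layout, paths, and names are assumptions, not taken from the repository):

# admin_panel/urls.py (assumed layout)
from django.urls import path

from . import views

app_name = 'admin'  # namespace part of reverse("admin:Panel")

urlpatterns = [
    path('administration/login', views.LoginView.as_view(), name='Login'),
    path('administration/panel', views.Panel.as_view(), name='Panel'),
    path('administration/logout', views.LogoutView.as_view(), name='Logout'),
]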
31d0cd541980ef6bf15d3a29b68cc0cc994c28a4
|
packs/st2cd/actions/kvstore.py
|
packs/st2cd/actions/kvstore.py
|
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
instance = KeyValuePair()
instance.id = client.keys.get_by_name(key).name
instance.name = key
instance.value = value
try:
kvstore = getattr(client.keys, action)
kvp = kvstore(instance)
except Exception as e:
raise
if action == 'delete':
return kvp
else:
return kvp.serialize()
|
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
instance = client.keys.get_by_name(key) or KeyValuePair()
instance.id = key
instance.name = key
instance.value = value
kvp = client.keys.update(instance) if action in ['create', 'update'] else None
if action == 'delete':
return kvp
else:
return kvp.serialize()
|
Fix create action for key value pair
|
Fix create action for key value pair
|
Python
|
apache-2.0
|
StackStorm/st2incubator,pinterb/st2incubator,pinterb/st2incubator,pinterb/st2incubator,StackStorm/st2incubator
|
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
- instance = KeyValuePair()
- instance.id = client.keys.get_by_name(key).name
+ instance = client.keys.get_by_name(key) or KeyValuePair()
+ instance.id = key
instance.name = key
instance.value = value
+ kvp = client.keys.update(instance) if action in ['create', 'update'] else None
- try:
- kvstore = getattr(client.keys, action)
- kvp = kvstore(instance)
- except Exception as e:
- raise
if action == 'delete':
return kvp
else:
return kvp.serialize()
|
Fix create action for key value pair
|
## Code Before:
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
instance = KeyValuePair()
instance.id = client.keys.get_by_name(key).name
instance.name = key
instance.value = value
try:
kvstore = getattr(client.keys, action)
kvp = kvstore(instance)
except Exception as e:
raise
if action == 'delete':
return kvp
else:
return kvp.serialize()
## Instruction:
Fix create action for key value pair
## Code After:
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
instance = client.keys.get_by_name(key) or KeyValuePair()
instance.id = key
instance.name = key
instance.value = value
kvp = client.keys.update(instance) if action in ['create', 'update'] else None
if action == 'delete':
return kvp
else:
return kvp.serialize()
|
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
- instance = KeyValuePair()
- instance.id = client.keys.get_by_name(key).name
? --- ^^ ^
+ instance = client.keys.get_by_name(key) or KeyValuePair()
? ^^^^^^^^ ^^ ++++++
+ instance.id = key
instance.name = key
instance.value = value
+ kvp = client.keys.update(instance) if action in ['create', 'update'] else None
- try:
- kvstore = getattr(client.keys, action)
- kvp = kvstore(instance)
- except Exception as e:
- raise
if action == 'delete':
return kvp
else:
return kvp.serialize()
|
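The fix collapses the create/update paths into one upsert: reuse the stored pair when the key already exists, otherwise start from a blank KeyValuePair, then write it back through client.keys.update in both cases. A generic sketch of that pattern with a plain dict standing in for the datastore (everything here is a stand-in, not the st2client API):

store = {}  # stand-in for the key/value service


def upsert(key, value):
    # Reuse the existing record when present, otherwise start a fresh one.
    record = store.get(key) or {'id': key, 'name': key}
    record['value'] = value
    store[key] = record
    return record


upsert('deploy_env', 'staging')
assert store['deploy_env']['value'] == 'staging'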
6cf2a3966e12af5f86781a5d20c0810953722811
|
tests/basics/scope.py
|
tests/basics/scope.py
|
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
|
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
# nonlocal at inner-inner level (h)
def f():
x = 1
def g():
def h():
nonlocal x
return x
return h
return g
print(f()()())
# nonlocal declared at outer level (g), and referenced by inner level (h)
def f():
x = 1
def g():
nonlocal x
def h():
return x
return h
return g
print(f()()())
|
Add further tests for nonlocal scoping and closures.
|
tests/basics: Add further tests for nonlocal scoping and closures.
|
Python
|
mit
|
lowRISC/micropython,ryannathans/micropython,tralamazza/micropython,micropython/micropython-esp32,cwyark/micropython,deshipu/micropython,lowRISC/micropython,alex-march/micropython,adafruit/micropython,Peetz0r/micropython-esp32,SHA2017-badge/micropython-esp32,turbinenreiter/micropython,deshipu/micropython,ryannathans/micropython,HenrikSolver/micropython,trezor/micropython,micropython/micropython-esp32,hiway/micropython,kerneltask/micropython,mhoffma/micropython,MrSurly/micropython-esp32,chrisdearman/micropython,hosaka/micropython,selste/micropython,infinnovation/micropython,tobbad/micropython,adafruit/circuitpython,tobbad/micropython,PappaPeppar/micropython,bvernoux/micropython,toolmacher/micropython,matthewelse/micropython,dmazzella/micropython,turbinenreiter/micropython,torwag/micropython,MrSurly/micropython,hiway/micropython,kerneltask/micropython,hosaka/micropython,lowRISC/micropython,mhoffma/micropython,cwyark/micropython,pramasoul/micropython,alex-robbins/micropython,dmazzella/micropython,infinnovation/micropython,bvernoux/micropython,oopy/micropython,selste/micropython,jmarcelino/pycom-micropython,turbinenreiter/micropython,mhoffma/micropython,tuc-osg/micropython,MrSurly/micropython,adafruit/circuitpython,dxxb/micropython,pozetroninc/micropython,SHA2017-badge/micropython-esp32,deshipu/micropython,Timmenem/micropython,bvernoux/micropython,alex-robbins/micropython,turbinenreiter/micropython,puuu/micropython,dxxb/micropython,pozetroninc/micropython,TDAbboud/micropython,puuu/micropython,hosaka/micropython,deshipu/micropython,tuc-osg/micropython,AriZuu/micropython,Peetz0r/micropython-esp32,adafruit/circuitpython,adafruit/circuitpython,MrSurly/micropython,toolmacher/micropython,hosaka/micropython,lowRISC/micropython,henriknelson/micropython,pfalcon/micropython,bvernoux/micropython,HenrikSolver/micropython,blazewicz/micropython,PappaPeppar/micropython,alex-march/micropython,henriknelson/micropython,dxxb/micropython,blazewicz/micropython,Timmenem/micropython,blazewicz/micropython,blazewicz/micropython,tobbad/micropython,swegener/micropython,oopy/micropython,TDAbboud/micropython,jmarcelino/pycom-micropython,hiway/micropython,jmarcelino/pycom-micropython,PappaPeppar/micropython,HenrikSolver/micropython,dmazzella/micropython,trezor/micropython,cwyark/micropython,kerneltask/micropython,tobbad/micropython,TDAbboud/micropython,matthewelse/micropython,chrisdearman/micropython,torwag/micropython,ryannathans/micropython,pramasoul/micropython,trezor/micropython,oopy/micropython,toolmacher/micropython,adafruit/micropython,pfalcon/micropython,chrisdearman/micropython,Timmenem/micropython,Timmenem/micropython,puuu/micropython,AriZuu/micropython,dxxb/micropython,swegener/micropython,swegener/micropython,MrSurly/micropython,pramasoul/micropython,hosaka/micropython,alex-robbins/micropython,micropython/micropython-esp32,selste/micropython,tralamazza/micropython,tralamazza/micropython,matthewelse/micropython,mhoffma/micropython,Peetz0r/micropython-esp32,selste/micropython,alex-march/micropython,tralamazza/micropython,adafruit/circuitpython,pfalcon/micropython,adafruit/circuitpython,jmarcelino/pycom-micropython,tuc-osg/micropython,TDAbboud/micropython,henriknelson/micropython,MrSurly/micropython-esp32,torwag/micropython,chrisdearman/micropython,puuu/micropython,SHA2017-badge/micropython-esp32,MrSurly/micropython-esp32,Peetz0r/micropython-esp32,micropython/micropython-esp32,hiway/micropython,torwag/micropython,SHA2017-badge/micropython-esp32,mhoffma/micropython,AriZuu/micropython,oopy/micropython,pfalcon/micropyt
hon,pramasoul/micropython,MrSurly/micropython,hiway/micropython,toolmacher/micropython,AriZuu/micropython,kerneltask/micropython,chrisdearman/micropython,selste/micropython,micropython/micropython-esp32,matthewelse/micropython,PappaPeppar/micropython,matthewelse/micropython,matthewelse/micropython,deshipu/micropython,bvernoux/micropython,Timmenem/micropython,cwyark/micropython,alex-robbins/micropython,TDAbboud/micropython,henriknelson/micropython,adafruit/micropython,oopy/micropython,alex-march/micropython,dmazzella/micropython,MrSurly/micropython-esp32,turbinenreiter/micropython,kerneltask/micropython,PappaPeppar/micropython,pozetroninc/micropython,pozetroninc/micropython,tobbad/micropython,blazewicz/micropython,swegener/micropython,tuc-osg/micropython,trezor/micropython,alex-robbins/micropython,torwag/micropython,dxxb/micropython,adafruit/micropython,HenrikSolver/micropython,cwyark/micropython,infinnovation/micropython,HenrikSolver/micropython,pfalcon/micropython,adafruit/micropython,infinnovation/micropython,ryannathans/micropython,henriknelson/micropython,toolmacher/micropython,MrSurly/micropython-esp32,lowRISC/micropython,AriZuu/micropython,Peetz0r/micropython-esp32,alex-march/micropython,SHA2017-badge/micropython-esp32,pramasoul/micropython,tuc-osg/micropython,infinnovation/micropython,puuu/micropython,jmarcelino/pycom-micropython,trezor/micropython,pozetroninc/micropython,swegener/micropython,ryannathans/micropython
|
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
+ # nonlocal at inner-inner level (h)
+ def f():
+ x = 1
+ def g():
+ def h():
+ nonlocal x
+ return x
+ return h
+ return g
+ print(f()()())
+
+ # nonlocal declared at outer level (g), and referenced by inner level (h)
+ def f():
+ x = 1
+ def g():
+ nonlocal x
+ def h():
+ return x
+ return h
+ return g
+ print(f()()())
+
|
Add further tests for nonlocal scoping and closures.
|
## Code Before:
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
## Instruction:
Add further tests for nonlocal scoping and closures.
## Code After:
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
# nonlocal at inner-inner level (h)
def f():
x = 1
def g():
def h():
nonlocal x
return x
return h
return g
print(f()()())
# nonlocal declared at outer level (g), and referenced by inner level (h)
def f():
x = 1
def g():
nonlocal x
def h():
return x
return h
return g
print(f()()())
|
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
+
+ # nonlocal at inner-inner level (h)
+ def f():
+ x = 1
+ def g():
+ def h():
+ nonlocal x
+ return x
+ return h
+ return g
+ print(f()()())
+
+ # nonlocal declared at outer level (g), and referenced by inner level (h)
+ def f():
+ x = 1
+ def g():
+ nonlocal x
+ def h():
+ return x
+ return h
+ return g
+ print(f()()())
|
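Both added cases exercise a variable that lives in f's frame but is only declared nonlocal further down, so the innermost h reads the very same closure cell and each print(f()()()) outputs 1. A reduced standalone sketch of the same cell sharing, this time including a rebinding through nonlocal:

def outer():
    x = 1
    def middle():
        nonlocal x
        x = 2              # rebinds outer's x through the shared cell
        def inner():
            return x       # reads the same cell, so it sees 2
        return inner
    return middle

print(outer()()())  # prints 2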
eb1c913a0800e2d5eabf34e7abce96c8f4096d79
|
marble/tests/test_neighbourhoods.py
|
marble/tests/test_neighbourhoods.py
|
""" Tests for the extraction of neighbourhoods """
from nose.tools import *
import marble as mb
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
|
""" Tests for the extraction of neighbourhoods """
from nose.tools import *
import itertools
from shapely.geometry import Polygon
import marble as mb
from marble.neighbourhoods import _adjacency
#
# Synthetic data for tests
#
def grid():
au = [i*3+j for i,j in itertools.product(range(3), repeat=2)]
units = {a:Polygon([(a%3, a/3),
(a%3, 1+a/3),
(1+a%3, 1+a/3),
(1+a%3, a/3)]) for a in au}
return units
class TestNeighbourhoods(object):
def test_adjacency(test):
""" Test the extraction of the adjacency list """
units = grid()
adj = _adjacency(units)
adj_answer = {0:[1,3],
1:[0,4,2],
2:[1,5],
3:[0,4,6],
4:[1,3,5,7],
5:[2,4,8],
6:[3,7],
7:[4,6,8],
8:[5,7]}
for au in adj:
assert set(adj[au]) == set(adj_answer[au])
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
|
Test the adjacency matrix finder
|
Test the adjacency matrix finder
|
Python
|
bsd-3-clause
|
scities/marble,walkerke/marble
|
""" Tests for the extraction of neighbourhoods """
from nose.tools import *
+ import itertools
+ from shapely.geometry import Polygon
import marble as mb
+ from marble.neighbourhoods import _adjacency
+
+
+ #
+ # Synthetic data for tests
+ #
+ def grid():
+ au = [i*3+j for i,j in itertools.product(range(3), repeat=2)]
+ units = {a:Polygon([(a%3, a/3),
+ (a%3, 1+a/3),
+ (1+a%3, 1+a/3),
+ (1+a%3, a/3)]) for a in au}
+ return units
+
+
+
+ class TestNeighbourhoods(object):
+
+ def test_adjacency(test):
+ """ Test the extraction of the adjacency list """
+ units = grid()
+ adj = _adjacency(units)
+ adj_answer = {0:[1,3],
+ 1:[0,4,2],
+ 2:[1,5],
+ 3:[0,4,6],
+ 4:[1,3,5,7],
+ 5:[2,4,8],
+ 6:[3,7],
+ 7:[4,6,8],
+ 8:[5,7]}
+
+ for au in adj:
+ assert set(adj[au]) == set(adj_answer[au])
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
|
Test the adjacency matrix finder
|
## Code Before:
""" Tests for the extraction of neighbourhoods """
from nose.tools import *
import marble as mb
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
## Instruction:
Test the adjacency matrix finder
## Code After:
""" Tests for the extraction of neighbourhoods """
from nose.tools import *
import itertools
from shapely.geometry import Polygon
import marble as mb
from marble.neighbourhoods import _adjacency
#
# Synthetic data for tests
#
def grid():
au = [i*3+j for i,j in itertools.product(range(3), repeat=2)]
units = {a:Polygon([(a%3, a/3),
(a%3, 1+a/3),
(1+a%3, 1+a/3),
(1+a%3, a/3)]) for a in au}
return units
class TestNeighbourhoods(object):
def test_adjacency(test):
""" Test the extraction of the adjacency list """
units = grid()
adj = _adjacency(units)
adj_answer = {0:[1,3],
1:[0,4,2],
2:[1,5],
3:[0,4,6],
4:[1,3,5,7],
5:[2,4,8],
6:[3,7],
7:[4,6,8],
8:[5,7]}
for au in adj:
assert set(adj[au]) == set(adj_answer[au])
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
|
""" Tests for the extraction of neighbourhoods """
from nose.tools import *
+ import itertools
+ from shapely.geometry import Polygon
import marble as mb
+ from marble.neighbourhoods import _adjacency
+
+
+ #
+ # Synthetic data for tests
+ #
+ def grid():
+ au = [i*3+j for i,j in itertools.product(range(3), repeat=2)]
+ units = {a:Polygon([(a%3, a/3),
+ (a%3, 1+a/3),
+ (1+a%3, 1+a/3),
+ (1+a%3, a/3)]) for a in au}
+ return units
+
+
+
+ class TestNeighbourhoods(object):
+
+ def test_adjacency(test):
+ """ Test the extraction of the adjacency list """
+ units = grid()
+ adj = _adjacency(units)
+ adj_answer = {0:[1,3],
+ 1:[0,4,2],
+ 2:[1,5],
+ 3:[0,4,6],
+ 4:[1,3,5,7],
+ 5:[2,4,8],
+ 6:[3,7],
+ 7:[4,6,8],
+ 8:[5,7]}
+
+ for au in adj:
+ assert set(adj[au]) == set(adj_answer[au])
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
|
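The expected adjacency list above deliberately excludes diagonal (corner-only) neighbours, which is what the trailing comment about `.touch` warns about: corner-touching polygons still satisfy Shapely's `touches`. One way to keep only edge-sharing pairs is to test the length of the shared boundary; this is an illustrative sketch, not the actual `marble.neighbourhoods._adjacency` implementation:

import itertools
from shapely.geometry import Polygon

def edge_adjacency(units):
    """Map each areal unit to the units it shares a boundary segment with."""
    adj = {a: [] for a in units}
    for a, b in itertools.combinations(units, 2):
        shared = units[a].intersection(units[b])
        # Corner-touching squares intersect in a Point (length 0); true
        # neighbours share a LineString with positive length.
        if shared.length > 0:
            adj[a].append(b)
            adj[b].append(a)
    return adj

# 2x2 grid of unit squares: diagonal cells are never reported as neighbours.
units = {a: Polygon([(a % 2, a // 2), (a % 2, 1 + a // 2),
                     (1 + a % 2, 1 + a // 2), (1 + a % 2, a // 2)])
         for a in range(4)}
print(edge_adjacency(units))   # 0-1, 0-2, 1-3, 2-3; never 0-3 or 1-2
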
ee7d663f3c7e5c52581527167938d81ca2a07a3d
|
bisnode/models.py
|
bisnode/models.py
|
from datetime import datetime
from django.db import models
from .constants import COMPANY_RATING_REPORT, RATING_CHOICES
from .bisnode import get_bisnode_company_report
def bisnode_date_to_date(bisnode_date):
formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d")
return formatted_datetime.date()
class BisnodeRatingReport(models.Model):
organization_number = models.CharField(max_length=10)
rating = models.CharField(max_length=3, choices=RATING_CHOICES,
null=True, blank=True)
date_of_rating = models.DateField(blank=True, null=True)
registration_date = models.DateField(blank=True, null=True)
last_updated = models.DateTimeField(auto_now=True)
def get(self):
rating_report = get_bisnode_company_report(
report_type=COMPANY_RATING_REPORT,
organization_number=self.organization_number)
company_data = rating_report.generalCompanyData[0]
self.rating = company_data['ratingCode']
self.date_of_rating = bisnode_date_to_date(
company_data['dateOfRating'])
self.registration_date = bisnode_date_to_date(
company_data['dateReg'])
self.save()
|
from datetime import datetime
from django.db import models
from .constants import COMPANY_RATING_REPORT, RATING_CHOICES
from .bisnode import get_bisnode_company_report
def bisnode_date_to_date(bisnode_date):
formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d")
return formatted_datetime.date()
class BisnodeRatingReport(models.Model):
organization_number = models.CharField(max_length=10, unique=True)
rating = models.CharField(max_length=3, choices=RATING_CHOICES,
null=True, blank=True)
date_of_rating = models.DateField(blank=True, null=True)
registration_date = models.DateField(blank=True, null=True)
last_updated = models.DateTimeField(auto_now=True)
def get(self):
rating_report = get_bisnode_company_report(
report_type=COMPANY_RATING_REPORT,
organization_number=self.organization_number)
company_data = rating_report.generalCompanyData[0]
self.rating = company_data['ratingCode']
self.date_of_rating = bisnode_date_to_date(
company_data['dateOfRating'])
self.registration_date = bisnode_date_to_date(
company_data['dateReg'])
self.save()
|
Make Organization Number a unique field
|
Make Organization Number a unique field
|
Python
|
mit
|
FundedByMe/django-bisnode
|
from datetime import datetime
from django.db import models
from .constants import COMPANY_RATING_REPORT, RATING_CHOICES
from .bisnode import get_bisnode_company_report
def bisnode_date_to_date(bisnode_date):
formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d")
return formatted_datetime.date()
class BisnodeRatingReport(models.Model):
- organization_number = models.CharField(max_length=10)
+ organization_number = models.CharField(max_length=10, unique=True)
rating = models.CharField(max_length=3, choices=RATING_CHOICES,
null=True, blank=True)
date_of_rating = models.DateField(blank=True, null=True)
registration_date = models.DateField(blank=True, null=True)
last_updated = models.DateTimeField(auto_now=True)
def get(self):
rating_report = get_bisnode_company_report(
report_type=COMPANY_RATING_REPORT,
organization_number=self.organization_number)
company_data = rating_report.generalCompanyData[0]
self.rating = company_data['ratingCode']
self.date_of_rating = bisnode_date_to_date(
company_data['dateOfRating'])
self.registration_date = bisnode_date_to_date(
company_data['dateReg'])
self.save()
|
Make Organization Number a unique field
|
## Code Before:
from datetime import datetime
from django.db import models
from .constants import COMPANY_RATING_REPORT, RATING_CHOICES
from .bisnode import get_bisnode_company_report
def bisnode_date_to_date(bisnode_date):
formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d")
return formatted_datetime.date()
class BisnodeRatingReport(models.Model):
organization_number = models.CharField(max_length=10)
rating = models.CharField(max_length=3, choices=RATING_CHOICES,
null=True, blank=True)
date_of_rating = models.DateField(blank=True, null=True)
registration_date = models.DateField(blank=True, null=True)
last_updated = models.DateTimeField(auto_now=True)
def get(self):
rating_report = get_bisnode_company_report(
report_type=COMPANY_RATING_REPORT,
organization_number=self.organization_number)
company_data = rating_report.generalCompanyData[0]
self.rating = company_data['ratingCode']
self.date_of_rating = bisnode_date_to_date(
company_data['dateOfRating'])
self.registration_date = bisnode_date_to_date(
company_data['dateReg'])
self.save()
## Instruction:
Make Organization Number a unique field
## Code After:
from datetime import datetime
from django.db import models
from .constants import COMPANY_RATING_REPORT, RATING_CHOICES
from .bisnode import get_bisnode_company_report
def bisnode_date_to_date(bisnode_date):
formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d")
return formatted_datetime.date()
class BisnodeRatingReport(models.Model):
organization_number = models.CharField(max_length=10, unique=True)
rating = models.CharField(max_length=3, choices=RATING_CHOICES,
null=True, blank=True)
date_of_rating = models.DateField(blank=True, null=True)
registration_date = models.DateField(blank=True, null=True)
last_updated = models.DateTimeField(auto_now=True)
def get(self):
rating_report = get_bisnode_company_report(
report_type=COMPANY_RATING_REPORT,
organization_number=self.organization_number)
company_data = rating_report.generalCompanyData[0]
self.rating = company_data['ratingCode']
self.date_of_rating = bisnode_date_to_date(
company_data['dateOfRating'])
self.registration_date = bisnode_date_to_date(
company_data['dateReg'])
self.save()
|
from datetime import datetime
from django.db import models
from .constants import COMPANY_RATING_REPORT, RATING_CHOICES
from .bisnode import get_bisnode_company_report
def bisnode_date_to_date(bisnode_date):
formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d")
return formatted_datetime.date()
class BisnodeRatingReport(models.Model):
- organization_number = models.CharField(max_length=10)
+ organization_number = models.CharField(max_length=10, unique=True)
? +++++++++++++
rating = models.CharField(max_length=3, choices=RATING_CHOICES,
null=True, blank=True)
date_of_rating = models.DateField(blank=True, null=True)
registration_date = models.DateField(blank=True, null=True)
last_updated = models.DateTimeField(auto_now=True)
def get(self):
rating_report = get_bisnode_company_report(
report_type=COMPANY_RATING_REPORT,
organization_number=self.organization_number)
company_data = rating_report.generalCompanyData[0]
self.rating = company_data['ratingCode']
self.date_of_rating = bisnode_date_to_date(
company_data['dateOfRating'])
self.registration_date = bisnode_date_to_date(
company_data['dateReg'])
self.save()
|
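`unique=True` adds a database-level UNIQUE constraint once the corresponding migration is applied, so a second report row with the same organization number is rejected by the database. A hedged sketch of the behaviour with a stand-in model (names here are hypothetical, not the bisnode app):

from django.db import models

class Company(models.Model):
    organization_number = models.CharField(max_length=10, unique=True)

# After makemigrations/migrate, inserting a duplicate raises
# django.db.utils.IntegrityError:
#   Company.objects.create(organization_number="5561234567")
#   Company.objects.create(organization_number="5561234567")  # IntegrityError
# get_or_create leans on the same guarantee to avoid duplicates:
def register(number):
    company, created = Company.objects.get_or_create(organization_number=number)
    return company
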
0cd084550fc5c1315fe33fcb00e57c1c332be6ab
|
indra/tests/test_mesh.py
|
indra/tests/test_mesh.py
|
from indra.databases import mesh_client
def test_mesh_id_lookup():
mesh_id = 'D003094'
mesh_name = mesh_client.get_mesh_name(mesh_id)
assert mesh_name == 'Collagen'
|
from indra.databases import mesh_client
def test_mesh_id_lookup():
mesh_id = 'D003094'
mesh_name = mesh_client.get_mesh_name(mesh_id)
assert mesh_name == 'Collagen'
def test_invalid_id():
mesh_name = mesh_client.get_mesh_name('34jkgfh')
assert mesh_name is None
|
Add test for invalid MESH ID
|
Add test for invalid MESH ID
|
Python
|
bsd-2-clause
|
pvtodorov/indra,johnbachman/indra,pvtodorov/indra,johnbachman/indra,bgyori/indra,pvtodorov/indra,sorgerlab/belpy,johnbachman/belpy,sorgerlab/indra,sorgerlab/indra,bgyori/indra,pvtodorov/indra,sorgerlab/belpy,sorgerlab/belpy,sorgerlab/indra,bgyori/indra,johnbachman/indra,johnbachman/belpy,johnbachman/belpy
|
from indra.databases import mesh_client
def test_mesh_id_lookup():
mesh_id = 'D003094'
mesh_name = mesh_client.get_mesh_name(mesh_id)
assert mesh_name == 'Collagen'
+ def test_invalid_id():
+ mesh_name = mesh_client.get_mesh_name('34jkgfh')
+ assert mesh_name is None
+
+
|
Add test for invalid MESH ID
|
## Code Before:
from indra.databases import mesh_client
def test_mesh_id_lookup():
mesh_id = 'D003094'
mesh_name = mesh_client.get_mesh_name(mesh_id)
assert mesh_name == 'Collagen'
## Instruction:
Add test for invalid MESH ID
## Code After:
from indra.databases import mesh_client
def test_mesh_id_lookup():
mesh_id = 'D003094'
mesh_name = mesh_client.get_mesh_name(mesh_id)
assert mesh_name == 'Collagen'
def test_invalid_id():
mesh_name = mesh_client.get_mesh_name('34jkgfh')
assert mesh_name is None
|
from indra.databases import mesh_client
def test_mesh_id_lookup():
mesh_id = 'D003094'
mesh_name = mesh_client.get_mesh_name(mesh_id)
assert mesh_name == 'Collagen'
+
+ def test_invalid_id():
+ mesh_name = mesh_client.get_mesh_name('34jkgfh')
+ assert mesh_name is None
+
|
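The new test pins down the failure mode of the lookup: an unknown identifier should come back as None rather than raising. The same pattern generalises to any ID-to-name resolver; a hypothetical pytest-style sketch (the names below are placeholders, not the indra API):

import pytest

_NAMES = {'D003094': 'Collagen'}   # stand-in for the real MeSH lookup

def get_name(identifier):
    return _NAMES.get(identifier)  # None for unknown IDs instead of KeyError

@pytest.mark.parametrize("identifier, expected", [
    ('D003094', 'Collagen'),   # known ID resolves to its name
    ('34jkgfh', None),         # garbage ID resolves to None, no exception
])
def test_lookup(identifier, expected):
    assert get_name(identifier) == expected
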
f908501860858311536a3fef03fda7a632ce5412
|
djohno/tests/test_utils.py
|
djohno/tests/test_utils.py
|
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('[email protected]'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <[email protected]>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <[email protected]>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
|
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('[email protected]'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <[email protected]>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <[email protected]>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
"""
Ensure we can correctly get the version of a few simple apps
(Baz and Moo are bundled in djohno.test, and set up in
test_settings.py).
"""
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
|
Add a missing test description
|
Add a missing test description
|
Python
|
bsd-2-clause
|
dominicrodger/djohno,dominicrodger/djohno
|
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('[email protected]'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <[email protected]>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <[email protected]>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
+ """
+ Ensure we can correctly get the version of a few simple apps
+ (Baz and Moo are bundled in djohno.test, and set up in
+ test_settings.py).
+ """
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
|
Add a missing test description
|
## Code Before:
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('[email protected]'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <[email protected]>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <[email protected]>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
## Instruction:
Add a missing test description
## Code After:
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('[email protected]'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <[email protected]>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <[email protected]>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
"""
Ensure we can correctly get the version of a few simple apps
(Baz and Moo are bundled in djohno.test, and set up in
test_settings.py).
"""
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
|
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('[email protected]'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <[email protected]>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <[email protected]>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
+ """
+ Ensure we can correctly get the version of a few simple apps
+ (Baz and Moo are bundled in djohno.test, and set up in
+ test_settings.py).
+ """
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
|
4d410dec85fc944717a6537e9eef2585a53159b6
|
python_logging_rabbitmq/formatters.py
|
python_logging_rabbitmq/formatters.py
|
import logging
from socket import gethostname
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
del data[f]
return json.dumps(data)
|
import logging
from socket import gethostname
from django.core.serializers.json import DjangoJSONEncoder
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
if f in data:
del data[f]
return json.dumps(data, cls=DjangoJSONEncoder)
|
Use DjangoJSONEncoder for JSON serialization
|
Use DjangoJSONEncoder for JSON serialization
|
Python
|
mit
|
albertomr86/python-logging-rabbitmq
|
import logging
from socket import gethostname
+ from django.core.serializers.json import DjangoJSONEncoder
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
+ if f in data:
- del data[f]
+ del data[f]
- return json.dumps(data)
+ return json.dumps(data, cls=DjangoJSONEncoder)
|
Use DjangoJSONEncoder for JSON serialization
|
## Code Before:
import logging
from socket import gethostname
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
del data[f]
return json.dumps(data)
## Instruction:
Use DjangoJSONEncoder for JSON serialization
## Code After:
import logging
from socket import gethostname
from django.core.serializers.json import DjangoJSONEncoder
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
if f in data:
del data[f]
return json.dumps(data, cls=DjangoJSONEncoder)
|
import logging
from socket import gethostname
+ from django.core.serializers.json import DjangoJSONEncoder
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
+ if f in data:
- del data[f]
+ del data[f]
? +++++
- return json.dumps(data)
+ return json.dumps(data, cls=DjangoJSONEncoder)
|
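The switch to `DjangoJSONEncoder` matters because the standard encoder rejects values that routinely end up in log records in a Django project, such as `datetime`, `Decimal` and `UUID`; the added `if f in data` guard additionally makes the exclude list tolerant of fields missing from a given LogRecord. A standalone sketch of the encoder difference:

import json
import uuid
from datetime import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder

record = {'ts': datetime(2020, 1, 1, 12, 30),
          'amount': Decimal('9.99'),
          'id': uuid.uuid4()}

# json.dumps(record) raises TypeError ("Object of type datetime is not JSON
# serializable"); DjangoJSONEncoder knows how to render these types:
print(json.dumps(record, cls=DjangoJSONEncoder))
# -> {"ts": "2020-01-01T12:30:00", "amount": "9.99", "id": "..."}
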
d410a5295b67b17ca1cdc4d53ed8f776159278bc
|
json2parquet/__init__.py
|
json2parquet/__init__.py
|
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json, write_parquet_dataset
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
Make client.write_parquet_dataset available for export
|
Make client.write_parquet_dataset available for export
This commit adds write_parquet_dataset to the imports from .client in
__init__.py
Previously, `from json2parquet import write_parquet_dataset` would
result in an error: `ImportError: cannot import name
'write_parquet_dataset' from 'json2parquet' `
|
Python
|
mit
|
andrewgross/json2parquet
|
from __future__ import unicode_literals
- from .client import load_json, ingest_data, write_parquet, convert_json
+ from .client import load_json, ingest_data, write_parquet, convert_json, write_parquet_dataset
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
Make client.write_parquet_dataset available for export
|
## Code Before:
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
## Instruction:
Make client.write_parquet_dataset available for export
## Code After:
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json, write_parquet_dataset
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
from __future__ import unicode_literals
- from .client import load_json, ingest_data, write_parquet, convert_json
+ from .client import load_json, ingest_data, write_parquet, convert_json, write_parquet_dataset
? +++++++++++++++++++++++
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
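Listing a name in `__all__` only affects `from json2parquet import *`; for a plain `from json2parquet import write_parquet_dataset` to succeed, the name must actually be bound in the package namespace, which is exactly what the extra import in `__init__.py` provides. The general pattern, with a hypothetical package layout:

# mypkg/client.py
def write_parquet_dataset(data, path):
    ...

# mypkg/__init__.py
from .client import write_parquet_dataset   # binds the name on the package

__all__ = ['write_parquet_dataset']          # advertises it for `import *`

# user code
from mypkg import write_parquet_dataset     # works only because of the
                                             # re-export in __init__.py
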
d5ad324355e0abdf0a6bdcb41e1f07224742b537
|
src/main.py
|
src/main.py
|
import sys
import game
import menu
menu.init()
menu.chooseOption()
|
import game
import menu
menu.init()
menu.chooseOption()
|
Remove needless import of sys module
|
Remove needless import of sys module
|
Python
|
mit
|
TheUnderscores/card-fight-thingy
|
-
- import sys
import game
import menu
menu.init()
menu.chooseOption()
|
Remove needless import of sys module
|
## Code Before:
import sys
import game
import menu
menu.init()
menu.chooseOption()
## Instruction:
Remove needless import of sys module
## Code After:
import game
import menu
menu.init()
menu.chooseOption()
|
-
- import sys
import game
import menu
menu.init()
menu.chooseOption()
|
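Dead imports like this are usually caught mechanically; pyflakes/flake8 commonly report them as F401. A rough standard-library sketch of the same check, deliberately simple (it only tracks plain names, so aliasing tricks and `__all__` re-exports will confuse it):

import ast
import sys

def unused_imports(path):
    tree = ast.parse(open(path).read(), filename=path)
    imported, used = set(), set()
    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            for alias in node.names:
                imported.add(alias.asname or alias.name.split('.')[0])
        elif isinstance(node, ast.ImportFrom):
            for alias in node.names:
                imported.add(alias.asname or alias.name)
        elif isinstance(node, ast.Name):
            used.add(node.id)
    return sorted(imported - used)

if __name__ == '__main__':
    for name in unused_imports(sys.argv[1]):
        print('unused import:', name)
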
3916efe4a017fe9e0fb1c5fe09b99f374d7a4060
|
instana/__init__.py
|
instana/__init__.py
|
__author__ = 'Instana Inc.'
__copyright__ = 'Copyright 2016 Instana Inc.'
__credits__ = ['Pavlo Baron']
__license__ = 'MIT'
__version__ = '0.0.1'
__maintainer__ = 'Pavlo Baron'
__email__ = '[email protected]'
__all__ = ['sensor', 'tracer']
|
__author__ = 'Instana Inc.'
__copyright__ = 'Copyright 2017 Instana Inc.'
__credits__ = ['Pavlo Baron', 'Peter Giacomo Lombardo']
__license__ = 'MIT'
__version__ = '0.6.6'
__maintainer__ = 'Peter Giacomo Lombardo'
__email__ = '[email protected]'
__all__ = ['sensor', 'tracer']
|
Update module init file; begin version stamping here.
|
Update module init file; begin version stamping here.
|
Python
|
mit
|
instana/python-sensor,instana/python-sensor
|
__author__ = 'Instana Inc.'
- __copyright__ = 'Copyright 2016 Instana Inc.'
+ __copyright__ = 'Copyright 2017 Instana Inc.'
- __credits__ = ['Pavlo Baron']
+ __credits__ = ['Pavlo Baron', 'Peter Giacomo Lombardo']
__license__ = 'MIT'
- __version__ = '0.0.1'
+ __version__ = '0.6.6'
- __maintainer__ = 'Pavlo Baron'
+ __maintainer__ = 'Peter Giacomo Lombardo'
- __email__ = '[email protected]'
+ __email__ = '[email protected]'
__all__ = ['sensor', 'tracer']
|
Update module init file; begin version stamping here.
|
## Code Before:
__author__ = 'Instana Inc.'
__copyright__ = 'Copyright 2016 Instana Inc.'
__credits__ = ['Pavlo Baron']
__license__ = 'MIT'
__version__ = '0.0.1'
__maintainer__ = 'Pavlo Baron'
__email__ = '[email protected]'
__all__ = ['sensor', 'tracer']
## Instruction:
Update module init file; begin version stamping here.
## Code After:
__author__ = 'Instana Inc.'
__copyright__ = 'Copyright 2017 Instana Inc.'
__credits__ = ['Pavlo Baron', 'Peter Giacomo Lombardo']
__license__ = 'MIT'
__version__ = '0.6.6'
__maintainer__ = 'Peter Giacomo Lombardo'
__email__ = '[email protected]'
__all__ = ['sensor', 'tracer']
|
__author__ = 'Instana Inc.'
- __copyright__ = 'Copyright 2016 Instana Inc.'
? ^
+ __copyright__ = 'Copyright 2017 Instana Inc.'
? ^
- __credits__ = ['Pavlo Baron']
+ __credits__ = ['Pavlo Baron', 'Peter Giacomo Lombardo']
__license__ = 'MIT'
- __version__ = '0.0.1'
? ^ ^
+ __version__ = '0.6.6'
? ^ ^
- __maintainer__ = 'Pavlo Baron'
+ __maintainer__ = 'Peter Giacomo Lombardo'
- __email__ = '[email protected]'
? ^^ ^ -
+ __email__ = '[email protected]'
? ^^^^^ ^ +
__all__ = ['sensor', 'tracer']
|
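Stamping the release in `__version__` lets other code read it straight off the module; once the package is installed as a distribution, the same number is also available from the packaging metadata. A small sketch of both, assuming the distribution is published under the name "instana":

# From the module attribute (works for a plain source checkout):
import instana
print(instana.__version__)

# From the installed distribution metadata (Python 3.8+):
from importlib.metadata import version, PackageNotFoundError
try:
    print(version("instana"))
except PackageNotFoundError:
    print("not installed as a distribution")
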
a2b4b53635ab1188e95efd68f64104a469e7ff66
|
scheduler/executor.py
|
scheduler/executor.py
|
import threading
import subprocess
class TestExecutor(threading.Thread):
"""
The general thread to perform the tests executions
"""
def __init__(self, run_id, test_name, queue):
super().__init__()
self.run_id = run_id
self.test_name = test_name
self.queue = queue
# __init __()
def run(self):
"""
Execute the command to perform the test execution. The return
code is enqueued so the scheduler can determine if the run has
completed
"""
with open(
"runs/static/runs/autotests/runs/{}.txt".format(
self.run_id), "w") as f:
CMD = "python3 -m unittest -v autotests/tests/{}.py 2>&1".format(
self.test_name)
return_code = subprocess.call(CMD, stdout=f, shell=True)
self.queue.put((self.run_id, return_code))
# run()
|
import threading
import subprocess
import os
class TestExecutor(threading.Thread):
"""
The general thread to perform the tests executions
"""
def __init__(self, run_id, test_name, queue):
super().__init__()
self.run_id = run_id
self.test_name = test_name
self.queue = queue
# __init __()
def run(self):
"""
Execute the command to perform the test execution. The return
code is enqueued so the scheduler can determine if the run has
completed
"""
filename = "runs/static/runs/logs/{}.txt".format(self.run_id)
os.makedirs(os.path.dirname(filename), exist_ok=True)
with open(filename, "w") as f:
CMD = "python3 -m unittest -v autotests/tests/{}.py 2>&1".format(
self.test_name)
return_code = subprocess.call(CMD, stdout=f, shell=True)
self.queue.put((self.run_id, return_code))
# run()
|
Fix bug related to creating the log directory
|
Fix bug related to creating the log directory
|
Python
|
mit
|
jfelipefilho/test-manager,jfelipefilho/test-manager,jfelipefilho/test-manager
|
import threading
import subprocess
+ import os
class TestExecutor(threading.Thread):
"""
The general thread to perform the tests executions
"""
def __init__(self, run_id, test_name, queue):
super().__init__()
self.run_id = run_id
self.test_name = test_name
self.queue = queue
# __init __()
def run(self):
"""
Execute the command to perform the test execution. The return
code is enqueued so the scheduler can determine if the run has
completed
"""
+ filename = "runs/static/runs/logs/{}.txt".format(self.run_id)
+ os.makedirs(os.path.dirname(filename), exist_ok=True)
+ with open(filename, "w") as f:
- with open(
- "runs/static/runs/autotests/runs/{}.txt".format(
- self.run_id), "w") as f:
-
CMD = "python3 -m unittest -v autotests/tests/{}.py 2>&1".format(
self.test_name)
return_code = subprocess.call(CMD, stdout=f, shell=True)
self.queue.put((self.run_id, return_code))
# run()
|
Fix bug related to creating the log directory
|
## Code Before:
import threading
import subprocess
class TestExecutor(threading.Thread):
"""
The general thread to perform the tests executions
"""
def __init__(self, run_id, test_name, queue):
super().__init__()
self.run_id = run_id
self.test_name = test_name
self.queue = queue
# __init __()
def run(self):
"""
Execute the command to perform the test execution. The return
code is enqueued so the scheduler can determine if the run has
completed
"""
with open(
"runs/static/runs/autotests/runs/{}.txt".format(
self.run_id), "w") as f:
CMD = "python3 -m unittest -v autotests/tests/{}.py 2>&1".format(
self.test_name)
return_code = subprocess.call(CMD, stdout=f, shell=True)
self.queue.put((self.run_id, return_code))
# run()
## Instruction:
Fix bug related to creating the log directory
## Code After:
import threading
import subprocess
import os
class TestExecutor(threading.Thread):
"""
The general thread to perform the tests executions
"""
def __init__(self, run_id, test_name, queue):
super().__init__()
self.run_id = run_id
self.test_name = test_name
self.queue = queue
# __init __()
def run(self):
"""
Execute the command to perform the test execution. The return
code is enqueued so the scheduler can determine if the run has
completed
"""
filename = "runs/static/runs/logs/{}.txt".format(self.run_id)
os.makedirs(os.path.dirname(filename), exist_ok=True)
with open(filename, "w") as f:
CMD = "python3 -m unittest -v autotests/tests/{}.py 2>&1".format(
self.test_name)
return_code = subprocess.call(CMD, stdout=f, shell=True)
self.queue.put((self.run_id, return_code))
# run()
|
import threading
import subprocess
+ import os
class TestExecutor(threading.Thread):
"""
The general thread to perform the tests executions
"""
def __init__(self, run_id, test_name, queue):
super().__init__()
self.run_id = run_id
self.test_name = test_name
self.queue = queue
# __init __()
def run(self):
"""
Execute the command to perform the test execution. The return
code is enqueued so the scheduler can determine if the run has
completed
"""
+ filename = "runs/static/runs/logs/{}.txt".format(self.run_id)
+ os.makedirs(os.path.dirname(filename), exist_ok=True)
+ with open(filename, "w") as f:
- with open(
- "runs/static/runs/autotests/runs/{}.txt".format(
- self.run_id), "w") as f:
-
CMD = "python3 -m unittest -v autotests/tests/{}.py 2>&1".format(
self.test_name)
return_code = subprocess.call(CMD, stdout=f, shell=True)
self.queue.put((self.run_id, return_code))
# run()
|
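The fix is the standard recipe: create the log directory before opening the file, with `exist_ok=True` so repeated or concurrent runs do not fail on an already-existing directory. The same idea with `pathlib`, as a standalone sketch:

from pathlib import Path

def open_log(run_id, base="runs/static/runs/logs"):
    log_path = Path(base) / f"{run_id}.txt"
    # parents=True creates intermediate directories; exist_ok=True makes the
    # call idempotent if another thread created them first.
    log_path.parent.mkdir(parents=True, exist_ok=True)
    return log_path.open("w")

with open_log(42) as f:
    f.write("test output\n")
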
df57b55c8ffa2a1948d7442d041415a3f19bbca0
|
python/Cloudbot/bbm.py
|
python/Cloudbot/bbm.py
|
from cloudbot import hook
@hook.command("bbmstaff")
def bbmStaff(text, message, chan):
if chan in ("#bbm-bots", "#bbm-dev", "#bbm-packs", "#builtbrokenmodding", "#builtbroken"):
message("Owners: Dmodoomsirius, DarkGuardsman");
message("textureArtist: Morton0000");
message("Developers: Snow, Hennamann")
else:
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
@hook.command("sponsor")
def sponsor(text, message, chan):
if chan in ("#BBM-bots", "#BBM-Dev", "#BBM-Packs", "#BuiltBrokenModding", "#BuiltBroken"):
message("BuiltBroken servers both Beta test servers and build server");
message("is sponsored by Akliz.");
message("http://www.akliz.net/bbm")
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
|
from cloudbot import hook
bbmChannels = ["#bbm-bots","#bbm-dev","#builtbroken","#builtbrokenmodding","#bbm-packs","#icbm","#artillects "]
@hook.command("bbmstaff")
def bbmStaff(text, message, chan):
if any(x in chan for x in bbmChannels):
message("Owners: Dmodoomsirius, DarkGuardsman");
#message("Texture Artist: Morton0000");
message("Senior Developers: Kolatra")
message("Junior Developers: Kolatra, shobu9, TheCowGod, Hennamann")
else:
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
@hook.command("sponsor")
def sponsor(text, message, chan):
if any(x in chan for x in bbmChannels):
message("BuildBrokenModding servers both Beta test servers and build server");
message("is sponsored by Akliz.");
message("http://www.akliz.net/bbm")
else:
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
@hook.command("bbmchan")
def chans(text, message):
message("The official BuiltBroken Channels are: " + " , ".join(bbmChannels))
@hook.command("bbmhelp")
def bbmhelp(text, message):
message("If you are looking for who is the staff type .bbmstaff")
message ("if you are looking for our sponsors please type .sponsor")
message("If you are looking for our official channels please do .bbmchan")
|
Update and add more commands.
|
Update and add more commands.
|
Python
|
unknown
|
dmodoomsirius/DmodCode,dmodoomsirius/DmodCode,dsirius/DmodCode,dmodoomsirius/DmodCode,dsirius/DmodCode,dsirius/DmodCode
|
from cloudbot import hook
+ bbmChannels = ["#bbm-bots","#bbm-dev","#builtbroken","#builtbrokenmodding","#bbm-packs","#icbm","#artillects "]
@hook.command("bbmstaff")
def bbmStaff(text, message, chan):
- if chan in ("#bbm-bots", "#bbm-dev", "#bbm-packs", "#builtbrokenmodding", "#builtbroken"):
+ if any(x in chan for x in bbmChannels):
- message("Owners: Dmodoomsirius, DarkGuardsman");
+ message("Owners: Dmodoomsirius, DarkGuardsman");
- message("textureArtist: Morton0000");
+ #message("Texture Artist: Morton0000");
- message("Developers: Snow, Hennamann")
+ message("Senior Developers: Kolatra")
+ message("Junior Developers: Kolatra, shobu9, TheCowGod, Hennamann")
else:
- message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
+ message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
+
-
-
@hook.command("sponsor")
def sponsor(text, message, chan):
- if chan in ("#BBM-bots", "#BBM-Dev", "#BBM-Packs", "#BuiltBrokenModding", "#BuiltBroken"):
+ if any(x in chan for x in bbmChannels):
- message("BuiltBroken servers both Beta test servers and build server");
+ message("BuildBrokenModding servers both Beta test servers and build server");
- message("is sponsored by Akliz.");
+ message("is sponsored by Akliz.");
- message("http://www.akliz.net/bbm")
+ message("http://www.akliz.net/bbm")
+ else:
- message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
+ message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
+
+ @hook.command("bbmchan")
+ def chans(text, message):
+ message("The official BuiltBroken Channels are: " + " , ".join(bbmChannels))
+
+ @hook.command("bbmhelp")
+ def bbmhelp(text, message):
+ message("If you are looking for who is the staff type .bbmstaff")
+ message ("if you are looking for our sponsors please type .sponsor")
+ message("If you are looking for our official channels please do .bbmchan")
|
Update and add more commands.
|
## Code Before:
from cloudbot import hook
@hook.command("bbmstaff")
def bbmStaff(text, message, chan):
if chan in ("#bbm-bots", "#bbm-dev", "#bbm-packs", "#builtbrokenmodding", "#builtbroken"):
message("Owners: Dmodoomsirius, DarkGuardsman");
message("textureArtist: Morton0000");
message("Developers: Snow, Hennamann")
else:
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
@hook.command("sponsor")
def sponsor(text, message, chan):
if chan in ("#BBM-bots", "#BBM-Dev", "#BBM-Packs", "#BuiltBrokenModding", "#BuiltBroken"):
message("BuiltBroken servers both Beta test servers and build server");
message("is sponsored by Akliz.");
message("http://www.akliz.net/bbm")
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
## Instruction:
Update and add more commands.
## Code After:
from cloudbot import hook
bbmChannels = ["#bbm-bots","#bbm-dev","#builtbroken","#builtbrokenmodding","#bbm-packs","#icbm","#artillects "]
@hook.command("bbmstaff")
def bbmStaff(text, message, chan):
if any(x in chan for x in bbmChannels):
message("Owners: Dmodoomsirius, DarkGuardsman");
#message("Texture Artist: Morton0000");
message("Senior Developers: Kolatra")
message("Junior Developers: Kolatra, shobu9, TheCowGod, Hennamann")
else:
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
@hook.command("sponsor")
def sponsor(text, message, chan):
if any(x in chan for x in bbmChannels):
message("BuildBrokenModding servers both Beta test servers and build server");
message("is sponsored by Akliz.");
message("http://www.akliz.net/bbm")
else:
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
@hook.command("bbmchan")
def chans(text, message):
message("The official BuiltBroken Channels are: " + " , ".join(bbmChannels))
@hook.command("bbmhelp")
def bbmhelp(text, message):
message("If you are looking for who is the staff type .bbmstaff")
message ("if you are looking for our sponsors please type .sponsor")
message("If you are looking for our official channels please do .bbmchan")
|
from cloudbot import hook
+ bbmChannels = ["#bbm-bots","#bbm-dev","#builtbroken","#builtbrokenmodding","#bbm-packs","#icbm","#artillects "]
@hook.command("bbmstaff")
def bbmStaff(text, message, chan):
- if chan in ("#bbm-bots", "#bbm-dev", "#bbm-packs", "#builtbrokenmodding", "#builtbroken"):
+ if any(x in chan for x in bbmChannels):
- message("Owners: Dmodoomsirius, DarkGuardsman");
+ message("Owners: Dmodoomsirius, DarkGuardsman");
? ++++
- message("textureArtist: Morton0000");
? ^
+ #message("Texture Artist: Morton0000");
? +++++ ^ +
- message("Developers: Snow, Hennamann")
+ message("Senior Developers: Kolatra")
+ message("Junior Developers: Kolatra, shobu9, TheCowGod, Hennamann")
else:
- message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
+ message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
? ++++
+
-
-
@hook.command("sponsor")
def sponsor(text, message, chan):
- if chan in ("#BBM-bots", "#BBM-Dev", "#BBM-Packs", "#BuiltBrokenModding", "#BuiltBroken"):
+ if any(x in chan for x in bbmChannels):
- message("BuiltBroken servers both Beta test servers and build server");
? -------- ^
+ message("BuildBrokenModding servers both Beta test servers and build server");
? ^ +++++++
- message("is sponsored by Akliz.");
? --------
+ message("is sponsored by Akliz.");
- message("http://www.akliz.net/bbm")
? --------
+ message("http://www.akliz.net/bbm")
+ else:
- message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
+ message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
? ++++++++
+
+ @hook.command("bbmchan")
+ def chans(text, message):
+ message("The official BuiltBroken Channels are: " + " , ".join(bbmChannels))
+
+ @hook.command("bbmhelp")
+ def bbmhelp(text, message):
+ message("If you are looking for who is the staff type .bbmstaff")
+ message ("if you are looking for our sponsors please type .sponsor")
+ message("If you are looking for our official channels please do .bbmchan")
|
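The rewritten check uses substring matching (`x in chan`), so any channel whose name merely contains one of the listed strings also passes. If exact membership is the intent, a case-insensitive set lookup is tighter; a hypothetical sketch, not the plugin's actual code:

BBM_CHANNELS = {"#bbm-bots", "#bbm-dev", "#builtbroken", "#builtbrokenmodding",
                "#bbm-packs", "#icbm", "#artillects"}   # stray whitespace stripped

def is_bbm_channel(chan):
    # IRC channel names are case-insensitive, so normalise before comparing.
    return chan.lower() in BBM_CHANNELS

print(is_bbm_channel("#BBM-Dev"))         # True
print(is_bbm_channel("#bbm-dev-backup"))  # False, unlike a substring test
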
91c33bdeea9214c9594d2d3f9bd1255403d62034
|
notify_levure_app_of_save.py
|
notify_levure_app_of_save.py
|
import sublime
import sublime_plugin
import re
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
print('stack name and filename', stack_name, view.file_name())
|
import sublime
import sublime_plugin
import re
import socket
import urllib
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
host ="localhost"
port = 62475
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) #socket.SOCK_DGRAM
s.connect((host,port))
query = {'stack': stack_name, 'filename': view.file_name()}
data = urllib.parse.urlencode(query) + "\n"
s.send(data.encode())
data = s.recv(1024).decode()
s.close()
if data != 'success':
print('error updating script in LiveCode: ' + data)
else:
print('script updated in LiveCode')
|
Send update notification to server
|
Send update notification to server
|
Python
|
mit
|
trevordevore/livecode-sublimetext
|
import sublime
import sublime_plugin
import re
+ import socket
+ import urllib
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
- print('stack name and filename', stack_name, view.file_name())
+ host ="localhost"
+ port = 62475
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) #socket.SOCK_DGRAM
+ s.connect((host,port))
+ query = {'stack': stack_name, 'filename': view.file_name()}
+ data = urllib.parse.urlencode(query) + "\n"
+ s.send(data.encode())
+ data = s.recv(1024).decode()
+ s.close()
+ if data != 'success':
+ print('error updating script in LiveCode: ' + data)
+ else:
+ print('script updated in LiveCode')
+
|
Send update notification to server
|
## Code Before:
import sublime
import sublime_plugin
import re
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
print('stack name and filename', stack_name, view.file_name())
## Instruction:
Send update notification to server
## Code After:
import sublime
import sublime_plugin
import re
import socket
import urllib
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
host ="localhost"
port = 62475
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) #socket.SOCK_DGRAM
s.connect((host,port))
query = {'stack': stack_name, 'filename': view.file_name()}
data = urllib.parse.urlencode(query) + "\n"
s.send(data.encode())
data = s.recv(1024).decode()
s.close()
if data != 'success':
print('error updating script in LiveCode: ' + data)
else:
print('script updated in LiveCode')
|
import sublime
import sublime_plugin
import re
+ import socket
+ import urllib
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
- print('stack name and filename', stack_name, view.file_name())
+ host ="localhost"
+ port = 62475
+
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) #socket.SOCK_DGRAM
+ s.connect((host,port))
+ query = {'stack': stack_name, 'filename': view.file_name()}
+ data = urllib.parse.urlencode(query) + "\n"
+ s.send(data.encode())
+ data = s.recv(1024).decode()
+ s.close()
+ if data != 'success':
+ print('error updating script in LiveCode: ' + data)
+ else:
+ print('script updated in LiveCode')
|
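The plugin assumes something is listening on localhost:62475 that reads one urlencoded line and replies with `success`; the LiveCode side of that protocol is not shown in this commit. A hypothetical Python stand-in server, handy for exercising the plugin without the IDE:

import socket
from urllib.parse import parse_qs

HOST, PORT = "localhost", 62475

with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as srv:
    srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    srv.bind((HOST, PORT))
    srv.listen()
    while True:
        conn, _ = srv.accept()
        with conn:
            line = conn.recv(1024).decode().strip()
            fields = parse_qs(line)   # {'stack': [...], 'filename': [...]}
            print("save notification:", fields)
            conn.send(b"success")     # the reply string the plugin checks for
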
879bdbcddb582d5f4768fe7042380397514767d3
|
servo.py
|
servo.py
|
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM)
SERVO = 17
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)
|
#################################################################
## SERVO LIB for SMART TOLLBOOTH PROJECT ##
#################################################################
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARDs)
SERVO = 11
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)
|
Update pin to gpio.board (from gpio.bcm)
|
fix: Update pin to gpio.board (from gpio.bcm)
|
Python
|
mit
|
DreamN/Smart-Tollbooth,DreamN/Smart-Tollbooth,DreamN/Smart-Tollbooth,DreamN/Smart-Tollbooth
|
+ #################################################################
+ ## SERVO LIB for SMART TOLLBOOTH PROJECT ##
+ #################################################################
import RPi.GPIO as GPIO
import time
- GPIO.setmode(GPIO.BCM)
+ GPIO.setmode(GPIO.BOARDs)
- SERVO = 17
+ SERVO = 11
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)
+
|
Update pin to gpio.board (from gpio.bcm)
|
## Code Before:
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM)
SERVO = 17
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)
## Instruction:
Update pin to gpio.board (from gpio.bcm)
## Code After:
#################################################################
## SERVO LIB for SMART TOLLBOOTH PROJECT ##
#################################################################
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARDs)
SERVO = 11
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)
|
+ #################################################################
+ ## SERVO LIB for SMART TOLLBOOTH PROJECT ##
+ #################################################################
import RPi.GPIO as GPIO
import time
- GPIO.setmode(GPIO.BCM)
? ^^
+ GPIO.setmode(GPIO.BOARDs)
? ^^^^^
- SERVO = 17
? ^
+ SERVO = 11
? ^
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)
|
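The recorded script bit-bangs the pulse train in a loop; note also that `GPIO.setmode(GPIO.BOARDs)` in the new version looks like a typo for `GPIO.BOARD` and would raise AttributeError on stock RPi.GPIO. Driving a hobby servo is more commonly done with RPi.GPIO's software PWM (roughly 1-2 ms pulses at 50 Hz); a hedged sketch, with endpoints that will need calibrating per servo:

import time
import RPi.GPIO as GPIO

SERVO_PIN = 11                 # physical pin numbering under BOARD mode

GPIO.setmode(GPIO.BOARD)
GPIO.setup(SERVO_PIN, GPIO.OUT)
pwm = GPIO.PWM(SERVO_PIN, 50)  # 50 Hz -> 20 ms period
pwm.start(0)

def move_deg(angle):
    # Map 0-180 degrees onto roughly 1-2 ms pulses (5-10 % duty at 50 Hz).
    duty = 5.0 + (angle / 180.0) * 5.0
    pwm.ChangeDutyCycle(duty)
    time.sleep(0.5)            # give the horn time to travel
    pwm.ChangeDutyCycle(0)     # stop pulsing to avoid jitter

move_deg(0)    # closeBarrier
move_deg(90)   # openBarrier
GPIO.cleanup()
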
1b9aa9909b284489c9f8a5d38b1c5520d5916dc7
|
feature_extraction/measurements/__init__.py
|
feature_extraction/measurements/__init__.py
|
from collections import defaultdict
from feature_extraction.util import DefaultAttributeDict
class Measurement(object):
"""
A generic feature measurement.
Attributes
----------
default_options
Can be set by subclasses to set default option values
"""
default_options = {}
def __init__(self, options=None):
"""
When initializing this measurement, options can be passed.
These are exposed to internal algorithms as `self.options`.
Parameters
----------
options : dict
A dict of options for this measurement.
"""
self.options = DefaultAttributeDict()
self.options.update(self.default_options or {})
self.options.update(options or {})
from .pixelaverage import PixelAverage
from .texture_haralick import HaralickTexture
|
from collections import defaultdict
from feature_extraction.util import AttributeDict
class Measurement(object):
"""
A generic feature measurement.
Attributes
----------
default_options
Can be set by subclasses to set default option values
"""
default_options = {}
def __init__(self, options=None):
"""
When initializing this measurement, options can be passed.
These are exposed to internal algorithms as `self.options`.
Parameters
----------
options : dict
A dict of options for this measurement.
"""
self.options = AttributeDict()
self.options.update(self.default_options or {})
self.options.update(options or {})
from .pixelaverage import PixelAverage
from .texture_haralick import HaralickTexture
|
Switch back to AttributeDict for measurement options
|
Switch back to AttributeDict for measurement options
|
Python
|
apache-2.0
|
widoptimization-willett/feature-extraction
|
from collections import defaultdict
- from feature_extraction.util import DefaultAttributeDict
+ from feature_extraction.util import AttributeDict
class Measurement(object):
"""
A generic feature measurement.
Attributes
----------
default_options
Can be set by subclasses to set default option values
"""
default_options = {}
def __init__(self, options=None):
"""
When initializing this measurement, options can be passed.
These are exposed to internal algorithms as `self.options`.
Parameters
----------
options : dict
A dict of options for this measurement.
"""
- self.options = DefaultAttributeDict()
+ self.options = AttributeDict()
self.options.update(self.default_options or {})
self.options.update(options or {})
from .pixelaverage import PixelAverage
from .texture_haralick import HaralickTexture
|
Switch back to AttributeDict for measurement options
|
## Code Before:
from collections import defaultdict
from feature_extraction.util import DefaultAttributeDict
class Measurement(object):
"""
A generic feature measurement.
Attributes
----------
default_options
Can be set by subclasses to set default option values
"""
default_options = {}
def __init__(self, options=None):
"""
When initializing this measurement, options can be passed.
These are exposed to internal algorithms as `self.options`.
Parameters
----------
options : dict
A dict of options for this measurement.
"""
self.options = DefaultAttributeDict()
self.options.update(self.default_options or {})
self.options.update(options or {})
from .pixelaverage import PixelAverage
from .texture_haralick import HaralickTexture
## Instruction:
Switch back to AttributeDict for measurement options
## Code After:
from collections import defaultdict
from feature_extraction.util import AttributeDict
class Measurement(object):
"""
A generic feature measurement.
Attributes
----------
default_options
Can be set by subclasses to set default option values
"""
default_options = {}
def __init__(self, options=None):
"""
When initializing this measurement, options can be passed.
These are exposed to internal algorithms as `self.options`.
Parameters
----------
options : dict
A dict of options for this measurement.
"""
self.options = AttributeDict()
self.options.update(self.default_options or {})
self.options.update(options or {})
from .pixelaverage import PixelAverage
from .texture_haralick import HaralickTexture
|
from collections import defaultdict
- from feature_extraction.util import DefaultAttributeDict
? -------
+ from feature_extraction.util import AttributeDict
class Measurement(object):
"""
A generic feature measurement.
Attributes
----------
default_options
Can be set by subclasses to set default option values
"""
default_options = {}
def __init__(self, options=None):
"""
When initializing this measurement, options can be passed.
These are exposed to internal algorithms as `self.options`.
Parameters
----------
options : dict
A dict of options for this measurement.
"""
- self.options = DefaultAttributeDict()
? -------
+ self.options = AttributeDict()
self.options.update(self.default_options or {})
self.options.update(options or {})
from .pixelaverage import PixelAverage
from .texture_haralick import HaralickTexture
|
2b0bcbb7ce82171965b22cf657439d6263fa9d91
|
geojson_scraper.py
|
geojson_scraper.py
|
import json
import os
import urllib.request
from retry import retry
from urllib.error import HTTPError
from common import store_history, truncate, summarise
# hack to override sqlite database filename
# see: https://help.morph.io/t/using-python-3-with-morph-scraperwiki-fork/148
os.environ['SCRAPERWIKI_DATABASE_NAME'] = 'sqlite:///data.sqlite'
import scraperwiki
@retry(HTTPError, tries=2, delay=30)
def scrape(url, council_id, encoding, table):
with urllib.request.urlopen(url) as response:
# clear any existing data
truncate(table)
# load json
data_str = response.read()
data = json.loads(data_str.decode(encoding))
print("found %i %s" % (len(data['features']), table))
for feature in data['features']:
# assemble record
record = {
'pk': feature['id'],
'council_id': council_id,
'geometry': json.dumps(feature),
}
for field in feature['properties']:
if field != 'bbox':
record[field] = feature['properties'][field]
# save to db
scraperwiki.sqlite.save(
unique_keys=['pk'],
data=record,
table_name=table)
scraperwiki.sqlite.commit_transactions()
# print summary
summarise(table)
store_history(data_str, table)
|
import json
import os
import urllib.request
from retry import retry
from urllib.error import HTTPError
from common import store_history, truncate, summarise
# hack to override sqlite database filename
# see: https://help.morph.io/t/using-python-3-with-morph-scraperwiki-fork/148
os.environ['SCRAPERWIKI_DATABASE_NAME'] = 'sqlite:///data.sqlite'
import scraperwiki
@retry(HTTPError, tries=2, delay=30)
def scrape(url, council_id, encoding, table, key=None):
with urllib.request.urlopen(url) as response:
# clear any existing data
truncate(table)
# load json
data_str = response.read()
data = json.loads(data_str.decode(encoding))
print("found %i %s" % (len(data['features']), table))
for feature in data['features']:
# assemble record
record = {
'council_id': council_id,
'geometry': json.dumps(feature),
}
if key is None:
record['pk'] = feature['id']
else:
record['pk'] = feature['properties'][key]
for field in feature['properties']:
if field != 'bbox':
record[field] = feature['properties'][field]
# save to db
scraperwiki.sqlite.save(
unique_keys=['pk'],
data=record,
table_name=table)
scraperwiki.sqlite.commit_transactions()
# print summary
summarise(table)
store_history(data_str, table)
|
Add key param to geojson scraper
|
Add key param to geojson scraper
Sometimes we encounter a geojson file with no 'id' attribute
This allows us to specify a property to use as a key instead
|
Python
|
mit
|
wdiv-scrapers/dc-base-scrapers
|
import json
import os
import urllib.request
from retry import retry
from urllib.error import HTTPError
from common import store_history, truncate, summarise
# hack to override sqlite database filename
# see: https://help.morph.io/t/using-python-3-with-morph-scraperwiki-fork/148
os.environ['SCRAPERWIKI_DATABASE_NAME'] = 'sqlite:///data.sqlite'
import scraperwiki
@retry(HTTPError, tries=2, delay=30)
- def scrape(url, council_id, encoding, table):
+ def scrape(url, council_id, encoding, table, key=None):
with urllib.request.urlopen(url) as response:
# clear any existing data
truncate(table)
# load json
data_str = response.read()
data = json.loads(data_str.decode(encoding))
print("found %i %s" % (len(data['features']), table))
for feature in data['features']:
# assemble record
record = {
- 'pk': feature['id'],
'council_id': council_id,
'geometry': json.dumps(feature),
}
+ if key is None:
+ record['pk'] = feature['id']
+ else:
+ record['pk'] = feature['properties'][key]
for field in feature['properties']:
if field != 'bbox':
record[field] = feature['properties'][field]
# save to db
scraperwiki.sqlite.save(
unique_keys=['pk'],
data=record,
table_name=table)
scraperwiki.sqlite.commit_transactions()
# print summary
summarise(table)
store_history(data_str, table)
|
Add key param to geojson scraper
|
## Code Before:
import json
import os
import urllib.request
from retry import retry
from urllib.error import HTTPError
from common import store_history, truncate, summarise
# hack to override sqlite database filename
# see: https://help.morph.io/t/using-python-3-with-morph-scraperwiki-fork/148
os.environ['SCRAPERWIKI_DATABASE_NAME'] = 'sqlite:///data.sqlite'
import scraperwiki
@retry(HTTPError, tries=2, delay=30)
def scrape(url, council_id, encoding, table):
with urllib.request.urlopen(url) as response:
# clear any existing data
truncate(table)
# load json
data_str = response.read()
data = json.loads(data_str.decode(encoding))
print("found %i %s" % (len(data['features']), table))
for feature in data['features']:
# assemble record
record = {
'pk': feature['id'],
'council_id': council_id,
'geometry': json.dumps(feature),
}
for field in feature['properties']:
if field != 'bbox':
record[field] = feature['properties'][field]
# save to db
scraperwiki.sqlite.save(
unique_keys=['pk'],
data=record,
table_name=table)
scraperwiki.sqlite.commit_transactions()
# print summary
summarise(table)
store_history(data_str, table)
## Instruction:
Add key param to geojson scraper
## Code After:
import json
import os
import urllib.request
from retry import retry
from urllib.error import HTTPError
from common import store_history, truncate, summarise
# hack to override sqlite database filename
# see: https://help.morph.io/t/using-python-3-with-morph-scraperwiki-fork/148
os.environ['SCRAPERWIKI_DATABASE_NAME'] = 'sqlite:///data.sqlite'
import scraperwiki
@retry(HTTPError, tries=2, delay=30)
def scrape(url, council_id, encoding, table, key=None):
with urllib.request.urlopen(url) as response:
# clear any existing data
truncate(table)
# load json
data_str = response.read()
data = json.loads(data_str.decode(encoding))
print("found %i %s" % (len(data['features']), table))
for feature in data['features']:
# assemble record
record = {
'council_id': council_id,
'geometry': json.dumps(feature),
}
if key is None:
record['pk'] = feature['id']
else:
record['pk'] = feature['properties'][key]
for field in feature['properties']:
if field != 'bbox':
record[field] = feature['properties'][field]
# save to db
scraperwiki.sqlite.save(
unique_keys=['pk'],
data=record,
table_name=table)
scraperwiki.sqlite.commit_transactions()
# print summary
summarise(table)
store_history(data_str, table)
|
import json
import os
import urllib.request
from retry import retry
from urllib.error import HTTPError
from common import store_history, truncate, summarise
# hack to override sqlite database filename
# see: https://help.morph.io/t/using-python-3-with-morph-scraperwiki-fork/148
os.environ['SCRAPERWIKI_DATABASE_NAME'] = 'sqlite:///data.sqlite'
import scraperwiki
@retry(HTTPError, tries=2, delay=30)
- def scrape(url, council_id, encoding, table):
+ def scrape(url, council_id, encoding, table, key=None):
? ++++++++++
with urllib.request.urlopen(url) as response:
# clear any existing data
truncate(table)
# load json
data_str = response.read()
data = json.loads(data_str.decode(encoding))
print("found %i %s" % (len(data['features']), table))
for feature in data['features']:
# assemble record
record = {
- 'pk': feature['id'],
'council_id': council_id,
'geometry': json.dumps(feature),
}
+ if key is None:
+ record['pk'] = feature['id']
+ else:
+ record['pk'] = feature['properties'][key]
for field in feature['properties']:
if field != 'bbox':
record[field] = feature['properties'][field]
# save to db
scraperwiki.sqlite.save(
unique_keys=['pk'],
data=record,
table_name=table)
scraperwiki.sqlite.commit_transactions()
# print summary
summarise(table)
store_history(data_str, table)
|
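A hedged usage sketch for the geojson scraper record above: it only illustrates how the new key argument selects the primary-key field when features carry no top-level "id". The URL, council id, and table names are made-up placeholders, not values from any real scraper configuration.

# Illustrative only -- URL, council id and table names are hypothetical.
from geojson_scraper import scrape

# Features carry a top-level "id": default behaviour, pk taken from feature['id'].
scrape(
    "https://example.org/polling-stations.geojson",
    "X01000001",
    "utf-8",
    "stations",
)

# No "id" attribute on the features: use a property (here "OBJECTID") as the key.
scrape(
    "https://example.org/polling-districts.geojson",
    "X01000001",
    "utf-8",
    "districts",
    key="OBJECTID",
)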
65731fff94cd18a0d196c463b5e2aee444027d77
|
salt/utils/pycrypto.py
|
salt/utils/pycrypto.py
|
'''
Use pycrypto to generate random passwords on the fly.
'''
# Import python libraries
try:
import Crypto.Random # pylint: disable=E0611
HAS_RANDOM = True
except ImportError:
HAS_RANDOM = False
import crypt
import re
def secure_password(length=20):
'''
Generate a secure password.
'''
if not HAS_RANDOM:
raise ImportError('generating passwords requires >= pycrypto v2.1.0')
pw = ''
while len(pw) < length:
pw += re.sub(r'\W', '', Crypto.Random.get_random_bytes(1))
return pw
def gen_hash(salt=None, password=None):
'''
Generate /etc/shadow hash
'''
if password is None:
password = secure_password()
if salt is None:
salt = '$6' + secure_password(8)
return crypt.crypt(password, salt)
|
'''
Use pycrypto to generate random passwords on the fly.
'''
# Import python libraries
try:
import Crypto.Random # pylint: disable=E0611
HAS_RANDOM = True
except ImportError:
HAS_RANDOM = False
import crypt
import re
import salt.exceptions
def secure_password(length=20):
'''
Generate a secure password.
'''
if not HAS_RANDOM:
raise ImportError('generating passwords requires >= pycrypto v2.1.0')
pw = ''
while len(pw) < length:
pw += re.sub(r'\W', '', Crypto.Random.get_random_bytes(1))
return pw
def gen_hash(salt=None, password=None, algorithm='sha512'):
'''
Generate /etc/shadow hash
'''
hash_algorithms = {'md5':'$1$', 'blowfish':'$2a$', 'sha256':'$5$', 'sha512':'$6$'}
if algorithm not in hash_algorithms:
raise salt.exceptions.SaltInvocationError('Not support {0} algorithm'.format(algorithm))
if password is None:
password = secure_password()
if salt is None:
salt = secure_password(8)
salt = hash_algorithms[algorithm] + salt
return crypt.crypt(password, salt)
|
Add algorithm argument to get_hash
|
Add algorithm argument to get_hash
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
Use pycrypto to generate random passwords on the fly.
'''
# Import python libraries
try:
import Crypto.Random # pylint: disable=E0611
HAS_RANDOM = True
except ImportError:
HAS_RANDOM = False
import crypt
import re
+ import salt.exceptions
def secure_password(length=20):
'''
Generate a secure password.
'''
if not HAS_RANDOM:
raise ImportError('generating passwords requires >= pycrypto v2.1.0')
pw = ''
while len(pw) < length:
pw += re.sub(r'\W', '', Crypto.Random.get_random_bytes(1))
return pw
- def gen_hash(salt=None, password=None):
+ def gen_hash(salt=None, password=None, algorithm='sha512'):
'''
Generate /etc/shadow hash
'''
+ hash_algorithms = {'md5':'$1$', 'blowfish':'$2a$', 'sha256':'$5$', 'sha512':'$6$'}
+ if algorithm not in hash_algorithms:
+ raise salt.exceptions.SaltInvocationError('Not support {0} algorithm'.format(algorithm))
if password is None:
password = secure_password()
if salt is None:
- salt = '$6' + secure_password(8)
+ salt = secure_password(8)
+
+ salt = hash_algorithms[algorithm] + salt
return crypt.crypt(password, salt)
|
Add algorithm argument to get_hash
|
## Code Before:
'''
Use pycrypto to generate random passwords on the fly.
'''
# Import python libraries
try:
import Crypto.Random # pylint: disable=E0611
HAS_RANDOM = True
except ImportError:
HAS_RANDOM = False
import crypt
import re
def secure_password(length=20):
'''
Generate a secure password.
'''
if not HAS_RANDOM:
raise ImportError('generating passwords requires >= pycrypto v2.1.0')
pw = ''
while len(pw) < length:
pw += re.sub(r'\W', '', Crypto.Random.get_random_bytes(1))
return pw
def gen_hash(salt=None, password=None):
'''
Generate /etc/shadow hash
'''
if password is None:
password = secure_password()
if salt is None:
salt = '$6' + secure_password(8)
return crypt.crypt(password, salt)
## Instruction:
Add algorithm argument to get_hash
## Code After:
'''
Use pycrypto to generate random passwords on the fly.
'''
# Import python libraries
try:
import Crypto.Random # pylint: disable=E0611
HAS_RANDOM = True
except ImportError:
HAS_RANDOM = False
import crypt
import re
import salt.exceptions
def secure_password(length=20):
'''
Generate a secure password.
'''
if not HAS_RANDOM:
raise ImportError('generating passwords requires >= pycrypto v2.1.0')
pw = ''
while len(pw) < length:
pw += re.sub(r'\W', '', Crypto.Random.get_random_bytes(1))
return pw
def gen_hash(salt=None, password=None, algorithm='sha512'):
'''
Generate /etc/shadow hash
'''
hash_algorithms = {'md5':'$1$', 'blowfish':'$2a$', 'sha256':'$5$', 'sha512':'$6$'}
if algorithm not in hash_algorithms:
raise salt.exceptions.SaltInvocationError('Not support {0} algorithm'.format(algorithm))
if password is None:
password = secure_password()
if salt is None:
salt = secure_password(8)
salt = hash_algorithms[algorithm] + salt
return crypt.crypt(password, salt)
|
'''
Use pycrypto to generate random passwords on the fly.
'''
# Import python libraries
try:
import Crypto.Random # pylint: disable=E0611
HAS_RANDOM = True
except ImportError:
HAS_RANDOM = False
import crypt
import re
+ import salt.exceptions
def secure_password(length=20):
'''
Generate a secure password.
'''
if not HAS_RANDOM:
raise ImportError('generating passwords requires >= pycrypto v2.1.0')
pw = ''
while len(pw) < length:
pw += re.sub(r'\W', '', Crypto.Random.get_random_bytes(1))
return pw
- def gen_hash(salt=None, password=None):
+ def gen_hash(salt=None, password=None, algorithm='sha512'):
? ++++++++++++++++++++
'''
Generate /etc/shadow hash
'''
+ hash_algorithms = {'md5':'$1$', 'blowfish':'$2a$', 'sha256':'$5$', 'sha512':'$6$'}
+ if algorithm not in hash_algorithms:
+ raise salt.exceptions.SaltInvocationError('Not support {0} algorithm'.format(algorithm))
if password is None:
password = secure_password()
if salt is None:
- salt = '$6' + secure_password(8)
? -------
+ salt = secure_password(8)
+
+ salt = hash_algorithms[algorithm] + salt
return crypt.crypt(password, salt)
|
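A hedged usage sketch for the gen_hash record above, run on a Unix-like system where the crypt module is available and pycrypto is installed. The password value is a placeholder; the import path simply mirrors the file location salt/utils/pycrypto.py.

# Illustrative only; the password shown is a placeholder.
import salt.exceptions
from salt.utils.pycrypto import gen_hash

# Default: SHA-512 crypt with a random 8-character salt and a random password.
print(gen_hash())

# Explicit algorithm and password.
print(gen_hash(password="hunter2", algorithm="sha256"))

# Algorithms outside the supported set raise SaltInvocationError.
try:
    gen_hash(password="hunter2", algorithm="sha1")
except salt.exceptions.SaltInvocationError as exc:
    print(exc)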
c74b3a4d80b8d7002b6836a421cf2b3032377545
|
filterable.py
|
filterable.py
|
class Filterable:
no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)
identity_filter = lambda filterable: True
@staticmethod
def combine_filters( *filters ):
return lambda filterable: all([fil( filterable ) for fil in filters])
|
class Filterable:
no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)
practice_topic_reject_filter = lambda filterable: filterable.topic.record_id != str(367)
identity_filter = lambda filterable: True
@staticmethod
def combine_filters( *filters ):
return lambda filterable: all([fil( filterable ) for fil in filters])
|
Add filter for rejecting practice topic
|
Add filter for rejecting practice topic
|
Python
|
mit
|
fire-uta/iiix-data-parser
|
class Filterable:
no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)
+
+ practice_topic_reject_filter = lambda filterable: filterable.topic.record_id != str(367)
identity_filter = lambda filterable: True
@staticmethod
def combine_filters( *filters ):
return lambda filterable: all([fil( filterable ) for fil in filters])
|
Add filter for rejecting practice topic
|
## Code Before:
class Filterable:
no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)
identity_filter = lambda filterable: True
@staticmethod
def combine_filters( *filters ):
return lambda filterable: all([fil( filterable ) for fil in filters])
## Instruction:
Add filter for rejecting practice topic
## Code After:
class Filterable:
no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)
practice_topic_reject_filter = lambda filterable: filterable.topic.record_id != str(367)
identity_filter = lambda filterable: True
@staticmethod
def combine_filters( *filters ):
return lambda filterable: all([fil( filterable ) for fil in filters])
|
class Filterable:
no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)
+ practice_topic_reject_filter = lambda filterable: filterable.topic.record_id != str(367)
+
identity_filter = lambda filterable: True
@staticmethod
def combine_filters( *filters ):
return lambda filterable: all([fil( filterable ) for fil in filters])
|
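A hedged sketch of how the new practice-topic reject filter might be composed with an existing filter via combine_filters, assuming Python 3 (where class-level lambdas are plain functions). The SimpleNamespace records below are hypothetical stand-ins for the real result objects, which are assumed to expose .condition.record_id and .topic.record_id.

# Illustrative only: the records below are hypothetical stand-ins.
from types import SimpleNamespace

from filterable import Filterable

results = [
    SimpleNamespace(condition=SimpleNamespace(record_id="6"),
                    topic=SimpleNamespace(record_id="101")),
    SimpleNamespace(condition=SimpleNamespace(record_id="6"),
                    topic=SimpleNamespace(record_id="367")),  # practice topic
]

keep = Filterable.combine_filters(
    Filterable.no_delays_filter,              # condition record 6 only
    Filterable.practice_topic_reject_filter,  # drop the practice topic (367)
)

filtered = [r for r in results if keep(r)]    # keeps only the first record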
3bef86bd3637642587ed15680249c278504fc4fb
|
pontoon/administration/management/commands/update_projects.py
|
pontoon/administration/management/commands/update_projects.py
|
import os
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
self.stdout.write('Successfully updated project "%s"' % project)
except Exception as e:
raise CommandError('UpdateProjectsFromRepositoryError: %s' % unicode(e))
|
import os
import datetime
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
now = datetime.datetime.now()
self.stdout.write('[%s]: Successfully updated project "%s"\n' % (now, project))
except Exception as e:
now = datetime.datetime.now()
raise CommandError('[%s]: UpdateProjectsFromRepositoryError: %s\n' % (now, unicode(e)))
|
Add timestamp and newline to log messages
|
Add timestamp and newline to log messages
|
Python
|
bsd-3-clause
|
mathjazz/pontoon,yfdyh000/pontoon,jotes/pontoon,mozilla/pontoon,sudheesh001/pontoon,mathjazz/pontoon,jotes/pontoon,vivekanand1101/pontoon,mastizada/pontoon,jotes/pontoon,m8ttyB/pontoon,Jobava/mirror-pontoon,Jobava/mirror-pontoon,participedia/pontoon,jotes/pontoon,yfdyh000/pontoon,m8ttyB/pontoon,vivekanand1101/pontoon,vivekanand1101/pontoon,sudheesh001/pontoon,sudheesh001/pontoon,Osmose/pontoon,m8ttyB/pontoon,Osmose/pontoon,Osmose/pontoon,participedia/pontoon,sudheesh001/pontoon,Jobava/mirror-pontoon,mastizada/pontoon,Osmose/pontoon,Jobava/mirror-pontoon,mozilla/pontoon,mozilla/pontoon,m8ttyB/pontoon,yfdyh000/pontoon,mathjazz/pontoon,mathjazz/pontoon,mathjazz/pontoon,vivekanand1101/pontoon,mastizada/pontoon,mozilla/pontoon,participedia/pontoon,participedia/pontoon,mozilla/pontoon,mastizada/pontoon,yfdyh000/pontoon
|
import os
+ import datetime
+
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
+ now = datetime.datetime.now()
- self.stdout.write('Successfully updated project "%s"' % project)
+ self.stdout.write('[%s]: Successfully updated project "%s"\n' % (now, project))
except Exception as e:
+ now = datetime.datetime.now()
- raise CommandError('UpdateProjectsFromRepositoryError: %s' % unicode(e))
+ raise CommandError('[%s]: UpdateProjectsFromRepositoryError: %s\n' % (now, unicode(e)))
|
Add timestamp and newline to log messages
|
## Code Before:
import os
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
self.stdout.write('Successfully updated project "%s"' % project)
except Exception as e:
raise CommandError('UpdateProjectsFromRepositoryError: %s' % unicode(e))
## Instruction:
Add timestamp and newline to log messages
## Code After:
import os
import datetime
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
now = datetime.datetime.now()
self.stdout.write('[%s]: Successfully updated project "%s"\n' % (now, project))
except Exception as e:
now = datetime.datetime.now()
raise CommandError('[%s]: UpdateProjectsFromRepositoryError: %s\n' % (now, unicode(e)))
|
import os
+ import datetime
+
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
+ now = datetime.datetime.now()
- self.stdout.write('Successfully updated project "%s"' % project)
+ self.stdout.write('[%s]: Successfully updated project "%s"\n' % (now, project))
? ++++++ ++ ++++++ +
except Exception as e:
+ now = datetime.datetime.now()
- raise CommandError('UpdateProjectsFromRepositoryError: %s' % unicode(e))
+ raise CommandError('[%s]: UpdateProjectsFromRepositoryError: %s\n' % (now, unicode(e)))
? ++++++ ++ ++++++ +
|
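A small standalone illustration of the log-message formatting introduced in the pontoon record above; interpolating a datetime object with %s yields the default "YYYY-MM-DD HH:MM:SS.ffffff" form inside the brackets. The project name is a hypothetical placeholder.

# Illustrative only; the project name is a placeholder.
import sys
import datetime

project = "firefox-aurora"            # hypothetical project name
now = datetime.datetime.now()

sys.stdout.write('[%s]: Successfully updated project "%s"\n' % (now, project))
# e.g. [2015-03-02 14:07:33.123456]: Successfully updated project "firefox-aurora"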
7197f1578335b38eb2037e8d82f15a27d786d5c1
|
var/spack/repos/builtin/packages/py-setuptools/package.py
|
var/spack/repos/builtin/packages/py-setuptools/package.py
|
from spack import *
class PySetuptools(Package):
"""Easily download, build, install, upgrade, and uninstall Python packages."""
homepage = "https://pypi.python.org/pypi/setuptools"
url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz"
version('11.3.1', '01f69212e019a2420c1693fb43593930')
version('16.0', '0ace0b96233516fc5f7c857d086aa3ad')
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
version('19.2', '78353b1f80375ca5e088f4b4627ffe03')
version('20.5', 'fadc1e1123ddbe31006e5e43e927362b')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)
|
from spack import *
class PySetuptools(Package):
"""Easily download, build, install, upgrade, and uninstall Python packages."""
homepage = "https://pypi.python.org/pypi/setuptools"
url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz"
version('11.3.1', '01f69212e019a2420c1693fb43593930')
version('16.0', '0ace0b96233516fc5f7c857d086aa3ad')
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
version('19.2', '78353b1f80375ca5e088f4b4627ffe03')
version('20.5', 'fadc1e1123ddbe31006e5e43e927362b')
version('20.6.7', '45d6110f3ec14924e44c33411db64fe6')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)
|
Add version 2.6.7 of py-setuptools
|
Add version 2.6.7 of py-setuptools
|
Python
|
lgpl-2.1
|
skosukhin/spack,mfherbst/spack,tmerrick1/spack,lgarren/spack,skosukhin/spack,krafczyk/spack,iulian787/spack,skosukhin/spack,tmerrick1/spack,skosukhin/spack,matthiasdiener/spack,matthiasdiener/spack,TheTimmy/spack,LLNL/spack,LLNL/spack,krafczyk/spack,mfherbst/spack,mfherbst/spack,matthiasdiener/spack,mfherbst/spack,EmreAtes/spack,iulian787/spack,krafczyk/spack,matthiasdiener/spack,tmerrick1/spack,EmreAtes/spack,skosukhin/spack,matthiasdiener/spack,iulian787/spack,iulian787/spack,lgarren/spack,EmreAtes/spack,TheTimmy/spack,tmerrick1/spack,EmreAtes/spack,iulian787/spack,LLNL/spack,lgarren/spack,LLNL/spack,EmreAtes/spack,LLNL/spack,TheTimmy/spack,krafczyk/spack,mfherbst/spack,lgarren/spack,tmerrick1/spack,lgarren/spack,TheTimmy/spack,krafczyk/spack,TheTimmy/spack
|
from spack import *
class PySetuptools(Package):
"""Easily download, build, install, upgrade, and uninstall Python packages."""
homepage = "https://pypi.python.org/pypi/setuptools"
url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz"
version('11.3.1', '01f69212e019a2420c1693fb43593930')
version('16.0', '0ace0b96233516fc5f7c857d086aa3ad')
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
version('19.2', '78353b1f80375ca5e088f4b4627ffe03')
version('20.5', 'fadc1e1123ddbe31006e5e43e927362b')
+ version('20.6.7', '45d6110f3ec14924e44c33411db64fe6')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)
|
Add version 2.6.7 of py-setuptools
|
## Code Before:
from spack import *
class PySetuptools(Package):
"""Easily download, build, install, upgrade, and uninstall Python packages."""
homepage = "https://pypi.python.org/pypi/setuptools"
url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz"
version('11.3.1', '01f69212e019a2420c1693fb43593930')
version('16.0', '0ace0b96233516fc5f7c857d086aa3ad')
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
version('19.2', '78353b1f80375ca5e088f4b4627ffe03')
version('20.5', 'fadc1e1123ddbe31006e5e43e927362b')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)
## Instruction:
Add version 2.6.7 of py-setuptools
## Code After:
from spack import *
class PySetuptools(Package):
"""Easily download, build, install, upgrade, and uninstall Python packages."""
homepage = "https://pypi.python.org/pypi/setuptools"
url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz"
version('11.3.1', '01f69212e019a2420c1693fb43593930')
version('16.0', '0ace0b96233516fc5f7c857d086aa3ad')
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
version('19.2', '78353b1f80375ca5e088f4b4627ffe03')
version('20.5', 'fadc1e1123ddbe31006e5e43e927362b')
version('20.6.7', '45d6110f3ec14924e44c33411db64fe6')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)
|
from spack import *
class PySetuptools(Package):
"""Easily download, build, install, upgrade, and uninstall Python packages."""
homepage = "https://pypi.python.org/pypi/setuptools"
url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz"
version('11.3.1', '01f69212e019a2420c1693fb43593930')
version('16.0', '0ace0b96233516fc5f7c857d086aa3ad')
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
version('19.2', '78353b1f80375ca5e088f4b4627ffe03')
version('20.5', 'fadc1e1123ddbe31006e5e43e927362b')
+ version('20.6.7', '45d6110f3ec14924e44c33411db64fe6')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)
|
d3428351e005897f45bec1f4db61d776d2d9a962
|
tests/test_migrate.py
|
tests/test_migrate.py
|
from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
|
import pytest
from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
def test_open_old(tmpdir):
# Make sure that opening an old database results in an exception and not
# in data loss
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
with pytest.raises(Exception):
TinyDB(str(db_file))
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
|
Test that opening an old database fails
|
Test that opening an old database fails
|
Python
|
mit
|
cagnosolutions/tinydb,Callwoola/tinydb,ivankravets/tinydb,raquel-ucl/tinydb,msiemens/tinydb
|
+ import pytest
+
from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
+
+
+ def test_open_old(tmpdir):
+ # Make sure that opening an old database results in an exception and not
+ # in data loss
+ db_file = tmpdir.join('db.json')
+ db_file.write(v1_0)
+
+ with pytest.raises(Exception):
+ TinyDB(str(db_file))
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
|
Test that opening an old database fails
|
## Code Before:
from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
## Instruction:
Test that opening an old database fails
## Code After:
import pytest
from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
def test_open_old(tmpdir):
# Make sure that opening an old database results in an exception and not
# in data loss
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
with pytest.raises(Exception):
TinyDB(str(db_file))
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
|
+ import pytest
+
from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
+ def test_open_old(tmpdir):
+ # Make sure that opening an old database results in an exception and not
+ # in data loss
+ db_file = tmpdir.join('db.json')
+ db_file.write(v1_0)
+
+ with pytest.raises(Exception):
+ TinyDB(str(db_file))
+
+
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
|
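A hedged end-to-end sketch of the migration flow the tests above exercise: write a v1.0-format file, confirm TinyDB refuses to open it, migrate it, then read it back. It uses a temporary directory instead of pytest's tmpdir fixture; only the calls already present in the record are relied on.

# Illustrative sketch of the migration flow covered by the tests above.
import os
import tempfile

from tinydb import TinyDB, where
from tinydb.migrate import migrate

V1_0 = '''
{
    "_default": [{"key": "value", "_id": 1}],
    "table": [{"key": "value", "_id": 2}]
}
'''

path = os.path.join(tempfile.mkdtemp(), "db.json")
with open(path, "w") as f:
    f.write(V1_0)

try:
    TinyDB(path)              # old format: expected to raise instead of losing data
except Exception as exc:
    print("refused to open v1.0 file:", exc)

migrate(path)                 # rewrite the file in the current format
db = TinyDB(path)
print(db.count(where("key") == "value"))   # -> 1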
280e7cf4f7e667ccba796294c58edd5a0f744b70
|
geopandas/__init__.py
|
geopandas/__init__.py
|
try:
from geopandas.version import version as __version__
except ImportError:
__version__ = '0.1.0'
from geopandas.geoseries import GeoSeries
from geopandas.geodataframe import GeoDataFrame
from geopandas.io.file import read_file
from geopandas.io.sql import read_postgis
# make the interactive namespace easier to use
# for `from geopandas import *` demos.
import geopandas as gpd
import pandas as pd
import numpy as np
|
try:
from geopandas.version import version as __version__
except ImportError:
__version__ = '0.2.0.dev-unknown'
from geopandas.geoseries import GeoSeries
from geopandas.geodataframe import GeoDataFrame
from geopandas.io.file import read_file
from geopandas.io.sql import read_postgis
# make the interactive namespace easier to use
# for `from geopandas import *` demos.
import geopandas as gpd
import pandas as pd
import numpy as np
|
Set default version to 0.2.0.dev-unknown
|
BLD: Set default version to 0.2.0.dev-unknown
|
Python
|
bsd-3-clause
|
IamJeffG/geopandas,geopandas/geopandas,perrygeo/geopandas,jorisvandenbossche/geopandas,ozak/geopandas,ozak/geopandas,micahcochran/geopandas,jorisvandenbossche/geopandas,geopandas/geopandas,scw/geopandas,urschrei/geopandas,koldunovn/geopandas,micahcochran/geopandas,jorisvandenbossche/geopandas,kwinkunks/geopandas,jdmcbr/geopandas,fonnesbeck/geopandas,jdmcbr/geopandas,geopandas/geopandas,maxalbert/geopandas,snario/geopandas
|
try:
from geopandas.version import version as __version__
except ImportError:
- __version__ = '0.1.0'
+ __version__ = '0.2.0.dev-unknown'
from geopandas.geoseries import GeoSeries
from geopandas.geodataframe import GeoDataFrame
from geopandas.io.file import read_file
from geopandas.io.sql import read_postgis
# make the interactive namespace easier to use
# for `from geopandas import *` demos.
import geopandas as gpd
import pandas as pd
import numpy as np
|
Set default version to 0.2.0.dev-unknown
|
## Code Before:
try:
from geopandas.version import version as __version__
except ImportError:
__version__ = '0.1.0'
from geopandas.geoseries import GeoSeries
from geopandas.geodataframe import GeoDataFrame
from geopandas.io.file import read_file
from geopandas.io.sql import read_postgis
# make the interactive namespace easier to use
# for `from geopandas import *` demos.
import geopandas as gpd
import pandas as pd
import numpy as np
## Instruction:
Set default version to 0.2.0.dev-unknown
## Code After:
try:
from geopandas.version import version as __version__
except ImportError:
__version__ = '0.2.0.dev-unknown'
from geopandas.geoseries import GeoSeries
from geopandas.geodataframe import GeoDataFrame
from geopandas.io.file import read_file
from geopandas.io.sql import read_postgis
# make the interactive namespace easier to use
# for `from geopandas import *` demos.
import geopandas as gpd
import pandas as pd
import numpy as np
|
try:
from geopandas.version import version as __version__
except ImportError:
- __version__ = '0.1.0'
? ^
+ __version__ = '0.2.0.dev-unknown'
? ^ ++++++++++++
from geopandas.geoseries import GeoSeries
from geopandas.geodataframe import GeoDataFrame
from geopandas.io.file import read_file
from geopandas.io.sql import read_postgis
# make the interactive namespace easier to use
# for `from geopandas import *` demos.
import geopandas as gpd
import pandas as pd
import numpy as np
|
e16e2a669f883480329f41acbd0955920dfc83e2
|
Tools/send2server/s2s.py
|
Tools/send2server/s2s.py
|
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
"""S2S (Send 2 Server)"""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
|
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
"""S2S (Send 2 Server) is designed for use with a public ssh key."""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
|
Update description. Module needs testing.
|
Update description. Module needs testing.
|
Python
|
mit
|
datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts
|
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
- """S2S (Send 2 Server)"""
+ """S2S (Send 2 Server) is designed for use with a public ssh key."""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
|
Update description. Module needs testing.
|
## Code Before:
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
"""S2S (Send 2 Server)"""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
## Instruction:
Update description. Module needs testing.
## Code After:
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
"""S2S (Send 2 Server) is designed for use with a public ssh key."""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
|
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
- """S2S (Send 2 Server)"""
+ """S2S (Send 2 Server) is designed for use with a public ssh key."""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
|
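A note on the S2S record above: the class as committed would not run, since __init__ is missing self, returns a value (which __init__ cannot do), and never stores sendto on the instance that scpto later reads. The sketch below is a minimal corrected version, assuming key-based (passwordless) scp access as the docstring describes; the username, host, and paths in the usage line are placeholders.

# Corrected sketch of the S2S helper above; hostnames and paths are placeholders.
import subprocess


class S2S(object):
    """Send files to a server over scp, relying on a public ssh key."""

    def __init__(self, username, server_address):
        self.sendto = username + "@" + server_address + ":"

    def scpto(self, file, destpath):
        # List form avoids shell=True and quoting issues in file names.
        status = subprocess.call(["scp", file, self.sendto + destpath])
        if status == 0:
            print("%s file sent." % file)
        else:
            print("%s file not sent." % file)


# Hypothetical usage:
# S2S("deploy", "example.org").scpto("results.csv", "/srv/data/")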